merge: cut import cycle at merge -> extensions...
Author: Yuya Nishihara
Changeset: r36027:46a54de9 (default branch)
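The changeset below is the cmdutil.py side of that fix: with the merge -> extensions edge cut, cmdutil can import merge at module level ("merge as mergemod" is added to the top-level import block), so the delayed function-local "from . import merge as mergemod" workarounds in dorecord and _conflictsmsg are removed. The minimal sketch that follows illustrates only that pattern; the file, function, and module names in it (cycle_sketch.py, conflictsmsg, mergemod_standin, with the standard-library os module standing in for the dependency) are hypothetical and are not Mercurial's actual API.

    # cycle_sketch.py - hypothetical sketch of the "delayed import -> top-level
    # import" move; placeholder names, not Mercurial's modules.

    # Once the dependency no longer imports back into this module, it can be
    # imported a single time at module level (mirroring the new top-level
    # "merge as mergemod" import in the diff below).
    import os as mergemod_standin  # stand-in for the formerly cyclic dependency

    def conflictsmsg(repo_root):
        # Before the cycle was cut, functions like this one had to start with
        # a delayed import to dodge the circular dependency:
        #     from . import merge as mergemod
        # With the module-level import available, the per-function workaround
        # disappears and the body simply uses it.
        return 'repo at %s' % mergemod_standin.path.abspath(repo_root)

    if __name__ == '__main__':
        print(conflictsmsg('.'))

The same reasoning applies to dorecord below: its local import existed only to postpone loading merge until call time, and it is no longer needed once the import can safely live at the top of the file.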
@@ -1,3164 +1,3162 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import tempfile
13 import tempfile
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 short,
20 short,
21 )
21 )
22
22
23 from . import (
23 from . import (
24 bookmarks,
24 bookmarks,
25 changelog,
25 changelog,
26 copies,
26 copies,
27 crecord as crecordmod,
27 crecord as crecordmod,
28 dirstateguard,
28 dirstateguard,
29 encoding,
29 encoding,
30 error,
30 error,
31 formatter,
31 formatter,
32 logcmdutil,
32 logcmdutil,
33 match as matchmod,
33 match as matchmod,
34 merge as mergemod,
34 obsolete,
35 obsolete,
35 patch,
36 patch,
36 pathutil,
37 pathutil,
37 pycompat,
38 pycompat,
38 registrar,
39 registrar,
39 revlog,
40 revlog,
40 rewriteutil,
41 rewriteutil,
41 scmutil,
42 scmutil,
42 smartset,
43 smartset,
43 subrepoutil,
44 subrepoutil,
44 templater,
45 templater,
45 util,
46 util,
46 vfs as vfsmod,
47 vfs as vfsmod,
47 )
48 )
48 stringio = util.stringio
49 stringio = util.stringio
49
50
50 # templates of common command options
51 # templates of common command options
51
52
52 dryrunopts = [
53 dryrunopts = [
53 ('n', 'dry-run', None,
54 ('n', 'dry-run', None,
54 _('do not perform actions, just print output')),
55 _('do not perform actions, just print output')),
55 ]
56 ]
56
57
57 remoteopts = [
58 remoteopts = [
58 ('e', 'ssh', '',
59 ('e', 'ssh', '',
59 _('specify ssh command to use'), _('CMD')),
60 _('specify ssh command to use'), _('CMD')),
60 ('', 'remotecmd', '',
61 ('', 'remotecmd', '',
61 _('specify hg command to run on the remote side'), _('CMD')),
62 _('specify hg command to run on the remote side'), _('CMD')),
62 ('', 'insecure', None,
63 ('', 'insecure', None,
63 _('do not verify server certificate (ignoring web.cacerts config)')),
64 _('do not verify server certificate (ignoring web.cacerts config)')),
64 ]
65 ]
65
66
66 walkopts = [
67 walkopts = [
67 ('I', 'include', [],
68 ('I', 'include', [],
68 _('include names matching the given patterns'), _('PATTERN')),
69 _('include names matching the given patterns'), _('PATTERN')),
69 ('X', 'exclude', [],
70 ('X', 'exclude', [],
70 _('exclude names matching the given patterns'), _('PATTERN')),
71 _('exclude names matching the given patterns'), _('PATTERN')),
71 ]
72 ]
72
73
73 commitopts = [
74 commitopts = [
74 ('m', 'message', '',
75 ('m', 'message', '',
75 _('use text as commit message'), _('TEXT')),
76 _('use text as commit message'), _('TEXT')),
76 ('l', 'logfile', '',
77 ('l', 'logfile', '',
77 _('read commit message from file'), _('FILE')),
78 _('read commit message from file'), _('FILE')),
78 ]
79 ]
79
80
80 commitopts2 = [
81 commitopts2 = [
81 ('d', 'date', '',
82 ('d', 'date', '',
82 _('record the specified date as commit date'), _('DATE')),
83 _('record the specified date as commit date'), _('DATE')),
83 ('u', 'user', '',
84 ('u', 'user', '',
84 _('record the specified user as committer'), _('USER')),
85 _('record the specified user as committer'), _('USER')),
85 ]
86 ]
86
87
87 # hidden for now
88 # hidden for now
88 formatteropts = [
89 formatteropts = [
89 ('T', 'template', '',
90 ('T', 'template', '',
90 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
91 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
91 ]
92 ]
92
93
93 templateopts = [
94 templateopts = [
94 ('', 'style', '',
95 ('', 'style', '',
95 _('display using template map file (DEPRECATED)'), _('STYLE')),
96 _('display using template map file (DEPRECATED)'), _('STYLE')),
96 ('T', 'template', '',
97 ('T', 'template', '',
97 _('display with template'), _('TEMPLATE')),
98 _('display with template'), _('TEMPLATE')),
98 ]
99 ]
99
100
100 logopts = [
101 logopts = [
101 ('p', 'patch', None, _('show patch')),
102 ('p', 'patch', None, _('show patch')),
102 ('g', 'git', None, _('use git extended diff format')),
103 ('g', 'git', None, _('use git extended diff format')),
103 ('l', 'limit', '',
104 ('l', 'limit', '',
104 _('limit number of changes displayed'), _('NUM')),
105 _('limit number of changes displayed'), _('NUM')),
105 ('M', 'no-merges', None, _('do not show merges')),
106 ('M', 'no-merges', None, _('do not show merges')),
106 ('', 'stat', None, _('output diffstat-style summary of changes')),
107 ('', 'stat', None, _('output diffstat-style summary of changes')),
107 ('G', 'graph', None, _("show the revision DAG")),
108 ('G', 'graph', None, _("show the revision DAG")),
108 ] + templateopts
109 ] + templateopts
109
110
110 diffopts = [
111 diffopts = [
111 ('a', 'text', None, _('treat all files as text')),
112 ('a', 'text', None, _('treat all files as text')),
112 ('g', 'git', None, _('use git extended diff format')),
113 ('g', 'git', None, _('use git extended diff format')),
113 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
114 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
114 ('', 'nodates', None, _('omit dates from diff headers'))
115 ('', 'nodates', None, _('omit dates from diff headers'))
115 ]
116 ]
116
117
117 diffwsopts = [
118 diffwsopts = [
118 ('w', 'ignore-all-space', None,
119 ('w', 'ignore-all-space', None,
119 _('ignore white space when comparing lines')),
120 _('ignore white space when comparing lines')),
120 ('b', 'ignore-space-change', None,
121 ('b', 'ignore-space-change', None,
121 _('ignore changes in the amount of white space')),
122 _('ignore changes in the amount of white space')),
122 ('B', 'ignore-blank-lines', None,
123 ('B', 'ignore-blank-lines', None,
123 _('ignore changes whose lines are all blank')),
124 _('ignore changes whose lines are all blank')),
124 ('Z', 'ignore-space-at-eol', None,
125 ('Z', 'ignore-space-at-eol', None,
125 _('ignore changes in whitespace at EOL')),
126 _('ignore changes in whitespace at EOL')),
126 ]
127 ]
127
128
128 diffopts2 = [
129 diffopts2 = [
129 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
130 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
130 ('p', 'show-function', None, _('show which function each change is in')),
131 ('p', 'show-function', None, _('show which function each change is in')),
131 ('', 'reverse', None, _('produce a diff that undoes the changes')),
132 ('', 'reverse', None, _('produce a diff that undoes the changes')),
132 ] + diffwsopts + [
133 ] + diffwsopts + [
133 ('U', 'unified', '',
134 ('U', 'unified', '',
134 _('number of lines of context to show'), _('NUM')),
135 _('number of lines of context to show'), _('NUM')),
135 ('', 'stat', None, _('output diffstat-style summary of changes')),
136 ('', 'stat', None, _('output diffstat-style summary of changes')),
136 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
137 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
137 ]
138 ]
138
139
139 mergetoolopts = [
140 mergetoolopts = [
140 ('t', 'tool', '', _('specify merge tool')),
141 ('t', 'tool', '', _('specify merge tool')),
141 ]
142 ]
142
143
143 similarityopts = [
144 similarityopts = [
144 ('s', 'similarity', '',
145 ('s', 'similarity', '',
145 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
146 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
146 ]
147 ]
147
148
148 subrepoopts = [
149 subrepoopts = [
149 ('S', 'subrepos', None,
150 ('S', 'subrepos', None,
150 _('recurse into subrepositories'))
151 _('recurse into subrepositories'))
151 ]
152 ]
152
153
153 debugrevlogopts = [
154 debugrevlogopts = [
154 ('c', 'changelog', False, _('open changelog')),
155 ('c', 'changelog', False, _('open changelog')),
155 ('m', 'manifest', False, _('open manifest')),
156 ('m', 'manifest', False, _('open manifest')),
156 ('', 'dir', '', _('open directory manifest')),
157 ('', 'dir', '', _('open directory manifest')),
157 ]
158 ]
158
159
159 # special string such that everything below this line will be ignored in the
160 # special string such that everything below this line will be ignored in the
160 # editor text
161 # editor text
161 _linebelow = "^HG: ------------------------ >8 ------------------------$"
162 _linebelow = "^HG: ------------------------ >8 ------------------------$"
162
163
163 def ishunk(x):
164 def ishunk(x):
164 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
165 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
165 return isinstance(x, hunkclasses)
166 return isinstance(x, hunkclasses)
166
167
167 def newandmodified(chunks, originalchunks):
168 def newandmodified(chunks, originalchunks):
168 newlyaddedandmodifiedfiles = set()
169 newlyaddedandmodifiedfiles = set()
169 for chunk in chunks:
170 for chunk in chunks:
170 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
171 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
171 originalchunks:
172 originalchunks:
172 newlyaddedandmodifiedfiles.add(chunk.header.filename())
173 newlyaddedandmodifiedfiles.add(chunk.header.filename())
173 return newlyaddedandmodifiedfiles
174 return newlyaddedandmodifiedfiles
174
175
175 def parsealiases(cmd):
176 def parsealiases(cmd):
176 return cmd.lstrip("^").split("|")
177 return cmd.lstrip("^").split("|")
177
178
178 def setupwrapcolorwrite(ui):
179 def setupwrapcolorwrite(ui):
179 # wrap ui.write so diff output can be labeled/colorized
180 # wrap ui.write so diff output can be labeled/colorized
180 def wrapwrite(orig, *args, **kw):
181 def wrapwrite(orig, *args, **kw):
181 label = kw.pop(r'label', '')
182 label = kw.pop(r'label', '')
182 for chunk, l in patch.difflabel(lambda: args):
183 for chunk, l in patch.difflabel(lambda: args):
183 orig(chunk, label=label + l)
184 orig(chunk, label=label + l)
184
185
185 oldwrite = ui.write
186 oldwrite = ui.write
186 def wrap(*args, **kwargs):
187 def wrap(*args, **kwargs):
187 return wrapwrite(oldwrite, *args, **kwargs)
188 return wrapwrite(oldwrite, *args, **kwargs)
188 setattr(ui, 'write', wrap)
189 setattr(ui, 'write', wrap)
189 return oldwrite
190 return oldwrite
190
191
191 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
192 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
192 if usecurses:
193 if usecurses:
193 if testfile:
194 if testfile:
194 recordfn = crecordmod.testdecorator(testfile,
195 recordfn = crecordmod.testdecorator(testfile,
195 crecordmod.testchunkselector)
196 crecordmod.testchunkselector)
196 else:
197 else:
197 recordfn = crecordmod.chunkselector
198 recordfn = crecordmod.chunkselector
198
199
199 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
200 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
200
201
201 else:
202 else:
202 return patch.filterpatch(ui, originalhunks, operation)
203 return patch.filterpatch(ui, originalhunks, operation)
203
204
204 def recordfilter(ui, originalhunks, operation=None):
205 def recordfilter(ui, originalhunks, operation=None):
205 """ Prompts the user to filter the originalhunks and return a list of
206 """ Prompts the user to filter the originalhunks and return a list of
206 selected hunks.
207 selected hunks.
207 *operation* is used to build ui messages to indicate to the user what
208 *operation* is used to build ui messages to indicate to the user what
208 kind of filtering they are doing: reverting, committing, shelving, etc.
209 kind of filtering they are doing: reverting, committing, shelving, etc.
209 (see patch.filterpatch).
210 (see patch.filterpatch).
210 """
211 """
211 usecurses = crecordmod.checkcurses(ui)
212 usecurses = crecordmod.checkcurses(ui)
212 testfile = ui.config('experimental', 'crecordtest')
213 testfile = ui.config('experimental', 'crecordtest')
213 oldwrite = setupwrapcolorwrite(ui)
214 oldwrite = setupwrapcolorwrite(ui)
214 try:
215 try:
215 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
216 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
216 testfile, operation)
217 testfile, operation)
217 finally:
218 finally:
218 ui.write = oldwrite
219 ui.write = oldwrite
219 return newchunks, newopts
220 return newchunks, newopts
220
221
221 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
222 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
222 filterfn, *pats, **opts):
223 filterfn, *pats, **opts):
223 from . import merge as mergemod
224 opts = pycompat.byteskwargs(opts)
224 opts = pycompat.byteskwargs(opts)
225 if not ui.interactive():
225 if not ui.interactive():
226 if cmdsuggest:
226 if cmdsuggest:
227 msg = _('running non-interactively, use %s instead') % cmdsuggest
227 msg = _('running non-interactively, use %s instead') % cmdsuggest
228 else:
228 else:
229 msg = _('running non-interactively')
229 msg = _('running non-interactively')
230 raise error.Abort(msg)
230 raise error.Abort(msg)
231
231
232 # make sure username is set before going interactive
232 # make sure username is set before going interactive
233 if not opts.get('user'):
233 if not opts.get('user'):
234 ui.username() # raise exception, username not provided
234 ui.username() # raise exception, username not provided
235
235
236 def recordfunc(ui, repo, message, match, opts):
236 def recordfunc(ui, repo, message, match, opts):
237 """This is generic record driver.
237 """This is generic record driver.
238
238
239 Its job is to interactively filter local changes, and
239 Its job is to interactively filter local changes, and
240 accordingly prepare working directory into a state in which the
240 accordingly prepare working directory into a state in which the
241 job can be delegated to a non-interactive commit command such as
241 job can be delegated to a non-interactive commit command such as
242 'commit' or 'qrefresh'.
242 'commit' or 'qrefresh'.
243
243
244 After the actual job is done by non-interactive command, the
244 After the actual job is done by non-interactive command, the
245 working directory is restored to its original state.
245 working directory is restored to its original state.
246
246
247 In the end we'll record interesting changes, and everything else
247 In the end we'll record interesting changes, and everything else
248 will be left in place, so the user can continue working.
248 will be left in place, so the user can continue working.
249 """
249 """
250
250
251 checkunfinished(repo, commit=True)
251 checkunfinished(repo, commit=True)
252 wctx = repo[None]
252 wctx = repo[None]
253 merge = len(wctx.parents()) > 1
253 merge = len(wctx.parents()) > 1
254 if merge:
254 if merge:
255 raise error.Abort(_('cannot partially commit a merge '
255 raise error.Abort(_('cannot partially commit a merge '
256 '(use "hg commit" instead)'))
256 '(use "hg commit" instead)'))
257
257
258 def fail(f, msg):
258 def fail(f, msg):
259 raise error.Abort('%s: %s' % (f, msg))
259 raise error.Abort('%s: %s' % (f, msg))
260
260
261 force = opts.get('force')
261 force = opts.get('force')
262 if not force:
262 if not force:
263 vdirs = []
263 vdirs = []
264 match.explicitdir = vdirs.append
264 match.explicitdir = vdirs.append
265 match.bad = fail
265 match.bad = fail
266
266
267 status = repo.status(match=match)
267 status = repo.status(match=match)
268 if not force:
268 if not force:
269 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
269 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
270 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
270 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
271 diffopts.nodates = True
271 diffopts.nodates = True
272 diffopts.git = True
272 diffopts.git = True
273 diffopts.showfunc = True
273 diffopts.showfunc = True
274 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
274 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
275 originalchunks = patch.parsepatch(originaldiff)
275 originalchunks = patch.parsepatch(originaldiff)
276
276
277 # 1. filter patch, since we are intending to apply subset of it
277 # 1. filter patch, since we are intending to apply subset of it
278 try:
278 try:
279 chunks, newopts = filterfn(ui, originalchunks)
279 chunks, newopts = filterfn(ui, originalchunks)
280 except error.PatchError as err:
280 except error.PatchError as err:
281 raise error.Abort(_('error parsing patch: %s') % err)
281 raise error.Abort(_('error parsing patch: %s') % err)
282 opts.update(newopts)
282 opts.update(newopts)
283
283
284 # We need to keep a backup of files that have been newly added and
284 # We need to keep a backup of files that have been newly added and
285 # modified during the recording process because there is a previous
285 # modified during the recording process because there is a previous
286 # version without the edit in the workdir
286 # version without the edit in the workdir
287 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
287 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
288 contenders = set()
288 contenders = set()
289 for h in chunks:
289 for h in chunks:
290 try:
290 try:
291 contenders.update(set(h.files()))
291 contenders.update(set(h.files()))
292 except AttributeError:
292 except AttributeError:
293 pass
293 pass
294
294
295 changed = status.modified + status.added + status.removed
295 changed = status.modified + status.added + status.removed
296 newfiles = [f for f in changed if f in contenders]
296 newfiles = [f for f in changed if f in contenders]
297 if not newfiles:
297 if not newfiles:
298 ui.status(_('no changes to record\n'))
298 ui.status(_('no changes to record\n'))
299 return 0
299 return 0
300
300
301 modified = set(status.modified)
301 modified = set(status.modified)
302
302
303 # 2. backup changed files, so we can restore them in the end
303 # 2. backup changed files, so we can restore them in the end
304
304
305 if backupall:
305 if backupall:
306 tobackup = changed
306 tobackup = changed
307 else:
307 else:
308 tobackup = [f for f in newfiles if f in modified or f in \
308 tobackup = [f for f in newfiles if f in modified or f in \
309 newlyaddedandmodifiedfiles]
309 newlyaddedandmodifiedfiles]
310 backups = {}
310 backups = {}
311 if tobackup:
311 if tobackup:
312 backupdir = repo.vfs.join('record-backups')
312 backupdir = repo.vfs.join('record-backups')
313 try:
313 try:
314 os.mkdir(backupdir)
314 os.mkdir(backupdir)
315 except OSError as err:
315 except OSError as err:
316 if err.errno != errno.EEXIST:
316 if err.errno != errno.EEXIST:
317 raise
317 raise
318 try:
318 try:
319 # backup continues
319 # backup continues
320 for f in tobackup:
320 for f in tobackup:
321 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
321 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
322 dir=backupdir)
322 dir=backupdir)
323 os.close(fd)
323 os.close(fd)
324 ui.debug('backup %r as %r\n' % (f, tmpname))
324 ui.debug('backup %r as %r\n' % (f, tmpname))
325 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
325 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
326 backups[f] = tmpname
326 backups[f] = tmpname
327
327
328 fp = stringio()
328 fp = stringio()
329 for c in chunks:
329 for c in chunks:
330 fname = c.filename()
330 fname = c.filename()
331 if fname in backups:
331 if fname in backups:
332 c.write(fp)
332 c.write(fp)
333 dopatch = fp.tell()
333 dopatch = fp.tell()
334 fp.seek(0)
334 fp.seek(0)
335
335
336 # 2.5 optionally review / modify patch in text editor
336 # 2.5 optionally review / modify patch in text editor
337 if opts.get('review', False):
337 if opts.get('review', False):
338 patchtext = (crecordmod.diffhelptext
338 patchtext = (crecordmod.diffhelptext
339 + crecordmod.patchhelptext
339 + crecordmod.patchhelptext
340 + fp.read())
340 + fp.read())
341 reviewedpatch = ui.edit(patchtext, "",
341 reviewedpatch = ui.edit(patchtext, "",
342 action="diff",
342 action="diff",
343 repopath=repo.path)
343 repopath=repo.path)
344 fp.truncate(0)
344 fp.truncate(0)
345 fp.write(reviewedpatch)
345 fp.write(reviewedpatch)
346 fp.seek(0)
346 fp.seek(0)
347
347
348 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
348 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
349 # 3a. apply filtered patch to clean repo (clean)
349 # 3a. apply filtered patch to clean repo (clean)
350 if backups:
350 if backups:
351 # Equivalent to hg.revert
351 # Equivalent to hg.revert
352 m = scmutil.matchfiles(repo, backups.keys())
352 m = scmutil.matchfiles(repo, backups.keys())
353 mergemod.update(repo, repo.dirstate.p1(),
353 mergemod.update(repo, repo.dirstate.p1(),
354 False, True, matcher=m)
354 False, True, matcher=m)
355
355
356 # 3b. (apply)
356 # 3b. (apply)
357 if dopatch:
357 if dopatch:
358 try:
358 try:
359 ui.debug('applying patch\n')
359 ui.debug('applying patch\n')
360 ui.debug(fp.getvalue())
360 ui.debug(fp.getvalue())
361 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
361 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
362 except error.PatchError as err:
362 except error.PatchError as err:
363 raise error.Abort(str(err))
363 raise error.Abort(str(err))
364 del fp
364 del fp
365
365
366 # 4. We prepared working directory according to filtered
366 # 4. We prepared working directory according to filtered
367 # patch. Now is the time to delegate the job to
367 # patch. Now is the time to delegate the job to
368 # commit/qrefresh or the like!
368 # commit/qrefresh or the like!
369
369
370 # Make all of the pathnames absolute.
370 # Make all of the pathnames absolute.
371 newfiles = [repo.wjoin(nf) for nf in newfiles]
371 newfiles = [repo.wjoin(nf) for nf in newfiles]
372 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
372 return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
373 finally:
373 finally:
374 # 5. finally restore backed-up files
374 # 5. finally restore backed-up files
375 try:
375 try:
376 dirstate = repo.dirstate
376 dirstate = repo.dirstate
377 for realname, tmpname in backups.iteritems():
377 for realname, tmpname in backups.iteritems():
378 ui.debug('restoring %r to %r\n' % (tmpname, realname))
378 ui.debug('restoring %r to %r\n' % (tmpname, realname))
379
379
380 if dirstate[realname] == 'n':
380 if dirstate[realname] == 'n':
381 # without normallookup, restoring timestamp
381 # without normallookup, restoring timestamp
382 # may cause partially committed files
382 # may cause partially committed files
383 # to be treated as unmodified
383 # to be treated as unmodified
384 dirstate.normallookup(realname)
384 dirstate.normallookup(realname)
385
385
386 # copystat=True here and above are a hack to trick any
386 # copystat=True here and above are a hack to trick any
387 # editors that have f open that we haven't modified them.
387 # editors that have f open that we haven't modified them.
388 #
388 #
389 # Also note that this is racy as an editor could notice the
389 # Also note that this is racy as an editor could notice the
390 # file's mtime before we've finished writing it.
390 # file's mtime before we've finished writing it.
391 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
391 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
392 os.unlink(tmpname)
392 os.unlink(tmpname)
393 if tobackup:
393 if tobackup:
394 os.rmdir(backupdir)
394 os.rmdir(backupdir)
395 except OSError:
395 except OSError:
396 pass
396 pass
397
397
398 def recordinwlock(ui, repo, message, match, opts):
398 def recordinwlock(ui, repo, message, match, opts):
399 with repo.wlock():
399 with repo.wlock():
400 return recordfunc(ui, repo, message, match, opts)
400 return recordfunc(ui, repo, message, match, opts)
401
401
402 return commit(ui, repo, recordinwlock, pats, opts)
402 return commit(ui, repo, recordinwlock, pats, opts)
403
403
404 class dirnode(object):
404 class dirnode(object):
405 """
405 """
406 Represent a directory in user working copy with information required for
406 Represent a directory in user working copy with information required for
407 the purpose of tersing its status.
407 the purpose of tersing its status.
408
408
409 path is the path to the directory
409 path is the path to the directory
410
410
411 statuses is a set of statuses of all files in this directory (this includes
411 statuses is a set of statuses of all files in this directory (this includes
412 all the files in all the subdirectories too)
412 all the files in all the subdirectories too)
413
413
414 files is a list of files which are direct child of this directory
414 files is a list of files which are direct child of this directory
415
415
416 subdirs is a dictionary of sub-directory name as the key and its own
416 subdirs is a dictionary of sub-directory name as the key and its own
417 dirnode object as the value
417 dirnode object as the value
418 """
418 """
419
419
420 def __init__(self, dirpath):
420 def __init__(self, dirpath):
421 self.path = dirpath
421 self.path = dirpath
422 self.statuses = set([])
422 self.statuses = set([])
423 self.files = []
423 self.files = []
424 self.subdirs = {}
424 self.subdirs = {}
425
425
426 def _addfileindir(self, filename, status):
426 def _addfileindir(self, filename, status):
427 """Add a file in this directory as a direct child."""
427 """Add a file in this directory as a direct child."""
428 self.files.append((filename, status))
428 self.files.append((filename, status))
429
429
430 def addfile(self, filename, status):
430 def addfile(self, filename, status):
431 """
431 """
432 Add a file to this directory or to its direct parent directory.
432 Add a file to this directory or to its direct parent directory.
433
433
434 If the file is not direct child of this directory, we traverse to the
434 If the file is not direct child of this directory, we traverse to the
435 directory of which this file is a direct child of and add the file
435 directory of which this file is a direct child of and add the file
436 there.
436 there.
437 """
437 """
438
438
439 # if the filename contains a path separator, it means it's not a direct
439 # if the filename contains a path separator, it means it's not a direct
440 # child of this directory
440 # child of this directory
441 if '/' in filename:
441 if '/' in filename:
442 subdir, filep = filename.split('/', 1)
442 subdir, filep = filename.split('/', 1)
443
443
444 # does the dirnode object for subdir exist
444 # does the dirnode object for subdir exist
445 if subdir not in self.subdirs:
445 if subdir not in self.subdirs:
446 subdirpath = os.path.join(self.path, subdir)
446 subdirpath = os.path.join(self.path, subdir)
447 self.subdirs[subdir] = dirnode(subdirpath)
447 self.subdirs[subdir] = dirnode(subdirpath)
448
448
449 # try adding the file in subdir
449 # try adding the file in subdir
450 self.subdirs[subdir].addfile(filep, status)
450 self.subdirs[subdir].addfile(filep, status)
451
451
452 else:
452 else:
453 self._addfileindir(filename, status)
453 self._addfileindir(filename, status)
454
454
455 if status not in self.statuses:
455 if status not in self.statuses:
456 self.statuses.add(status)
456 self.statuses.add(status)
457
457
458 def iterfilepaths(self):
458 def iterfilepaths(self):
459 """Yield (status, path) for files directly under this directory."""
459 """Yield (status, path) for files directly under this directory."""
460 for f, st in self.files:
460 for f, st in self.files:
461 yield st, os.path.join(self.path, f)
461 yield st, os.path.join(self.path, f)
462
462
463 def tersewalk(self, terseargs):
463 def tersewalk(self, terseargs):
464 """
464 """
465 Yield (status, path) obtained by processing the status of this
465 Yield (status, path) obtained by processing the status of this
466 dirnode.
466 dirnode.
467
467
468 terseargs is the string of arguments passed by the user with `--terse`
468 terseargs is the string of arguments passed by the user with `--terse`
469 flag.
469 flag.
470
470
471 Following are the cases which can happen:
471 Following are the cases which can happen:
472
472
473 1) All the files in the directory (including all the files in its
473 1) All the files in the directory (including all the files in its
474 subdirectories) share the same status and the user has asked us to terse
474 subdirectories) share the same status and the user has asked us to terse
475 that status. -> yield (status, dirpath)
475 that status. -> yield (status, dirpath)
476
476
477 2) Otherwise, we do following:
477 2) Otherwise, we do following:
478
478
479 a) Yield (status, filepath) for all the files which are in this
479 a) Yield (status, filepath) for all the files which are in this
480 directory (only the ones in this directory, not the subdirs)
480 directory (only the ones in this directory, not the subdirs)
481
481
482 b) Recurse the function on all the subdirectories of this
482 b) Recurse the function on all the subdirectories of this
483 directory
483 directory
484 """
484 """
485
485
486 if len(self.statuses) == 1:
486 if len(self.statuses) == 1:
487 onlyst = self.statuses.pop()
487 onlyst = self.statuses.pop()
488
488
489 # Making sure we terse only when the status abbreviation is
489 # Making sure we terse only when the status abbreviation is
490 # passed as terse argument
490 # passed as terse argument
491 if onlyst in terseargs:
491 if onlyst in terseargs:
492 yield onlyst, self.path + pycompat.ossep
492 yield onlyst, self.path + pycompat.ossep
493 return
493 return
494
494
495 # add the files to status list
495 # add the files to status list
496 for st, fpath in self.iterfilepaths():
496 for st, fpath in self.iterfilepaths():
497 yield st, fpath
497 yield st, fpath
498
498
499 #recurse on the subdirs
499 #recurse on the subdirs
500 for dirobj in self.subdirs.values():
500 for dirobj in self.subdirs.values():
501 for st, fpath in dirobj.tersewalk(terseargs):
501 for st, fpath in dirobj.tersewalk(terseargs):
502 yield st, fpath
502 yield st, fpath
503
503
504 def tersedir(statuslist, terseargs):
504 def tersedir(statuslist, terseargs):
505 """
505 """
506 Terse the status if all the files in a directory share the same status.
506 Terse the status if all the files in a directory share the same status.
507
507
508 statuslist is scmutil.status() object which contains a list of files for
508 statuslist is scmutil.status() object which contains a list of files for
509 each status.
509 each status.
510 terseargs is string which is passed by the user as the argument to `--terse`
510 terseargs is string which is passed by the user as the argument to `--terse`
511 flag.
511 flag.
512
512
513 The function makes a tree of objects of dirnode class, and at each node it
513 The function makes a tree of objects of dirnode class, and at each node it
514 stores the information required to know whether we can terse a certain
514 stores the information required to know whether we can terse a certain
515 directory or not.
515 directory or not.
516 """
516 """
517 # the order matters here as that is used to produce final list
517 # the order matters here as that is used to produce final list
518 allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')
518 allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')
519
519
520 # checking the argument validity
520 # checking the argument validity
521 for s in pycompat.bytestr(terseargs):
521 for s in pycompat.bytestr(terseargs):
522 if s not in allst:
522 if s not in allst:
523 raise error.Abort(_("'%s' not recognized") % s)
523 raise error.Abort(_("'%s' not recognized") % s)
524
524
525 # creating a dirnode object for the root of the repo
525 # creating a dirnode object for the root of the repo
526 rootobj = dirnode('')
526 rootobj = dirnode('')
527 pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
527 pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
528 'ignored', 'removed')
528 'ignored', 'removed')
529
529
530 tersedict = {}
530 tersedict = {}
531 for attrname in pstatus:
531 for attrname in pstatus:
532 statuschar = attrname[0:1]
532 statuschar = attrname[0:1]
533 for f in getattr(statuslist, attrname):
533 for f in getattr(statuslist, attrname):
534 rootobj.addfile(f, statuschar)
534 rootobj.addfile(f, statuschar)
535 tersedict[statuschar] = []
535 tersedict[statuschar] = []
536
536
537 # we won't be tersing the root dir, so add files in it
537 # we won't be tersing the root dir, so add files in it
538 for st, fpath in rootobj.iterfilepaths():
538 for st, fpath in rootobj.iterfilepaths():
539 tersedict[st].append(fpath)
539 tersedict[st].append(fpath)
540
540
541 # process each sub-directory and build tersedict
541 # process each sub-directory and build tersedict
542 for subdir in rootobj.subdirs.values():
542 for subdir in rootobj.subdirs.values():
543 for st, f in subdir.tersewalk(terseargs):
543 for st, f in subdir.tersewalk(terseargs):
544 tersedict[st].append(f)
544 tersedict[st].append(f)
545
545
546 tersedlist = []
546 tersedlist = []
547 for st in allst:
547 for st in allst:
548 tersedict[st].sort()
548 tersedict[st].sort()
549 tersedlist.append(tersedict[st])
549 tersedlist.append(tersedict[st])
550
550
551 return tersedlist
551 return tersedlist
552
552
553 def _commentlines(raw):
553 def _commentlines(raw):
554 '''Surround lines with a comment char and a new line'''
554 '''Surround lines with a comment char and a new line'''
555 lines = raw.splitlines()
555 lines = raw.splitlines()
556 commentedlines = ['# %s' % line for line in lines]
556 commentedlines = ['# %s' % line for line in lines]
557 return '\n'.join(commentedlines) + '\n'
557 return '\n'.join(commentedlines) + '\n'
558
558
559 def _conflictsmsg(repo):
559 def _conflictsmsg(repo):
560 # avoid merge cycle
561 from . import merge as mergemod
562 mergestate = mergemod.mergestate.read(repo)
560 mergestate = mergemod.mergestate.read(repo)
563 if not mergestate.active():
561 if not mergestate.active():
564 return
562 return
565
563
566 m = scmutil.match(repo[None])
564 m = scmutil.match(repo[None])
567 unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
565 unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
568 if unresolvedlist:
566 if unresolvedlist:
569 mergeliststr = '\n'.join(
567 mergeliststr = '\n'.join(
570 [' %s' % util.pathto(repo.root, pycompat.getcwd(), path)
568 [' %s' % util.pathto(repo.root, pycompat.getcwd(), path)
571 for path in unresolvedlist])
569 for path in unresolvedlist])
572 msg = _('''Unresolved merge conflicts:
570 msg = _('''Unresolved merge conflicts:
573
571
574 %s
572 %s
575
573
576 To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
574 To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
577 else:
575 else:
578 msg = _('No unresolved merge conflicts.')
576 msg = _('No unresolved merge conflicts.')
579
577
580 return _commentlines(msg)
578 return _commentlines(msg)
581
579
582 def _helpmessage(continuecmd, abortcmd):
580 def _helpmessage(continuecmd, abortcmd):
583 msg = _('To continue: %s\n'
581 msg = _('To continue: %s\n'
584 'To abort: %s') % (continuecmd, abortcmd)
582 'To abort: %s') % (continuecmd, abortcmd)
585 return _commentlines(msg)
583 return _commentlines(msg)
586
584
587 def _rebasemsg():
585 def _rebasemsg():
588 return _helpmessage('hg rebase --continue', 'hg rebase --abort')
586 return _helpmessage('hg rebase --continue', 'hg rebase --abort')
589
587
590 def _histeditmsg():
588 def _histeditmsg():
591 return _helpmessage('hg histedit --continue', 'hg histedit --abort')
589 return _helpmessage('hg histedit --continue', 'hg histedit --abort')
592
590
593 def _unshelvemsg():
591 def _unshelvemsg():
594 return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
592 return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
595
593
596 def _updatecleanmsg(dest=None):
594 def _updatecleanmsg(dest=None):
597 warning = _('warning: this will discard uncommitted changes')
595 warning = _('warning: this will discard uncommitted changes')
598 return 'hg update --clean %s (%s)' % (dest or '.', warning)
596 return 'hg update --clean %s (%s)' % (dest or '.', warning)
599
597
600 def _graftmsg():
598 def _graftmsg():
601 # tweakdefaults requires `update` to have a rev hence the `.`
599 # tweakdefaults requires `update` to have a rev hence the `.`
602 return _helpmessage('hg graft --continue', _updatecleanmsg())
600 return _helpmessage('hg graft --continue', _updatecleanmsg())
603
601
604 def _mergemsg():
602 def _mergemsg():
605 # tweakdefaults requires `update` to have a rev hence the `.`
603 # tweakdefaults requires `update` to have a rev hence the `.`
606 return _helpmessage('hg commit', _updatecleanmsg())
604 return _helpmessage('hg commit', _updatecleanmsg())
607
605
608 def _bisectmsg():
606 def _bisectmsg():
609 msg = _('To mark the changeset good: hg bisect --good\n'
607 msg = _('To mark the changeset good: hg bisect --good\n'
610 'To mark the changeset bad: hg bisect --bad\n'
608 'To mark the changeset bad: hg bisect --bad\n'
611 'To abort: hg bisect --reset\n')
609 'To abort: hg bisect --reset\n')
612 return _commentlines(msg)
610 return _commentlines(msg)
613
611
614 def fileexistspredicate(filename):
612 def fileexistspredicate(filename):
615 return lambda repo: repo.vfs.exists(filename)
613 return lambda repo: repo.vfs.exists(filename)
616
614
617 def _mergepredicate(repo):
615 def _mergepredicate(repo):
618 return len(repo[None].parents()) > 1
616 return len(repo[None].parents()) > 1
619
617
620 STATES = (
618 STATES = (
621 # (state, predicate to detect states, helpful message function)
619 # (state, predicate to detect states, helpful message function)
622 ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
620 ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
623 ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
621 ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
624 ('graft', fileexistspredicate('graftstate'), _graftmsg),
622 ('graft', fileexistspredicate('graftstate'), _graftmsg),
625 ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
623 ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
626 ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
624 ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
627 # The merge state is part of a list that will be iterated over.
625 # The merge state is part of a list that will be iterated over.
628 # They need to be last because some of the other unfinished states may also
626 # They need to be last because some of the other unfinished states may also
629 # be in a merge or update state (eg. rebase, histedit, graft, etc).
627 # be in a merge or update state (eg. rebase, histedit, graft, etc).
630 # We want those to have priority.
628 # We want those to have priority.
631 ('merge', _mergepredicate, _mergemsg),
629 ('merge', _mergepredicate, _mergemsg),
632 )
630 )
633
631
634 def _getrepostate(repo):
632 def _getrepostate(repo):
635 # experimental config: commands.status.skipstates
633 # experimental config: commands.status.skipstates
636 skip = set(repo.ui.configlist('commands', 'status.skipstates'))
634 skip = set(repo.ui.configlist('commands', 'status.skipstates'))
637 for state, statedetectionpredicate, msgfn in STATES:
635 for state, statedetectionpredicate, msgfn in STATES:
638 if state in skip:
636 if state in skip:
639 continue
637 continue
640 if statedetectionpredicate(repo):
638 if statedetectionpredicate(repo):
641 return (state, statedetectionpredicate, msgfn)
639 return (state, statedetectionpredicate, msgfn)
642
640
643 def morestatus(repo, fm):
641 def morestatus(repo, fm):
644 statetuple = _getrepostate(repo)
642 statetuple = _getrepostate(repo)
645 label = 'status.morestatus'
643 label = 'status.morestatus'
646 if statetuple:
644 if statetuple:
647 fm.startitem()
645 fm.startitem()
648 state, statedetectionpredicate, helpfulmsg = statetuple
646 state, statedetectionpredicate, helpfulmsg = statetuple
649 statemsg = _('The repository is in an unfinished *%s* state.') % state
647 statemsg = _('The repository is in an unfinished *%s* state.') % state
650 fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
648 fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
651 conmsg = _conflictsmsg(repo)
649 conmsg = _conflictsmsg(repo)
652 if conmsg:
650 if conmsg:
653 fm.write('conflictsmsg', '%s\n', conmsg, label=label)
651 fm.write('conflictsmsg', '%s\n', conmsg, label=label)
654 if helpfulmsg:
652 if helpfulmsg:
655 helpmsg = helpfulmsg()
653 helpmsg = helpfulmsg()
656 fm.write('helpmsg', '%s\n', helpmsg, label=label)
654 fm.write('helpmsg', '%s\n', helpmsg, label=label)
657
655
658 def findpossible(cmd, table, strict=False):
656 def findpossible(cmd, table, strict=False):
659 """
657 """
660 Return cmd -> (aliases, command table entry)
658 Return cmd -> (aliases, command table entry)
661 for each matching command.
659 for each matching command.
662 Return debug commands (or their aliases) only if no normal command matches.
660 Return debug commands (or their aliases) only if no normal command matches.
663 """
661 """
664 choice = {}
662 choice = {}
665 debugchoice = {}
663 debugchoice = {}
666
664
667 if cmd in table:
665 if cmd in table:
668 # short-circuit exact matches, "log" alias beats "^log|history"
666 # short-circuit exact matches, "log" alias beats "^log|history"
669 keys = [cmd]
667 keys = [cmd]
670 else:
668 else:
671 keys = table.keys()
669 keys = table.keys()
672
670
673 allcmds = []
671 allcmds = []
674 for e in keys:
672 for e in keys:
675 aliases = parsealiases(e)
673 aliases = parsealiases(e)
676 allcmds.extend(aliases)
674 allcmds.extend(aliases)
677 found = None
675 found = None
678 if cmd in aliases:
676 if cmd in aliases:
679 found = cmd
677 found = cmd
680 elif not strict:
678 elif not strict:
681 for a in aliases:
679 for a in aliases:
682 if a.startswith(cmd):
680 if a.startswith(cmd):
683 found = a
681 found = a
684 break
682 break
685 if found is not None:
683 if found is not None:
686 if aliases[0].startswith("debug") or found.startswith("debug"):
684 if aliases[0].startswith("debug") or found.startswith("debug"):
687 debugchoice[found] = (aliases, table[e])
685 debugchoice[found] = (aliases, table[e])
688 else:
686 else:
689 choice[found] = (aliases, table[e])
687 choice[found] = (aliases, table[e])
690
688
691 if not choice and debugchoice:
689 if not choice and debugchoice:
692 choice = debugchoice
690 choice = debugchoice
693
691
694 return choice, allcmds
692 return choice, allcmds
695
693
696 def findcmd(cmd, table, strict=True):
694 def findcmd(cmd, table, strict=True):
697 """Return (aliases, command table entry) for command string."""
695 """Return (aliases, command table entry) for command string."""
698 choice, allcmds = findpossible(cmd, table, strict)
696 choice, allcmds = findpossible(cmd, table, strict)
699
697
700 if cmd in choice:
698 if cmd in choice:
701 return choice[cmd]
699 return choice[cmd]
702
700
703 if len(choice) > 1:
701 if len(choice) > 1:
704 clist = sorted(choice)
702 clist = sorted(choice)
705 raise error.AmbiguousCommand(cmd, clist)
703 raise error.AmbiguousCommand(cmd, clist)
706
704
707 if choice:
705 if choice:
708 return list(choice.values())[0]
706 return list(choice.values())[0]
709
707
710 raise error.UnknownCommand(cmd, allcmds)
708 raise error.UnknownCommand(cmd, allcmds)
711
709
712 def changebranch(ui, repo, revs, label):
710 def changebranch(ui, repo, revs, label):
713 """ Change the branch name of given revs to label """
711 """ Change the branch name of given revs to label """
714
712
715 with repo.wlock(), repo.lock(), repo.transaction('branches'):
713 with repo.wlock(), repo.lock(), repo.transaction('branches'):
716 # abort in case of uncommitted merge or dirty wdir
714 # abort in case of uncommitted merge or dirty wdir
717 bailifchanged(repo)
715 bailifchanged(repo)
718 revs = scmutil.revrange(repo, revs)
716 revs = scmutil.revrange(repo, revs)
719 if not revs:
717 if not revs:
720 raise error.Abort("empty revision set")
718 raise error.Abort("empty revision set")
721 roots = repo.revs('roots(%ld)', revs)
719 roots = repo.revs('roots(%ld)', revs)
722 if len(roots) > 1:
720 if len(roots) > 1:
723 raise error.Abort(_("cannot change branch of non-linear revisions"))
721 raise error.Abort(_("cannot change branch of non-linear revisions"))
724 rewriteutil.precheck(repo, revs, 'change branch of')
722 rewriteutil.precheck(repo, revs, 'change branch of')
725
723
726 root = repo[roots.first()]
724 root = repo[roots.first()]
727 if not root.p1().branch() == label and label in repo.branchmap():
725 if not root.p1().branch() == label and label in repo.branchmap():
728 raise error.Abort(_("a branch of the same name already exists"))
726 raise error.Abort(_("a branch of the same name already exists"))
729
727
730 if repo.revs('merge() and %ld', revs):
728 if repo.revs('merge() and %ld', revs):
731 raise error.Abort(_("cannot change branch of a merge commit"))
729 raise error.Abort(_("cannot change branch of a merge commit"))
732 if repo.revs('obsolete() and %ld', revs):
730 if repo.revs('obsolete() and %ld', revs):
733 raise error.Abort(_("cannot change branch of a obsolete changeset"))
731 raise error.Abort(_("cannot change branch of a obsolete changeset"))
734
732
735 # make sure only topological heads
733 # make sure only topological heads
736 if repo.revs('heads(%ld) - head()', revs):
734 if repo.revs('heads(%ld) - head()', revs):
737 raise error.Abort(_("cannot change branch in middle of a stack"))
735 raise error.Abort(_("cannot change branch in middle of a stack"))
738
736
739 replacements = {}
737 replacements = {}
740 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
738 # avoid import cycle mercurial.cmdutil -> mercurial.context ->
741 # mercurial.subrepo -> mercurial.cmdutil
739 # mercurial.subrepo -> mercurial.cmdutil
742 from . import context
740 from . import context
743 for rev in revs:
741 for rev in revs:
744 ctx = repo[rev]
742 ctx = repo[rev]
745 oldbranch = ctx.branch()
743 oldbranch = ctx.branch()
746 # check if ctx has same branch
744 # check if ctx has same branch
747 if oldbranch == label:
745 if oldbranch == label:
748 continue
746 continue
749
747
750 def filectxfn(repo, newctx, path):
748 def filectxfn(repo, newctx, path):
751 try:
749 try:
752 return ctx[path]
750 return ctx[path]
753 except error.ManifestLookupError:
751 except error.ManifestLookupError:
754 return None
752 return None
755
753
756 ui.debug("changing branch of '%s' from '%s' to '%s'\n"
754 ui.debug("changing branch of '%s' from '%s' to '%s'\n"
757 % (hex(ctx.node()), oldbranch, label))
755 % (hex(ctx.node()), oldbranch, label))
758 extra = ctx.extra()
756 extra = ctx.extra()
759 extra['branch_change'] = hex(ctx.node())
757 extra['branch_change'] = hex(ctx.node())
760 # While changing branch of set of linear commits, make sure that
758 # While changing branch of set of linear commits, make sure that
761 # we base our commits on new parent rather than old parent which
759 # we base our commits on new parent rather than old parent which
762 # was obsoleted while changing the branch
760 # was obsoleted while changing the branch
763 p1 = ctx.p1().node()
761 p1 = ctx.p1().node()
764 p2 = ctx.p2().node()
762 p2 = ctx.p2().node()
765 if p1 in replacements:
763 if p1 in replacements:
766 p1 = replacements[p1][0]
764 p1 = replacements[p1][0]
767 if p2 in replacements:
765 if p2 in replacements:
768 p2 = replacements[p2][0]
766 p2 = replacements[p2][0]
769
767
770 mc = context.memctx(repo, (p1, p2),
768 mc = context.memctx(repo, (p1, p2),
771 ctx.description(),
769 ctx.description(),
772 ctx.files(),
770 ctx.files(),
773 filectxfn,
771 filectxfn,
774 user=ctx.user(),
772 user=ctx.user(),
775 date=ctx.date(),
773 date=ctx.date(),
776 extra=extra,
774 extra=extra,
777 branch=label)
775 branch=label)
778
776
779 commitphase = ctx.phase()
777 commitphase = ctx.phase()
780 overrides = {('phases', 'new-commit'): commitphase}
778 overrides = {('phases', 'new-commit'): commitphase}
781 with repo.ui.configoverride(overrides, 'branch-change'):
779 with repo.ui.configoverride(overrides, 'branch-change'):
782 newnode = repo.commitctx(mc)
780 newnode = repo.commitctx(mc)
783
781
784 replacements[ctx.node()] = (newnode,)
782 replacements[ctx.node()] = (newnode,)
785 ui.debug('new node id is %s\n' % hex(newnode))
783 ui.debug('new node id is %s\n' % hex(newnode))
786
784
787 # create obsmarkers and move bookmarks
785 # create obsmarkers and move bookmarks
788 scmutil.cleanupnodes(repo, replacements, 'branch-change')
786 scmutil.cleanupnodes(repo, replacements, 'branch-change')
789
787
790 # move the working copy too
788 # move the working copy too
791 wctx = repo[None]
789 wctx = repo[None]
792 # in-progress merge is a bit too complex for now.
790 # in-progress merge is a bit too complex for now.
793 if len(wctx.parents()) == 1:
791 if len(wctx.parents()) == 1:
794 newid = replacements.get(wctx.p1().node())
792 newid = replacements.get(wctx.p1().node())
795 if newid is not None:
793 if newid is not None:
796 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
794 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
797 # mercurial.cmdutil
795 # mercurial.cmdutil
798 from . import hg
796 from . import hg
799 hg.update(repo, newid[0], quietempty=True)
797 hg.update(repo, newid[0], quietempty=True)
800
798
801 ui.status(_("changed branch on %d changesets\n") % len(replacements))
799 ui.status(_("changed branch on %d changesets\n") % len(replacements))
802
800
803 def findrepo(p):
801 def findrepo(p):
804 while not os.path.isdir(os.path.join(p, ".hg")):
802 while not os.path.isdir(os.path.join(p, ".hg")):
805 oldp, p = p, os.path.dirname(p)
803 oldp, p = p, os.path.dirname(p)
806 if p == oldp:
804 if p == oldp:
807 return None
805 return None
808
806
809 return p
807 return p
810
808
811 def bailifchanged(repo, merge=True, hint=None):
809 def bailifchanged(repo, merge=True, hint=None):
812 """ enforce the precondition that working directory must be clean.
810 """ enforce the precondition that working directory must be clean.
813
811
814 'merge' can be set to false if a pending uncommitted merge should be
812 'merge' can be set to false if a pending uncommitted merge should be
815 ignored (such as when 'update --check' runs).
813 ignored (such as when 'update --check' runs).
816
814
817 'hint' is the usual hint given to Abort exception.
815 'hint' is the usual hint given to Abort exception.
818 """
816 """
819
817
820 if merge and repo.dirstate.p2() != nullid:
818 if merge and repo.dirstate.p2() != nullid:
821 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
819 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
822 modified, added, removed, deleted = repo.status()[:4]
820 modified, added, removed, deleted = repo.status()[:4]
823 if modified or added or removed or deleted:
821 if modified or added or removed or deleted:
824 raise error.Abort(_('uncommitted changes'), hint=hint)
822 raise error.Abort(_('uncommitted changes'), hint=hint)
825 ctx = repo[None]
823 ctx = repo[None]
826 for s in sorted(ctx.substate):
824 for s in sorted(ctx.substate):
827 ctx.sub(s).bailifchanged(hint=hint)
825 ctx.sub(s).bailifchanged(hint=hint)
828
826
829 def logmessage(ui, opts):
827 def logmessage(ui, opts):
830 """ get the log message according to -m and -l option """
828 """ get the log message according to -m and -l option """
831 message = opts.get('message')
829 message = opts.get('message')
832 logfile = opts.get('logfile')
830 logfile = opts.get('logfile')
833
831
834 if message and logfile:
832 if message and logfile:
835 raise error.Abort(_('options --message and --logfile are mutually '
833 raise error.Abort(_('options --message and --logfile are mutually '
836 'exclusive'))
834 'exclusive'))
837 if not message and logfile:
835 if not message and logfile:
838 try:
836 try:
839 if isstdiofilename(logfile):
837 if isstdiofilename(logfile):
840 message = ui.fin.read()
838 message = ui.fin.read()
841 else:
839 else:
842 message = '\n'.join(util.readfile(logfile).splitlines())
840 message = '\n'.join(util.readfile(logfile).splitlines())
843 except IOError as inst:
841 except IOError as inst:
844 raise error.Abort(_("can't read commit message '%s': %s") %
842 raise error.Abort(_("can't read commit message '%s': %s") %
845 (logfile, encoding.strtolocal(inst.strerror)))
843 (logfile, encoding.strtolocal(inst.strerror)))
846 return message
844 return message
847
845
848 def mergeeditform(ctxorbool, baseformname):
846 def mergeeditform(ctxorbool, baseformname):
849 """return appropriate editform name (referencing a committemplate)
847 """return appropriate editform name (referencing a committemplate)
850
848
851 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
849 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
852 merging is committed.
850 merging is committed.
853
851
854 This returns baseformname with '.merge' appended if it is a merge,
852 This returns baseformname with '.merge' appended if it is a merge,
855 otherwise '.normal' is appended.
853 otherwise '.normal' is appended.
856 """
854 """
857 if isinstance(ctxorbool, bool):
855 if isinstance(ctxorbool, bool):
858 if ctxorbool:
856 if ctxorbool:
859 return baseformname + ".merge"
857 return baseformname + ".merge"
860 elif 1 < len(ctxorbool.parents()):
858 elif 1 < len(ctxorbool.parents()):
861 return baseformname + ".merge"
859 return baseformname + ".merge"
862
860
863 return baseformname + ".normal"
861 return baseformname + ".normal"
864
862
865 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
863 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
866 editform='', **opts):
864 editform='', **opts):
867 """get appropriate commit message editor according to '--edit' option
865 """get appropriate commit message editor according to '--edit' option
868
866
869 'finishdesc' is a function to be called with edited commit message
867 'finishdesc' is a function to be called with edited commit message
870 (= 'description' of the new changeset) just after editing, but
868 (= 'description' of the new changeset) just after editing, but
871 before checking empty-ness. It should return actual text to be
869 before checking empty-ness. It should return actual text to be
872 stored into history. This allows to change description before
870 stored into history. This allows to change description before
873 storing.
871 storing.
874
872
875 'extramsg' is an extra message to be shown in the editor instead of
873 'extramsg' is an extra message to be shown in the editor instead of
876 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
874 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
877 is automatically added.
875 is automatically added.
878
876
879 'editform' is a dot-separated list of names, to distinguish
877 'editform' is a dot-separated list of names, to distinguish
880 the purpose of commit text editing.
878 the purpose of commit text editing.
881
879
882 'getcommiteditor' returns 'commitforceeditor' regardless of
880 'getcommiteditor' returns 'commitforceeditor' regardless of
883 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
881 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
884 they are specific for usage in MQ.
882 they are specific for usage in MQ.
885 """
883 """
886 if edit or finishdesc or extramsg:
884 if edit or finishdesc or extramsg:
887 return lambda r, c, s: commitforceeditor(r, c, s,
885 return lambda r, c, s: commitforceeditor(r, c, s,
888 finishdesc=finishdesc,
886 finishdesc=finishdesc,
889 extramsg=extramsg,
887 extramsg=extramsg,
890 editform=editform)
888 editform=editform)
891 elif editform:
889 elif editform:
892 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
890 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
893 else:
891 else:
894 return commiteditor
892 return commiteditor
895
893
896 def makefilename(repo, pat, node, desc=None,
894 def makefilename(repo, pat, node, desc=None,
897 total=None, seqno=None, revwidth=None, pathname=None):
895 total=None, seqno=None, revwidth=None, pathname=None):
898 node_expander = {
896 node_expander = {
899 'H': lambda: hex(node),
897 'H': lambda: hex(node),
900 'R': lambda: '%d' % repo.changelog.rev(node),
898 'R': lambda: '%d' % repo.changelog.rev(node),
901 'h': lambda: short(node),
899 'h': lambda: short(node),
902 'm': lambda: re.sub('[^\w]', '_', desc or '')
900 'm': lambda: re.sub('[^\w]', '_', desc or '')
903 }
901 }
904 expander = {
902 expander = {
905 '%': lambda: '%',
903 '%': lambda: '%',
906 'b': lambda: os.path.basename(repo.root),
904 'b': lambda: os.path.basename(repo.root),
907 }
905 }
908
906
909 try:
907 try:
910 if node:
908 if node:
911 expander.update(node_expander)
909 expander.update(node_expander)
912 if node:
910 if node:
913 expander['r'] = (lambda:
911 expander['r'] = (lambda:
914 ('%d' % repo.changelog.rev(node)).zfill(revwidth or 0))
912 ('%d' % repo.changelog.rev(node)).zfill(revwidth or 0))
915 if total is not None:
913 if total is not None:
916 expander['N'] = lambda: '%d' % total
914 expander['N'] = lambda: '%d' % total
917 if seqno is not None:
915 if seqno is not None:
918 expander['n'] = lambda: '%d' % seqno
916 expander['n'] = lambda: '%d' % seqno
919 if total is not None and seqno is not None:
917 if total is not None and seqno is not None:
920 expander['n'] = (lambda: ('%d' % seqno).zfill(len('%d' % total)))
918 expander['n'] = (lambda: ('%d' % seqno).zfill(len('%d' % total)))
921 if pathname is not None:
919 if pathname is not None:
922 expander['s'] = lambda: os.path.basename(pathname)
920 expander['s'] = lambda: os.path.basename(pathname)
923 expander['d'] = lambda: os.path.dirname(pathname) or '.'
921 expander['d'] = lambda: os.path.dirname(pathname) or '.'
924 expander['p'] = lambda: pathname
922 expander['p'] = lambda: pathname
925
923
926 newname = []
924 newname = []
927 patlen = len(pat)
925 patlen = len(pat)
928 i = 0
926 i = 0
929 while i < patlen:
927 while i < patlen:
930 c = pat[i:i + 1]
928 c = pat[i:i + 1]
931 if c == '%':
929 if c == '%':
932 i += 1
930 i += 1
933 c = pat[i:i + 1]
931 c = pat[i:i + 1]
934 c = expander[c]()
932 c = expander[c]()
935 newname.append(c)
933 newname.append(c)
936 i += 1
934 i += 1
937 return ''.join(newname)
935 return ''.join(newname)
938 except KeyError as inst:
936 except KeyError as inst:
939 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
937 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
940 inst.args[0])
938 inst.args[0])
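A worked example of the '%' expanders above (a sketch; the repository root and the short hash are invented):

# sketch: name the 3rd of 10 patches; '%b' expands to the repo basename,
# '%n' to the zero-padded sequence number, '%N' to the total and '%h' to
# the short node hash
from mercurial import cmdutil

fn = cmdutil.makefilename(repo, 'full-%b-%n-of-%N-%h.patch', ctx.node(),
                          seqno=3, total=10)
# for a repo rooted at /home/alice/hg and short hash 1234567890ab this
# yields 'full-hg-03-of-10-1234567890ab.patch'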
941
939
942 def isstdiofilename(pat):
940 def isstdiofilename(pat):
943 """True if the given pat looks like a filename denoting stdin/stdout"""
941 """True if the given pat looks like a filename denoting stdin/stdout"""
944 return not pat or pat == '-'
942 return not pat or pat == '-'
945
943
946 class _unclosablefile(object):
944 class _unclosablefile(object):
947 def __init__(self, fp):
945 def __init__(self, fp):
948 self._fp = fp
946 self._fp = fp
949
947
950 def close(self):
948 def close(self):
951 pass
949 pass
952
950
953 def __iter__(self):
951 def __iter__(self):
954 return iter(self._fp)
952 return iter(self._fp)
955
953
956 def __getattr__(self, attr):
954 def __getattr__(self, attr):
957 return getattr(self._fp, attr)
955 return getattr(self._fp, attr)
958
956
959 def __enter__(self):
957 def __enter__(self):
960 return self
958 return self
961
959
962 def __exit__(self, exc_type, exc_value, exc_tb):
960 def __exit__(self, exc_type, exc_value, exc_tb):
963 pass
961 pass
964
962
965 def makefileobj(repo, pat, node=None, desc=None, total=None,
963 def makefileobj(repo, pat, node=None, desc=None, total=None,
966 seqno=None, revwidth=None, mode='wb', modemap=None,
964 seqno=None, revwidth=None, mode='wb', modemap=None,
967 pathname=None):
965 pathname=None):
968
966
969 writable = mode not in ('r', 'rb')
967 writable = mode not in ('r', 'rb')
970
968
971 if isstdiofilename(pat):
969 if isstdiofilename(pat):
972 if writable:
970 if writable:
973 fp = repo.ui.fout
971 fp = repo.ui.fout
974 else:
972 else:
975 fp = repo.ui.fin
973 fp = repo.ui.fin
976 return _unclosablefile(fp)
974 return _unclosablefile(fp)
977 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
975 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
978 if modemap is not None:
976 if modemap is not None:
979 mode = modemap.get(fn, mode)
977 mode = modemap.get(fn, mode)
980 if mode == 'wb':
978 if mode == 'wb':
981 modemap[fn] = 'ab'
979 modemap[fn] = 'ab'
982 return open(fn, mode)
980 return open(fn, mode)
983
981
984 def openrevlog(repo, cmd, file_, opts):
982 def openrevlog(repo, cmd, file_, opts):
985 """opens the changelog, manifest, a filelog or a given revlog"""
983 """opens the changelog, manifest, a filelog or a given revlog"""
986 cl = opts['changelog']
984 cl = opts['changelog']
987 mf = opts['manifest']
985 mf = opts['manifest']
988 dir = opts['dir']
986 dir = opts['dir']
989 msg = None
987 msg = None
990 if cl and mf:
988 if cl and mf:
991 msg = _('cannot specify --changelog and --manifest at the same time')
989 msg = _('cannot specify --changelog and --manifest at the same time')
992 elif cl and dir:
990 elif cl and dir:
993 msg = _('cannot specify --changelog and --dir at the same time')
991 msg = _('cannot specify --changelog and --dir at the same time')
994 elif cl or mf or dir:
992 elif cl or mf or dir:
995 if file_:
993 if file_:
996 msg = _('cannot specify filename with --changelog or --manifest')
994 msg = _('cannot specify filename with --changelog or --manifest')
997 elif not repo:
995 elif not repo:
998 msg = _('cannot specify --changelog or --manifest or --dir '
996 msg = _('cannot specify --changelog or --manifest or --dir '
999 'without a repository')
997 'without a repository')
1000 if msg:
998 if msg:
1001 raise error.Abort(msg)
999 raise error.Abort(msg)
1002
1000
1003 r = None
1001 r = None
1004 if repo:
1002 if repo:
1005 if cl:
1003 if cl:
1006 r = repo.unfiltered().changelog
1004 r = repo.unfiltered().changelog
1007 elif dir:
1005 elif dir:
1008 if 'treemanifest' not in repo.requirements:
1006 if 'treemanifest' not in repo.requirements:
1009 raise error.Abort(_("--dir can only be used on repos with "
1007 raise error.Abort(_("--dir can only be used on repos with "
1010 "treemanifest enabled"))
1008 "treemanifest enabled"))
1011 dirlog = repo.manifestlog._revlog.dirlog(dir)
1009 dirlog = repo.manifestlog._revlog.dirlog(dir)
1012 if len(dirlog):
1010 if len(dirlog):
1013 r = dirlog
1011 r = dirlog
1014 elif mf:
1012 elif mf:
1015 r = repo.manifestlog._revlog
1013 r = repo.manifestlog._revlog
1016 elif file_:
1014 elif file_:
1017 filelog = repo.file(file_)
1015 filelog = repo.file(file_)
1018 if len(filelog):
1016 if len(filelog):
1019 r = filelog
1017 r = filelog
1020 if not r:
1018 if not r:
1021 if not file_:
1019 if not file_:
1022 raise error.CommandError(cmd, _('invalid arguments'))
1020 raise error.CommandError(cmd, _('invalid arguments'))
1023 if not os.path.isfile(file_):
1021 if not os.path.isfile(file_):
1024 raise error.Abort(_("revlog '%s' not found") % file_)
1022 raise error.Abort(_("revlog '%s' not found") % file_)
1025 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
1023 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
1026 file_[:-2] + ".i")
1024 file_[:-2] + ".i")
1027 return r
1025 return r
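Roughly how the debug commands drive this (a sketch; the opts dict mirrors the --changelog/--manifest/--dir flags read above, and 'README' stands in for any tracked file):

# sketch: open the filelog of a tracked file, or the changelog itself
from mercurial import cmdutil

flog = cmdutil.openrevlog(repo, 'debugdata', 'README',
                          {'changelog': False, 'manifest': False, 'dir': ''})
cl = cmdutil.openrevlog(repo, 'debugindex', None,
                        {'changelog': True, 'manifest': False, 'dir': ''})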
1028
1026
1029 def copy(ui, repo, pats, opts, rename=False):
1027 def copy(ui, repo, pats, opts, rename=False):
1030 # called with the repo lock held
1028 # called with the repo lock held
1031 #
1029 #
1032 # hgsep => pathname that uses "/" to separate directories
1030 # hgsep => pathname that uses "/" to separate directories
1033 # ossep => pathname that uses os.sep to separate directories
1031 # ossep => pathname that uses os.sep to separate directories
1034 cwd = repo.getcwd()
1032 cwd = repo.getcwd()
1035 targets = {}
1033 targets = {}
1036 after = opts.get("after")
1034 after = opts.get("after")
1037 dryrun = opts.get("dry_run")
1035 dryrun = opts.get("dry_run")
1038 wctx = repo[None]
1036 wctx = repo[None]
1039
1037
1040 def walkpat(pat):
1038 def walkpat(pat):
1041 srcs = []
1039 srcs = []
1042 if after:
1040 if after:
1043 badstates = '?'
1041 badstates = '?'
1044 else:
1042 else:
1045 badstates = '?r'
1043 badstates = '?r'
1046 m = scmutil.match(wctx, [pat], opts, globbed=True)
1044 m = scmutil.match(wctx, [pat], opts, globbed=True)
1047 for abs in wctx.walk(m):
1045 for abs in wctx.walk(m):
1048 state = repo.dirstate[abs]
1046 state = repo.dirstate[abs]
1049 rel = m.rel(abs)
1047 rel = m.rel(abs)
1050 exact = m.exact(abs)
1048 exact = m.exact(abs)
1051 if state in badstates:
1049 if state in badstates:
1052 if exact and state == '?':
1050 if exact and state == '?':
1053 ui.warn(_('%s: not copying - file is not managed\n') % rel)
1051 ui.warn(_('%s: not copying - file is not managed\n') % rel)
1054 if exact and state == 'r':
1052 if exact and state == 'r':
1055 ui.warn(_('%s: not copying - file has been marked for'
1053 ui.warn(_('%s: not copying - file has been marked for'
1056 ' remove\n') % rel)
1054 ' remove\n') % rel)
1057 continue
1055 continue
1058 # abs: hgsep
1056 # abs: hgsep
1059 # rel: ossep
1057 # rel: ossep
1060 srcs.append((abs, rel, exact))
1058 srcs.append((abs, rel, exact))
1061 return srcs
1059 return srcs
1062
1060
1063 # abssrc: hgsep
1061 # abssrc: hgsep
1064 # relsrc: ossep
1062 # relsrc: ossep
1065 # otarget: ossep
1063 # otarget: ossep
1066 def copyfile(abssrc, relsrc, otarget, exact):
1064 def copyfile(abssrc, relsrc, otarget, exact):
1067 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1065 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1068 if '/' in abstarget:
1066 if '/' in abstarget:
1069 # We cannot normalize abstarget itself, as this would prevent
1067 # We cannot normalize abstarget itself, as this would prevent
1070 # case-only renames, like a => A.
1068 # case-only renames, like a => A.
1071 abspath, absname = abstarget.rsplit('/', 1)
1069 abspath, absname = abstarget.rsplit('/', 1)
1072 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
1070 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
1073 reltarget = repo.pathto(abstarget, cwd)
1071 reltarget = repo.pathto(abstarget, cwd)
1074 target = repo.wjoin(abstarget)
1072 target = repo.wjoin(abstarget)
1075 src = repo.wjoin(abssrc)
1073 src = repo.wjoin(abssrc)
1076 state = repo.dirstate[abstarget]
1074 state = repo.dirstate[abstarget]
1077
1075
1078 scmutil.checkportable(ui, abstarget)
1076 scmutil.checkportable(ui, abstarget)
1079
1077
1080 # check for collisions
1078 # check for collisions
1081 prevsrc = targets.get(abstarget)
1079 prevsrc = targets.get(abstarget)
1082 if prevsrc is not None:
1080 if prevsrc is not None:
1083 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1081 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1084 (reltarget, repo.pathto(abssrc, cwd),
1082 (reltarget, repo.pathto(abssrc, cwd),
1085 repo.pathto(prevsrc, cwd)))
1083 repo.pathto(prevsrc, cwd)))
1086 return
1084 return
1087
1085
1088 # check for overwrites
1086 # check for overwrites
1089 exists = os.path.lexists(target)
1087 exists = os.path.lexists(target)
1090 samefile = False
1088 samefile = False
1091 if exists and abssrc != abstarget:
1089 if exists and abssrc != abstarget:
1092 if (repo.dirstate.normalize(abssrc) ==
1090 if (repo.dirstate.normalize(abssrc) ==
1093 repo.dirstate.normalize(abstarget)):
1091 repo.dirstate.normalize(abstarget)):
1094 if not rename:
1092 if not rename:
1095 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1093 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1096 return
1094 return
1097 exists = False
1095 exists = False
1098 samefile = True
1096 samefile = True
1099
1097
1100 if not after and exists or after and state in 'mn':
1098 if not after and exists or after and state in 'mn':
1101 if not opts['force']:
1099 if not opts['force']:
1102 if state in 'mn':
1100 if state in 'mn':
1103 msg = _('%s: not overwriting - file already committed\n')
1101 msg = _('%s: not overwriting - file already committed\n')
1104 if after:
1102 if after:
1105 flags = '--after --force'
1103 flags = '--after --force'
1106 else:
1104 else:
1107 flags = '--force'
1105 flags = '--force'
1108 if rename:
1106 if rename:
1109 hint = _('(hg rename %s to replace the file by '
1107 hint = _('(hg rename %s to replace the file by '
1110 'recording a rename)\n') % flags
1108 'recording a rename)\n') % flags
1111 else:
1109 else:
1112 hint = _('(hg copy %s to replace the file by '
1110 hint = _('(hg copy %s to replace the file by '
1113 'recording a copy)\n') % flags
1111 'recording a copy)\n') % flags
1114 else:
1112 else:
1115 msg = _('%s: not overwriting - file exists\n')
1113 msg = _('%s: not overwriting - file exists\n')
1116 if rename:
1114 if rename:
1117 hint = _('(hg rename --after to record the rename)\n')
1115 hint = _('(hg rename --after to record the rename)\n')
1118 else:
1116 else:
1119 hint = _('(hg copy --after to record the copy)\n')
1117 hint = _('(hg copy --after to record the copy)\n')
1120 ui.warn(msg % reltarget)
1118 ui.warn(msg % reltarget)
1121 ui.warn(hint)
1119 ui.warn(hint)
1122 return
1120 return
1123
1121
1124 if after:
1122 if after:
1125 if not exists:
1123 if not exists:
1126 if rename:
1124 if rename:
1127 ui.warn(_('%s: not recording move - %s does not exist\n') %
1125 ui.warn(_('%s: not recording move - %s does not exist\n') %
1128 (relsrc, reltarget))
1126 (relsrc, reltarget))
1129 else:
1127 else:
1130 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1128 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1131 (relsrc, reltarget))
1129 (relsrc, reltarget))
1132 return
1130 return
1133 elif not dryrun:
1131 elif not dryrun:
1134 try:
1132 try:
1135 if exists:
1133 if exists:
1136 os.unlink(target)
1134 os.unlink(target)
1137 targetdir = os.path.dirname(target) or '.'
1135 targetdir = os.path.dirname(target) or '.'
1138 if not os.path.isdir(targetdir):
1136 if not os.path.isdir(targetdir):
1139 os.makedirs(targetdir)
1137 os.makedirs(targetdir)
1140 if samefile:
1138 if samefile:
1141 tmp = target + "~hgrename"
1139 tmp = target + "~hgrename"
1142 os.rename(src, tmp)
1140 os.rename(src, tmp)
1143 os.rename(tmp, target)
1141 os.rename(tmp, target)
1144 else:
1142 else:
1145 util.copyfile(src, target)
1143 util.copyfile(src, target)
1146 srcexists = True
1144 srcexists = True
1147 except IOError as inst:
1145 except IOError as inst:
1148 if inst.errno == errno.ENOENT:
1146 if inst.errno == errno.ENOENT:
1149 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1147 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1150 srcexists = False
1148 srcexists = False
1151 else:
1149 else:
1152 ui.warn(_('%s: cannot copy - %s\n') %
1150 ui.warn(_('%s: cannot copy - %s\n') %
1153 (relsrc, encoding.strtolocal(inst.strerror)))
1151 (relsrc, encoding.strtolocal(inst.strerror)))
1154 return True # report a failure
1152 return True # report a failure
1155
1153
1156 if ui.verbose or not exact:
1154 if ui.verbose or not exact:
1157 if rename:
1155 if rename:
1158 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1156 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1159 else:
1157 else:
1160 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1158 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1161
1159
1162 targets[abstarget] = abssrc
1160 targets[abstarget] = abssrc
1163
1161
1164 # fix up dirstate
1162 # fix up dirstate
1165 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1163 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1166 dryrun=dryrun, cwd=cwd)
1164 dryrun=dryrun, cwd=cwd)
1167 if rename and not dryrun:
1165 if rename and not dryrun:
1168 if not after and srcexists and not samefile:
1166 if not after and srcexists and not samefile:
1169 repo.wvfs.unlinkpath(abssrc)
1167 repo.wvfs.unlinkpath(abssrc)
1170 wctx.forget([abssrc])
1168 wctx.forget([abssrc])
1171
1169
1172 # pat: ossep
1170 # pat: ossep
1173 # dest: ossep
1171 # dest: ossep
1174 # srcs: list of (hgsep, hgsep, ossep, bool)
1172 # srcs: list of (hgsep, hgsep, ossep, bool)
1175 # return: function that takes hgsep and returns ossep
1173 # return: function that takes hgsep and returns ossep
1176 def targetpathfn(pat, dest, srcs):
1174 def targetpathfn(pat, dest, srcs):
1177 if os.path.isdir(pat):
1175 if os.path.isdir(pat):
1178 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1176 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1179 abspfx = util.localpath(abspfx)
1177 abspfx = util.localpath(abspfx)
1180 if destdirexists:
1178 if destdirexists:
1181 striplen = len(os.path.split(abspfx)[0])
1179 striplen = len(os.path.split(abspfx)[0])
1182 else:
1180 else:
1183 striplen = len(abspfx)
1181 striplen = len(abspfx)
1184 if striplen:
1182 if striplen:
1185 striplen += len(pycompat.ossep)
1183 striplen += len(pycompat.ossep)
1186 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1184 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1187 elif destdirexists:
1185 elif destdirexists:
1188 res = lambda p: os.path.join(dest,
1186 res = lambda p: os.path.join(dest,
1189 os.path.basename(util.localpath(p)))
1187 os.path.basename(util.localpath(p)))
1190 else:
1188 else:
1191 res = lambda p: dest
1189 res = lambda p: dest
1192 return res
1190 return res
1193
1191
1194 # pat: ossep
1192 # pat: ossep
1195 # dest: ossep
1193 # dest: ossep
1196 # srcs: list of (hgsep, hgsep, ossep, bool)
1194 # srcs: list of (hgsep, hgsep, ossep, bool)
1197 # return: function that takes hgsep and returns ossep
1195 # return: function that takes hgsep and returns ossep
1198 def targetpathafterfn(pat, dest, srcs):
1196 def targetpathafterfn(pat, dest, srcs):
1199 if matchmod.patkind(pat):
1197 if matchmod.patkind(pat):
1200 # a mercurial pattern
1198 # a mercurial pattern
1201 res = lambda p: os.path.join(dest,
1199 res = lambda p: os.path.join(dest,
1202 os.path.basename(util.localpath(p)))
1200 os.path.basename(util.localpath(p)))
1203 else:
1201 else:
1204 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1202 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1205 if len(abspfx) < len(srcs[0][0]):
1203 if len(abspfx) < len(srcs[0][0]):
1206 # A directory. Either the target path contains the last
1204 # A directory. Either the target path contains the last
1207 # component of the source path or it does not.
1205 # component of the source path or it does not.
1208 def evalpath(striplen):
1206 def evalpath(striplen):
1209 score = 0
1207 score = 0
1210 for s in srcs:
1208 for s in srcs:
1211 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1209 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1212 if os.path.lexists(t):
1210 if os.path.lexists(t):
1213 score += 1
1211 score += 1
1214 return score
1212 return score
1215
1213
1216 abspfx = util.localpath(abspfx)
1214 abspfx = util.localpath(abspfx)
1217 striplen = len(abspfx)
1215 striplen = len(abspfx)
1218 if striplen:
1216 if striplen:
1219 striplen += len(pycompat.ossep)
1217 striplen += len(pycompat.ossep)
1220 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1218 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1221 score = evalpath(striplen)
1219 score = evalpath(striplen)
1222 striplen1 = len(os.path.split(abspfx)[0])
1220 striplen1 = len(os.path.split(abspfx)[0])
1223 if striplen1:
1221 if striplen1:
1224 striplen1 += len(pycompat.ossep)
1222 striplen1 += len(pycompat.ossep)
1225 if evalpath(striplen1) > score:
1223 if evalpath(striplen1) > score:
1226 striplen = striplen1
1224 striplen = striplen1
1227 res = lambda p: os.path.join(dest,
1225 res = lambda p: os.path.join(dest,
1228 util.localpath(p)[striplen:])
1226 util.localpath(p)[striplen:])
1229 else:
1227 else:
1230 # a file
1228 # a file
1231 if destdirexists:
1229 if destdirexists:
1232 res = lambda p: os.path.join(dest,
1230 res = lambda p: os.path.join(dest,
1233 os.path.basename(util.localpath(p)))
1231 os.path.basename(util.localpath(p)))
1234 else:
1232 else:
1235 res = lambda p: dest
1233 res = lambda p: dest
1236 return res
1234 return res
1237
1235
1238 pats = scmutil.expandpats(pats)
1236 pats = scmutil.expandpats(pats)
1239 if not pats:
1237 if not pats:
1240 raise error.Abort(_('no source or destination specified'))
1238 raise error.Abort(_('no source or destination specified'))
1241 if len(pats) == 1:
1239 if len(pats) == 1:
1242 raise error.Abort(_('no destination specified'))
1240 raise error.Abort(_('no destination specified'))
1243 dest = pats.pop()
1241 dest = pats.pop()
1244 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1242 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1245 if not destdirexists:
1243 if not destdirexists:
1246 if len(pats) > 1 or matchmod.patkind(pats[0]):
1244 if len(pats) > 1 or matchmod.patkind(pats[0]):
1247 raise error.Abort(_('with multiple sources, destination must be an '
1245 raise error.Abort(_('with multiple sources, destination must be an '
1248 'existing directory'))
1246 'existing directory'))
1249 if util.endswithsep(dest):
1247 if util.endswithsep(dest):
1250 raise error.Abort(_('destination %s is not a directory') % dest)
1248 raise error.Abort(_('destination %s is not a directory') % dest)
1251
1249
1252 tfn = targetpathfn
1250 tfn = targetpathfn
1253 if after:
1251 if after:
1254 tfn = targetpathafterfn
1252 tfn = targetpathafterfn
1255 copylist = []
1253 copylist = []
1256 for pat in pats:
1254 for pat in pats:
1257 srcs = walkpat(pat)
1255 srcs = walkpat(pat)
1258 if not srcs:
1256 if not srcs:
1259 continue
1257 continue
1260 copylist.append((tfn(pat, dest, srcs), srcs))
1258 copylist.append((tfn(pat, dest, srcs), srcs))
1261 if not copylist:
1259 if not copylist:
1262 raise error.Abort(_('no files to copy'))
1260 raise error.Abort(_('no files to copy'))
1263
1261
1264 errors = 0
1262 errors = 0
1265 for targetpath, srcs in copylist:
1263 for targetpath, srcs in copylist:
1266 for abssrc, relsrc, exact in srcs:
1264 for abssrc, relsrc, exact in srcs:
1267 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1265 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1268 errors += 1
1266 errors += 1
1269
1267
1270 if errors:
1268 if errors:
1271 ui.warn(_('(consider using --after)\n'))
1269 ui.warn(_('(consider using --after)\n'))
1272
1270
1273 return errors != 0
1271 return errors != 0
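For context, a sketch of how a command-level caller delegates here (not from this file): the caller takes the working-directory lock itself and passes an opts dict carrying at least 'force', 'after' and 'dry_run'; the boolean return value signals whether any file failed.

# sketch: record a rename of 'old-name' to 'new-name'
from mercurial import cmdutil

with repo.wlock(False):
    failed = cmdutil.copy(ui, repo, ['old-name', 'new-name'], opts,
                          rename=True)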
1274
1272
1275 ## facility to let extensions process additional data into an import patch
1273 ## facility to let extensions process additional data into an import patch
1276 # list of identifiers to be executed in order
1274 # list of identifiers to be executed in order
1277 extrapreimport = [] # run before commit
1275 extrapreimport = [] # run before commit
1278 extrapostimport = [] # run after commit
1276 extrapostimport = [] # run after commit
1279 # mapping from identifier to actual import function
1277 # mapping from identifier to actual import function
1280 #
1278 #
1281 # 'preimport' functions are run before the commit is made and are provided
1279 # 'preimport' functions are run before the commit is made and are provided
1282 # the following arguments:
1280 # the following arguments:
1283 # - repo: the localrepository instance,
1281 # - repo: the localrepository instance,
1284 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1282 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1285 # - extra: the future extra dictionary of the changeset, please mutate it,
1283 # - extra: the future extra dictionary of the changeset, please mutate it,
1286 # - opts: the import options.
1284 # - opts: the import options.
1287 # XXX ideally, we would just pass a ctx ready to be computed; that would allow
1285 # XXX ideally, we would just pass a ctx ready to be computed; that would allow
1288 # mutation of the in-memory commit and more. Feel free to rework the code to get
1286 # mutation of the in-memory commit and more. Feel free to rework the code to get
1289 # there.
1287 # there.
1290 extrapreimportmap = {}
1288 extrapreimportmap = {}
1291 # 'postimport' functions are run after the commit is made and are provided
1289 # 'postimport' functions are run after the commit is made and are provided
1292 # the following argument:
1290 # the following argument:
1293 # - ctx: the changectx created by import.
1291 # - ctx: the changectx created by import.
1294 extrapostimportmap = {}
1292 extrapostimportmap = {}
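A sketch of how an extension would hook in here (the 'transplantsource' identifier and the extra key it writes are invented for illustration):

# sketch: record the original node id of every imported patch
from mercurial import cmdutil

def _recordsource(repo, patchdata, extra, opts):
    # patchdata holds the parsed patch header fields (cf. patch.extract)
    if patchdata.get('nodeid'):
        extra['transplantsource'] = patchdata['nodeid']

def _announce(ctx):
    ctx.repo().ui.note('imported as %s\n' % ctx.hex())

cmdutil.extrapreimport.append('transplantsource')
cmdutil.extrapreimportmap['transplantsource'] = _recordsource
cmdutil.extrapostimport.append('transplantsource')
cmdutil.extrapostimportmap['transplantsource'] = _announce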
1295
1293
1296 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1294 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1297 """Utility function used by commands.import to import a single patch
1295 """Utility function used by commands.import to import a single patch
1298
1296
1299 This function is explicitly defined here to help the evolve extension
1297 This function is explicitly defined here to help the evolve extension
1300 wrap this part of the import logic.
1298 wrap this part of the import logic.
1301
1299
1302 The API is currently a bit ugly because it is a simple code translation from
1300 The API is currently a bit ugly because it is a simple code translation from
1303 the import command. Feel free to make it better.
1301 the import command. Feel free to make it better.
1304
1302
1305 :hunk: a patch (as a binary string)
1303 :hunk: a patch (as a binary string)
1306 :parents: nodes that will be the parents of the created commit
1304 :parents: nodes that will be the parents of the created commit
1307 :opts: the full dict of options passed to the import command
1305 :opts: the full dict of options passed to the import command
1308 :msgs: list to save the commit message to.
1306 :msgs: list to save the commit message to.
1309 (used in case we need to save it when failing)
1307 (used in case we need to save it when failing)
1310 :updatefunc: a function that updates a repo to a given node
1308 :updatefunc: a function that updates a repo to a given node
1311 updatefunc(<repo>, <node>)
1309 updatefunc(<repo>, <node>)
1312 """
1310 """
1313 # avoid cycle context -> subrepo -> cmdutil
1311 # avoid cycle context -> subrepo -> cmdutil
1314 from . import context
1312 from . import context
1315 extractdata = patch.extract(ui, hunk)
1313 extractdata = patch.extract(ui, hunk)
1316 tmpname = extractdata.get('filename')
1314 tmpname = extractdata.get('filename')
1317 message = extractdata.get('message')
1315 message = extractdata.get('message')
1318 user = opts.get('user') or extractdata.get('user')
1316 user = opts.get('user') or extractdata.get('user')
1319 date = opts.get('date') or extractdata.get('date')
1317 date = opts.get('date') or extractdata.get('date')
1320 branch = extractdata.get('branch')
1318 branch = extractdata.get('branch')
1321 nodeid = extractdata.get('nodeid')
1319 nodeid = extractdata.get('nodeid')
1322 p1 = extractdata.get('p1')
1320 p1 = extractdata.get('p1')
1323 p2 = extractdata.get('p2')
1321 p2 = extractdata.get('p2')
1324
1322
1325 nocommit = opts.get('no_commit')
1323 nocommit = opts.get('no_commit')
1326 importbranch = opts.get('import_branch')
1324 importbranch = opts.get('import_branch')
1327 update = not opts.get('bypass')
1325 update = not opts.get('bypass')
1328 strip = opts["strip"]
1326 strip = opts["strip"]
1329 prefix = opts["prefix"]
1327 prefix = opts["prefix"]
1330 sim = float(opts.get('similarity') or 0)
1328 sim = float(opts.get('similarity') or 0)
1331 if not tmpname:
1329 if not tmpname:
1332 return (None, None, False)
1330 return (None, None, False)
1333
1331
1334 rejects = False
1332 rejects = False
1335
1333
1336 try:
1334 try:
1337 cmdline_message = logmessage(ui, opts)
1335 cmdline_message = logmessage(ui, opts)
1338 if cmdline_message:
1336 if cmdline_message:
1339 # pickup the cmdline msg
1337 # pickup the cmdline msg
1340 message = cmdline_message
1338 message = cmdline_message
1341 elif message:
1339 elif message:
1342 # pickup the patch msg
1340 # pickup the patch msg
1343 message = message.strip()
1341 message = message.strip()
1344 else:
1342 else:
1345 # launch the editor
1343 # launch the editor
1346 message = None
1344 message = None
1347 ui.debug('message:\n%s\n' % message)
1345 ui.debug('message:\n%s\n' % message)
1348
1346
1349 if len(parents) == 1:
1347 if len(parents) == 1:
1350 parents.append(repo[nullid])
1348 parents.append(repo[nullid])
1351 if opts.get('exact'):
1349 if opts.get('exact'):
1352 if not nodeid or not p1:
1350 if not nodeid or not p1:
1353 raise error.Abort(_('not a Mercurial patch'))
1351 raise error.Abort(_('not a Mercurial patch'))
1354 p1 = repo[p1]
1352 p1 = repo[p1]
1355 p2 = repo[p2 or nullid]
1353 p2 = repo[p2 or nullid]
1356 elif p2:
1354 elif p2:
1357 try:
1355 try:
1358 p1 = repo[p1]
1356 p1 = repo[p1]
1359 p2 = repo[p2]
1357 p2 = repo[p2]
1360 # Without any options, consider p2 only if the
1358 # Without any options, consider p2 only if the
1361 # patch is being applied on top of the recorded
1359 # patch is being applied on top of the recorded
1362 # first parent.
1360 # first parent.
1363 if p1 != parents[0]:
1361 if p1 != parents[0]:
1364 p1 = parents[0]
1362 p1 = parents[0]
1365 p2 = repo[nullid]
1363 p2 = repo[nullid]
1366 except error.RepoError:
1364 except error.RepoError:
1367 p1, p2 = parents
1365 p1, p2 = parents
1368 if p2.node() == nullid:
1366 if p2.node() == nullid:
1369 ui.warn(_("warning: import the patch as a normal revision\n"
1367 ui.warn(_("warning: import the patch as a normal revision\n"
1370 "(use --exact to import the patch as a merge)\n"))
1368 "(use --exact to import the patch as a merge)\n"))
1371 else:
1369 else:
1372 p1, p2 = parents
1370 p1, p2 = parents
1373
1371
1374 n = None
1372 n = None
1375 if update:
1373 if update:
1376 if p1 != parents[0]:
1374 if p1 != parents[0]:
1377 updatefunc(repo, p1.node())
1375 updatefunc(repo, p1.node())
1378 if p2 != parents[1]:
1376 if p2 != parents[1]:
1379 repo.setparents(p1.node(), p2.node())
1377 repo.setparents(p1.node(), p2.node())
1380
1378
1381 if opts.get('exact') or importbranch:
1379 if opts.get('exact') or importbranch:
1382 repo.dirstate.setbranch(branch or 'default')
1380 repo.dirstate.setbranch(branch or 'default')
1383
1381
1384 partial = opts.get('partial', False)
1382 partial = opts.get('partial', False)
1385 files = set()
1383 files = set()
1386 try:
1384 try:
1387 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1385 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1388 files=files, eolmode=None, similarity=sim / 100.0)
1386 files=files, eolmode=None, similarity=sim / 100.0)
1389 except error.PatchError as e:
1387 except error.PatchError as e:
1390 if not partial:
1388 if not partial:
1391 raise error.Abort(str(e))
1389 raise error.Abort(str(e))
1392 if partial:
1390 if partial:
1393 rejects = True
1391 rejects = True
1394
1392
1395 files = list(files)
1393 files = list(files)
1396 if nocommit:
1394 if nocommit:
1397 if message:
1395 if message:
1398 msgs.append(message)
1396 msgs.append(message)
1399 else:
1397 else:
1400 if opts.get('exact') or p2:
1398 if opts.get('exact') or p2:
1401 # If you got here, you either used --force and know what
1399 # If you got here, you either used --force and know what
1402 # you are doing or used --exact or a merge patch while
1400 # you are doing or used --exact or a merge patch while
1403 # being updated to its first parent.
1401 # being updated to its first parent.
1404 m = None
1402 m = None
1405 else:
1403 else:
1406 m = scmutil.matchfiles(repo, files or [])
1404 m = scmutil.matchfiles(repo, files or [])
1407 editform = mergeeditform(repo[None], 'import.normal')
1405 editform = mergeeditform(repo[None], 'import.normal')
1408 if opts.get('exact'):
1406 if opts.get('exact'):
1409 editor = None
1407 editor = None
1410 else:
1408 else:
1411 editor = getcommiteditor(editform=editform,
1409 editor = getcommiteditor(editform=editform,
1412 **pycompat.strkwargs(opts))
1410 **pycompat.strkwargs(opts))
1413 extra = {}
1411 extra = {}
1414 for idfunc in extrapreimport:
1412 for idfunc in extrapreimport:
1415 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1413 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1416 overrides = {}
1414 overrides = {}
1417 if partial:
1415 if partial:
1418 overrides[('ui', 'allowemptycommit')] = True
1416 overrides[('ui', 'allowemptycommit')] = True
1419 with repo.ui.configoverride(overrides, 'import'):
1417 with repo.ui.configoverride(overrides, 'import'):
1420 n = repo.commit(message, user,
1418 n = repo.commit(message, user,
1421 date, match=m,
1419 date, match=m,
1422 editor=editor, extra=extra)
1420 editor=editor, extra=extra)
1423 for idfunc in extrapostimport:
1421 for idfunc in extrapostimport:
1424 extrapostimportmap[idfunc](repo[n])
1422 extrapostimportmap[idfunc](repo[n])
1425 else:
1423 else:
1426 if opts.get('exact') or importbranch:
1424 if opts.get('exact') or importbranch:
1427 branch = branch or 'default'
1425 branch = branch or 'default'
1428 else:
1426 else:
1429 branch = p1.branch()
1427 branch = p1.branch()
1430 store = patch.filestore()
1428 store = patch.filestore()
1431 try:
1429 try:
1432 files = set()
1430 files = set()
1433 try:
1431 try:
1434 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1432 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1435 files, eolmode=None)
1433 files, eolmode=None)
1436 except error.PatchError as e:
1434 except error.PatchError as e:
1437 raise error.Abort(str(e))
1435 raise error.Abort(str(e))
1438 if opts.get('exact'):
1436 if opts.get('exact'):
1439 editor = None
1437 editor = None
1440 else:
1438 else:
1441 editor = getcommiteditor(editform='import.bypass')
1439 editor = getcommiteditor(editform='import.bypass')
1442 memctx = context.memctx(repo, (p1.node(), p2.node()),
1440 memctx = context.memctx(repo, (p1.node(), p2.node()),
1443 message,
1441 message,
1444 files=files,
1442 files=files,
1445 filectxfn=store,
1443 filectxfn=store,
1446 user=user,
1444 user=user,
1447 date=date,
1445 date=date,
1448 branch=branch,
1446 branch=branch,
1449 editor=editor)
1447 editor=editor)
1450 n = memctx.commit()
1448 n = memctx.commit()
1451 finally:
1449 finally:
1452 store.close()
1450 store.close()
1453 if opts.get('exact') and nocommit:
1451 if opts.get('exact') and nocommit:
1454 # --exact with --no-commit is still useful in that it does merge
1452 # --exact with --no-commit is still useful in that it does merge
1455 # and branch bits
1453 # and branch bits
1456 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1454 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1457 elif opts.get('exact') and hex(n) != nodeid:
1455 elif opts.get('exact') and hex(n) != nodeid:
1458 raise error.Abort(_('patch is damaged or loses information'))
1456 raise error.Abort(_('patch is damaged or loses information'))
1459 msg = _('applied to working directory')
1457 msg = _('applied to working directory')
1460 if n:
1458 if n:
1461 # i18n: refers to a short changeset id
1459 # i18n: refers to a short changeset id
1462 msg = _('created %s') % short(n)
1460 msg = _('created %s') % short(n)
1463 return (msg, n, rejects)
1461 return (msg, n, rejects)
1464 finally:
1462 finally:
1465 os.unlink(tmpname)
1463 os.unlink(tmpname)
1466
1464
1467 # facility to let extensions include additional data in an exported patch
1465 # facility to let extensions include additional data in an exported patch
1468 # list of identifiers to be executed in order
1466 # list of identifiers to be executed in order
1469 extraexport = []
1467 extraexport = []
1470 # mapping from identifier to actual export function
1468 # mapping from identifier to actual export function
1471 # the function has to return a string to be added to the header or None
1469 # the function has to return a string to be added to the header or None
1472 # it is given two arguments (sequencenumber, changectx)
1470 # it is given two arguments (sequencenumber, changectx)
1473 extraexportmap = {}
1471 extraexportmap = {}
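Correspondingly, a sketch of an extension adding its own header line to exported patches (the 'topic' identifier and the extra field it reads are hypothetical):

# sketch: emit an extra '# Topic ...' header when the changeset carries one
from mercurial import cmdutil

def _topicheader(seqno, ctx):
    topic = ctx.extra().get('topic')
    if topic:
        return 'Topic %s' % topic
    return None  # no header for this changeset

cmdutil.extraexport.append('topic')
cmdutil.extraexportmap['topic'] = _topicheader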
1474
1472
1475 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1473 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1476 node = scmutil.binnode(ctx)
1474 node = scmutil.binnode(ctx)
1477 parents = [p.node() for p in ctx.parents() if p]
1475 parents = [p.node() for p in ctx.parents() if p]
1478 branch = ctx.branch()
1476 branch = ctx.branch()
1479 if switch_parent:
1477 if switch_parent:
1480 parents.reverse()
1478 parents.reverse()
1481
1479
1482 if parents:
1480 if parents:
1483 prev = parents[0]
1481 prev = parents[0]
1484 else:
1482 else:
1485 prev = nullid
1483 prev = nullid
1486
1484
1487 write("# HG changeset patch\n")
1485 write("# HG changeset patch\n")
1488 write("# User %s\n" % ctx.user())
1486 write("# User %s\n" % ctx.user())
1489 write("# Date %d %d\n" % ctx.date())
1487 write("# Date %d %d\n" % ctx.date())
1490 write("# %s\n" % util.datestr(ctx.date()))
1488 write("# %s\n" % util.datestr(ctx.date()))
1491 if branch and branch != 'default':
1489 if branch and branch != 'default':
1492 write("# Branch %s\n" % branch)
1490 write("# Branch %s\n" % branch)
1493 write("# Node ID %s\n" % hex(node))
1491 write("# Node ID %s\n" % hex(node))
1494 write("# Parent %s\n" % hex(prev))
1492 write("# Parent %s\n" % hex(prev))
1495 if len(parents) > 1:
1493 if len(parents) > 1:
1496 write("# Parent %s\n" % hex(parents[1]))
1494 write("# Parent %s\n" % hex(parents[1]))
1497
1495
1498 for headerid in extraexport:
1496 for headerid in extraexport:
1499 header = extraexportmap[headerid](seqno, ctx)
1497 header = extraexportmap[headerid](seqno, ctx)
1500 if header is not None:
1498 if header is not None:
1501 write('# %s\n' % header)
1499 write('# %s\n' % header)
1502 write(ctx.description().rstrip())
1500 write(ctx.description().rstrip())
1503 write("\n\n")
1501 write("\n\n")
1504
1502
1505 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1503 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1506 write(chunk, label=label)
1504 write(chunk, label=label)
1507
1505
1508 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1506 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1509 opts=None, match=None):
1507 opts=None, match=None):
1510 '''export changesets as hg patches
1508 '''export changesets as hg patches
1511
1509
1512 Args:
1510 Args:
1513 repo: The repository from which we're exporting revisions.
1511 repo: The repository from which we're exporting revisions.
1514 revs: A list of revisions to export as revision numbers.
1512 revs: A list of revisions to export as revision numbers.
1515 fntemplate: An optional string to use for generating patch file names.
1513 fntemplate: An optional string to use for generating patch file names.
1516 fp: An optional file-like object to which patches should be written.
1514 fp: An optional file-like object to which patches should be written.
1517 switch_parent: If True, show diffs against the second parent when it is
1515 switch_parent: If True, show diffs against the second parent when it is
1518 not nullid. Defaults to False, which always diffs against p1.
1516 not nullid. Defaults to False, which always diffs against p1.
1519 opts: diff options to use for generating the patch.
1517 opts: diff options to use for generating the patch.
1520 match: If specified, only export changes to files matching this matcher.
1518 match: If specified, only export changes to files matching this matcher.
1521
1519
1522 Returns:
1520 Returns:
1523 Nothing.
1521 Nothing.
1524
1522
1525 Side Effect:
1523 Side Effect:
1526 "HG Changeset Patch" data is emitted to one of the following
1524 "HG Changeset Patch" data is emitted to one of the following
1527 destinations:
1525 destinations:
1528 fp is specified: All revs are written to the specified
1526 fp is specified: All revs are written to the specified
1529 file-like object.
1527 file-like object.
1530 fntemplate specified: Each rev is written to a unique file named using
1528 fntemplate specified: Each rev is written to a unique file named using
1531 the given template.
1529 the given template.
1532 Neither fp nor template specified: All revs written to repo.ui.write()
1530 Neither fp nor template specified: All revs written to repo.ui.write()
1533 '''
1531 '''
1534
1532
1535 total = len(revs)
1533 total = len(revs)
1536 revwidth = max(len(str(rev)) for rev in revs)
1534 revwidth = max(len(str(rev)) for rev in revs)
1537 filemode = {}
1535 filemode = {}
1538
1536
1539 write = None
1537 write = None
1540 dest = '<unnamed>'
1538 dest = '<unnamed>'
1541 if fp:
1539 if fp:
1542 dest = getattr(fp, 'name', dest)
1540 dest = getattr(fp, 'name', dest)
1543 def write(s, **kw):
1541 def write(s, **kw):
1544 fp.write(s)
1542 fp.write(s)
1545 elif not fntemplate:
1543 elif not fntemplate:
1546 write = repo.ui.write
1544 write = repo.ui.write
1547
1545
1548 for seqno, rev in enumerate(revs, 1):
1546 for seqno, rev in enumerate(revs, 1):
1549 ctx = repo[rev]
1547 ctx = repo[rev]
1550 fo = None
1548 fo = None
1551 if not fp and fntemplate:
1549 if not fp and fntemplate:
1552 desc_lines = ctx.description().rstrip().split('\n')
1550 desc_lines = ctx.description().rstrip().split('\n')
1553 desc = desc_lines[0] #Commit always has a first line.
1551 desc = desc_lines[0] #Commit always has a first line.
1554 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1552 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1555 total=total, seqno=seqno, revwidth=revwidth,
1553 total=total, seqno=seqno, revwidth=revwidth,
1556 mode='wb', modemap=filemode)
1554 mode='wb', modemap=filemode)
1557 dest = fo.name
1555 dest = fo.name
1558 def write(s, **kw):
1556 def write(s, **kw):
1559 fo.write(s)
1557 fo.write(s)
1560 if not dest.startswith('<'):
1558 if not dest.startswith('<'):
1561 repo.ui.note("%s\n" % dest)
1559 repo.ui.note("%s\n" % dest)
1562 _exportsingle(
1560 _exportsingle(
1563 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1561 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1564 if fo is not None:
1562 if fo is not None:
1565 fo.close()
1563 fo.close()
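A usage sketch tying this back to the makefilename() expanders (the revset and template are illustrative; 'repo' is assumed):

# sketch: one numbered patch file per revision, then everything to one file
from mercurial import cmdutil

revs = repo.revs('tip~2::tip')
cmdutil.export(repo, revs, fntemplate='out-%n-of-%N-%h.patch')

with open('all.patch', 'wb') as fp:
    cmdutil.export(repo, revs, fp=fp)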
1566
1564
1567 def showmarker(fm, marker, index=None):
1565 def showmarker(fm, marker, index=None):
1568 """utility function to display obsolescence marker in a readable way
1566 """utility function to display obsolescence marker in a readable way
1569
1567
1570 To be used by debug functions."""
1568 To be used by debug functions."""
1571 if index is not None:
1569 if index is not None:
1572 fm.write('index', '%i ', index)
1570 fm.write('index', '%i ', index)
1573 fm.write('prednode', '%s ', hex(marker.prednode()))
1571 fm.write('prednode', '%s ', hex(marker.prednode()))
1574 succs = marker.succnodes()
1572 succs = marker.succnodes()
1575 fm.condwrite(succs, 'succnodes', '%s ',
1573 fm.condwrite(succs, 'succnodes', '%s ',
1576 fm.formatlist(map(hex, succs), name='node'))
1574 fm.formatlist(map(hex, succs), name='node'))
1577 fm.write('flag', '%X ', marker.flags())
1575 fm.write('flag', '%X ', marker.flags())
1578 parents = marker.parentnodes()
1576 parents = marker.parentnodes()
1579 if parents is not None:
1577 if parents is not None:
1580 fm.write('parentnodes', '{%s} ',
1578 fm.write('parentnodes', '{%s} ',
1581 fm.formatlist(map(hex, parents), name='node', sep=', '))
1579 fm.formatlist(map(hex, parents), name='node', sep=', '))
1582 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1580 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1583 meta = marker.metadata().copy()
1581 meta = marker.metadata().copy()
1584 meta.pop('date', None)
1582 meta.pop('date', None)
1585 smeta = util.rapply(pycompat.maybebytestr, meta)
1583 smeta = util.rapply(pycompat.maybebytestr, meta)
1586 fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
1584 fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
1587 fm.plain('\n')
1585 fm.plain('\n')
1588
1586
1589 def finddate(ui, repo, date):
1587 def finddate(ui, repo, date):
1590 """Find the tipmost changeset that matches the given date spec"""
1588 """Find the tipmost changeset that matches the given date spec"""
1591
1589
1592 df = util.matchdate(date)
1590 df = util.matchdate(date)
1593 m = scmutil.matchall(repo)
1591 m = scmutil.matchall(repo)
1594 results = {}
1592 results = {}
1595
1593
1596 def prep(ctx, fns):
1594 def prep(ctx, fns):
1597 d = ctx.date()
1595 d = ctx.date()
1598 if df(d[0]):
1596 if df(d[0]):
1599 results[ctx.rev()] = d
1597 results[ctx.rev()] = d
1600
1598
1601 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1599 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1602 rev = ctx.rev()
1600 rev = ctx.rev()
1603 if rev in results:
1601 if rev in results:
1604 ui.status(_("found revision %s from %s\n") %
1602 ui.status(_("found revision %s from %s\n") %
1605 (rev, util.datestr(results[rev])))
1603 (rev, util.datestr(results[rev])))
1606 return '%d' % rev
1604 return '%d' % rev
1607
1605
1608 raise error.Abort(_("revision matching date not found"))
1606 raise error.Abort(_("revision matching date not found"))
1609
1607
1610 def increasingwindows(windowsize=8, sizelimit=512):
1608 def increasingwindows(windowsize=8, sizelimit=512):
1611 while True:
1609 while True:
1612 yield windowsize
1610 yield windowsize
1613 if windowsize < sizelimit:
1611 if windowsize < sizelimit:
1614 windowsize *= 2
1612 windowsize *= 2
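The window simply doubles until it reaches the cap and then repeats it forever; for instance:

# sketch: the first few window sizes
from itertools import islice
from mercurial import cmdutil

list(islice(cmdutil.increasingwindows(), 9))
# -> [8, 16, 32, 64, 128, 256, 512, 512, 512]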
1615
1613
1616 def _walkrevs(repo, opts):
1614 def _walkrevs(repo, opts):
1617 # Default --rev value depends on --follow but --follow behavior
1615 # Default --rev value depends on --follow but --follow behavior
1618 # depends on revisions resolved from --rev...
1616 # depends on revisions resolved from --rev...
1619 follow = opts.get('follow') or opts.get('follow_first')
1617 follow = opts.get('follow') or opts.get('follow_first')
1620 if opts.get('rev'):
1618 if opts.get('rev'):
1621 revs = scmutil.revrange(repo, opts['rev'])
1619 revs = scmutil.revrange(repo, opts['rev'])
1622 elif follow and repo.dirstate.p1() == nullid:
1620 elif follow and repo.dirstate.p1() == nullid:
1623 revs = smartset.baseset()
1621 revs = smartset.baseset()
1624 elif follow:
1622 elif follow:
1625 revs = repo.revs('reverse(:.)')
1623 revs = repo.revs('reverse(:.)')
1626 else:
1624 else:
1627 revs = smartset.spanset(repo)
1625 revs = smartset.spanset(repo)
1628 revs.reverse()
1626 revs.reverse()
1629 return revs
1627 return revs
1630
1628
1631 class FileWalkError(Exception):
1629 class FileWalkError(Exception):
1632 pass
1630 pass
1633
1631
1634 def walkfilerevs(repo, match, follow, revs, fncache):
1632 def walkfilerevs(repo, match, follow, revs, fncache):
1635 '''Walks the file history for the matched files.
1633 '''Walks the file history for the matched files.
1636
1634
1637 Returns the changeset revs that are involved in the file history.
1635 Returns the changeset revs that are involved in the file history.
1638
1636
1639 Throws FileWalkError if the file history can't be walked using
1637 Throws FileWalkError if the file history can't be walked using
1640 filelogs alone.
1638 filelogs alone.
1641 '''
1639 '''
1642 wanted = set()
1640 wanted = set()
1643 copies = []
1641 copies = []
1644 minrev, maxrev = min(revs), max(revs)
1642 minrev, maxrev = min(revs), max(revs)
1645 def filerevgen(filelog, last):
1643 def filerevgen(filelog, last):
1646 """
1644 """
1647 Only files, no patterns. Check the history of each file.
1645 Only files, no patterns. Check the history of each file.
1648
1646
1649 Examines filelog entries within the [minrev, maxrev] linkrev range.
1647 Examines filelog entries within the [minrev, maxrev] linkrev range.
1650 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1648 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1651 tuples in backwards order
1649 tuples in backwards order
1652 """
1650 """
1653 cl_count = len(repo)
1651 cl_count = len(repo)
1654 revs = []
1652 revs = []
1655 for j in xrange(0, last + 1):
1653 for j in xrange(0, last + 1):
1656 linkrev = filelog.linkrev(j)
1654 linkrev = filelog.linkrev(j)
1657 if linkrev < minrev:
1655 if linkrev < minrev:
1658 continue
1656 continue
1659 # only yield rev for which we have the changelog, it can
1657 # only yield rev for which we have the changelog, it can
1660 # happen while doing "hg log" during a pull or commit
1658 # happen while doing "hg log" during a pull or commit
1661 if linkrev >= cl_count:
1659 if linkrev >= cl_count:
1662 break
1660 break
1663
1661
1664 parentlinkrevs = []
1662 parentlinkrevs = []
1665 for p in filelog.parentrevs(j):
1663 for p in filelog.parentrevs(j):
1666 if p != nullrev:
1664 if p != nullrev:
1667 parentlinkrevs.append(filelog.linkrev(p))
1665 parentlinkrevs.append(filelog.linkrev(p))
1668 n = filelog.node(j)
1666 n = filelog.node(j)
1669 revs.append((linkrev, parentlinkrevs,
1667 revs.append((linkrev, parentlinkrevs,
1670 follow and filelog.renamed(n)))
1668 follow and filelog.renamed(n)))
1671
1669
1672 return reversed(revs)
1670 return reversed(revs)
1673 def iterfiles():
1671 def iterfiles():
1674 pctx = repo['.']
1672 pctx = repo['.']
1675 for filename in match.files():
1673 for filename in match.files():
1676 if follow:
1674 if follow:
1677 if filename not in pctx:
1675 if filename not in pctx:
1678 raise error.Abort(_('cannot follow file not in parent '
1676 raise error.Abort(_('cannot follow file not in parent '
1679 'revision: "%s"') % filename)
1677 'revision: "%s"') % filename)
1680 yield filename, pctx[filename].filenode()
1678 yield filename, pctx[filename].filenode()
1681 else:
1679 else:
1682 yield filename, None
1680 yield filename, None
1683 for filename_node in copies:
1681 for filename_node in copies:
1684 yield filename_node
1682 yield filename_node
1685
1683
1686 for file_, node in iterfiles():
1684 for file_, node in iterfiles():
1687 filelog = repo.file(file_)
1685 filelog = repo.file(file_)
1688 if not len(filelog):
1686 if not len(filelog):
1689 if node is None:
1687 if node is None:
1690 # A zero count may be a directory or deleted file, so
1688 # A zero count may be a directory or deleted file, so
1691 # try to find matching entries on the slow path.
1689 # try to find matching entries on the slow path.
1692 if follow:
1690 if follow:
1693 raise error.Abort(
1691 raise error.Abort(
1694 _('cannot follow nonexistent file: "%s"') % file_)
1692 _('cannot follow nonexistent file: "%s"') % file_)
1695 raise FileWalkError("Cannot walk via filelog")
1693 raise FileWalkError("Cannot walk via filelog")
1696 else:
1694 else:
1697 continue
1695 continue
1698
1696
1699 if node is None:
1697 if node is None:
1700 last = len(filelog) - 1
1698 last = len(filelog) - 1
1701 else:
1699 else:
1702 last = filelog.rev(node)
1700 last = filelog.rev(node)
1703
1701
1704 # keep track of all ancestors of the file
1702 # keep track of all ancestors of the file
1705 ancestors = {filelog.linkrev(last)}
1703 ancestors = {filelog.linkrev(last)}
1706
1704
1707 # iterate from latest to oldest revision
1705 # iterate from latest to oldest revision
1708 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1706 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1709 if not follow:
1707 if not follow:
1710 if rev > maxrev:
1708 if rev > maxrev:
1711 continue
1709 continue
1712 else:
1710 else:
1713 # Note that last might not be the first interesting
1711 # Note that last might not be the first interesting
1714 # rev to us:
1712 # rev to us:
1715 # if the file has been changed after maxrev, we'll
1713 # if the file has been changed after maxrev, we'll
1716 # have linkrev(last) > maxrev, and we still need
1714 # have linkrev(last) > maxrev, and we still need
1717 # to explore the file graph
1715 # to explore the file graph
1718 if rev not in ancestors:
1716 if rev not in ancestors:
1719 continue
1717 continue
1720 # XXX insert 1327 fix here
1718 # XXX insert 1327 fix here
1721 if flparentlinkrevs:
1719 if flparentlinkrevs:
1722 ancestors.update(flparentlinkrevs)
1720 ancestors.update(flparentlinkrevs)
1723
1721
1724 fncache.setdefault(rev, []).append(file_)
1722 fncache.setdefault(rev, []).append(file_)
1725 wanted.add(rev)
1723 wanted.add(rev)
1726 if copied:
1724 if copied:
1727 copies.append(copied)
1725 copies.append(copied)
1728
1726
1729 return wanted
1727 return wanted
1730
1728
1731 class _followfilter(object):
1729 class _followfilter(object):
1732 def __init__(self, repo, onlyfirst=False):
1730 def __init__(self, repo, onlyfirst=False):
1733 self.repo = repo
1731 self.repo = repo
1734 self.startrev = nullrev
1732 self.startrev = nullrev
1735 self.roots = set()
1733 self.roots = set()
1736 self.onlyfirst = onlyfirst
1734 self.onlyfirst = onlyfirst
1737
1735
1738 def match(self, rev):
1736 def match(self, rev):
1739 def realparents(rev):
1737 def realparents(rev):
1740 if self.onlyfirst:
1738 if self.onlyfirst:
1741 return self.repo.changelog.parentrevs(rev)[0:1]
1739 return self.repo.changelog.parentrevs(rev)[0:1]
1742 else:
1740 else:
1743 return filter(lambda x: x != nullrev,
1741 return filter(lambda x: x != nullrev,
1744 self.repo.changelog.parentrevs(rev))
1742 self.repo.changelog.parentrevs(rev))
1745
1743
1746 if self.startrev == nullrev:
1744 if self.startrev == nullrev:
1747 self.startrev = rev
1745 self.startrev = rev
1748 return True
1746 return True
1749
1747
1750 if rev > self.startrev:
1748 if rev > self.startrev:
1751 # forward: all descendants
1749 # forward: all descendants
1752 if not self.roots:
1750 if not self.roots:
1753 self.roots.add(self.startrev)
1751 self.roots.add(self.startrev)
1754 for parent in realparents(rev):
1752 for parent in realparents(rev):
1755 if parent in self.roots:
1753 if parent in self.roots:
1756 self.roots.add(rev)
1754 self.roots.add(rev)
1757 return True
1755 return True
1758 else:
1756 else:
1759 # backwards: all parents
1757 # backwards: all parents
1760 if not self.roots:
1758 if not self.roots:
1761 self.roots.update(realparents(self.startrev))
1759 self.roots.update(realparents(self.startrev))
1762 if rev in self.roots:
1760 if rev in self.roots:
1763 self.roots.remove(rev)
1761 self.roots.remove(rev)
1764 self.roots.update(realparents(rev))
1762 self.roots.update(realparents(rev))
1765 return True
1763 return True
1766
1764
1767 return False
1765 return False
1768
1766
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _walkrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()

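A minimal sketch of how a log-style caller might drive walkchangerevs() (not part of cmdutil.py; _printrevs and collected are made-up names): prepare() runs in forward order for every context of the current window, and the returned iterator then yields the same contexts in display order.

    def _printrevs(ui, repo, match, opts):
        collected = {}

        def prepare(ctx, fns):
            # fns iterates only the files of ctx that the matcher accepted
            collected[ctx.rev()] = list(fns)

        for ctx in walkchangerevs(repo, match, opts, prepare):
            ui.write("%d: %s\n" % (ctx.rev(), ", ".join(collected[ctx.rev()])))
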
def add(ui, repo, match, prefix, explicitonly, **opts):
    join = lambda f: os.path.join(prefix, f)
    bad = []

    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
                                  unknown=True, ignored=False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad

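For context, the command layer typically just builds a matcher and turns any rejected paths into a non-zero exit code, roughly like this (a sketch, not the literal commands.py code):

    m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
    rejected = add(ui, repo, m, "", False, **opts)
    return rejected and 1 or 0
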
def addwebdirpath(repo, serverpath, webconf):
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    for r in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[r]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)

def forget(ui, repo, match, prefix, explicitonly):
    join = lambda f: os.path.join(prefix, f)
    bad = []
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot

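Callers usually only need the first element of the returned pair, e.g. (a sketch, not the literal commands.py code):

    m = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
    rejected = forget(ui, repo, m, prefix="", explicitonly=False)[0]
    return rejected and 1 or 0
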
def files(ui, ctx, m, fm, fmt, subrepos):
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret

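A sketch of how this helper is driven with a formatter (illustrative only; fmt simply appends the desired end-of-line character to each path):

    end = opts.get('print0') and '\0' or '\n'
    m = scmutil.match(ctx, pats, opts)
    with ui.formatter('files', opts) as fm:
        return files(ui, ctx, m, fm, '%s' + end, opts.get('subrepos'))
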
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                                % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
            # missing files will generate a warning elsewhere
            ret = 1
    ui.progress(_('deleting'), None)

    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            if ui.verbose or (f in files):
                warnings.append(_('not removing %s: file still exists\n')
                                % m.rel(f))
                ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret

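A sketch of the command-level call into this helper (illustrative, not the literal commands.py code):

    m = scmutil.match(repo[None], pats, opts)
    return remove(ui, repo, m, "", opts.get('after'), opts.get('force'),
                  opts.get('subrepos'))
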
def _updatecatformatter(fm, ctx, matcher, path, decode):
    """Hook for adding data to the formatter used by ``hg cat``.

    Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
    this method first."""
    data = ctx[path].data()
    if decode:
        data = ctx.repo().wwritedata(path, data)
    fm.startitem()
    fm.write('data', '%s', data)
    fm.data(abspath=path, path=matcher.rel(path))

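Since the docstring invites extensions to wrap this hook, here is a sketch of such a wrapper (hypothetical extension code, not part of cmdutil.py):

    from mercurial import cmdutil, extensions

    def _catformatter(orig, fm, ctx, matcher, path, decode):
        orig(fm, ctx, matcher, path, decode)      # the original must run first
        fm.data(filesize=len(ctx[path].data()))   # then add extra keywords

    def uisetup(ui):
        extensions.wrapfunction(cmdutil, '_updatecatformatter', _catformatter)
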
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    err = 1
    opts = pycompat.byteskwargs(opts)

    def write(path):
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with formatter.maybereopen(basefm, filename, opts) as fm:
            _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                _prefetchfiles(repo, ctx, [file])
                write(file)
                return 0
        except KeyError:
            pass

    files = [f for f in ctx.walk(matcher)]
    _prefetchfiles(repo, ctx, files)

    for abs in files:
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path),
                           **pycompat.strkwargs(opts)):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err

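A sketch of how a caller wires this up with a formatter (illustrative; file1 and pats stand in for the command arguments):

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    fntemplate = opts.pop('output', '')
    with ui.formatter('cat', opts) as fm:
        return cat(ui, repo, ctx, m, fm, fntemplate, '', **opts)
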
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    dsguard = None
    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        dsguard = dirstateguard.dirstateguard(repo, 'commit')
    with dsguard or util.nullcontextmanager():
        if dsguard:
            if scmutil.addremove(repo, matcher, "", opts) != 0:
                raise error.Abort(
                    _("failed to mark all new/missing files as added/removed"))

        return commitfunc(ui, repo, message, matcher, opts)

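The commitfunc argument lets each command supply its own final commit step; it usually takes roughly this shape (a sketch; editor and extra would come from the caller's scope):

    def commitfunc(ui, repo, message, match, opts):
        return repo.commit(message, opts.get('user'), opts.get('date'),
                           match, editor=editor, extra=extra)

    node = commit(ui, repo, commitfunc, pats, opts)
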
def samefile(f, ctx1, ctx2):
    if f in ctx1.manifest():
        a = ctx1.filectx(f)
        if f in ctx2.manifest():
            b = ctx2.filectx(f)
            return (not a.cmp(b)
                    and a.flags() == b.flags())
        else:
            return False
    else:
        return f not in ctx2.manifest()

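In other words, two contexts "agree" on a file when content and flags match, or when neither tracks it at all, e.g.:

    # True if 'some/path' (hypothetical) has identical data and flags in the
    # working copy and its parent, or is tracked by neither; False otherwise.
    samefile('some/path', repo[None], repo['.'])
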
def amend(ui, repo, old, extra, pats, opts):
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    with repo.wlock(), repo.lock(), repo.transaction('amend'):
        # Participating changesets:
        #
        # wctx     o - workingctx that contains changes from working copy
        #          |   to go into amending commit
        #          |
        # old      o - changeset to amend
        #          |
        # base     o - first parent of the changeset to amend
        wctx = repo[None]

        # Copy to avoid mutating input
        extra = extra.copy()
        # Update extra dict from amended commit (e.g. to preserve graft
        # source)
        extra.update(old.extra())

        # Also update it from the wctx
        extra.update(wctx.extra())

        user = opts.get('user') or old.user()
        date = opts.get('date') or old.date()

        # Parse the date to allow comparison between date and old.date()
        date = util.parsedate(date)

        if len(old.parents()) > 1:
            # ctx.files() isn't reliable for merges, so fall back to the
            # slower repo.status() method
            files = set([fn for st in repo.status(base, old)[:3]
                         for fn in st])
        else:
            files = set(old.files())

        # add/remove the files to the working copy if the "addremove" option
        # was specified.
        matcher = scmutil.match(wctx, pats, opts)
        if (opts.get('addremove')
            and scmutil.addremove(repo, matcher, "", opts)):
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

        # Check subrepos. This depends on in-place wctx._status update in
        # subrepo.precommit(). To minimize the risk of this hack, we do
        # nothing if .hgsub does not exist.
        if '.hgsub' in wctx or '.hgsub' in old:
            subs, commitsubs, newsubstate = subrepoutil.precommit(
                ui, wctx, wctx._status, matcher)
            # amend should abort if commitsubrepos is enabled
            assert not commitsubs
            if subs:
                subrepoutil.writestate(repo, newsubstate)

        filestoamend = set(f for f in wctx.files() if matcher(f))

        changes = (len(filestoamend) > 0)
        if changes:
            # Recompute copies (avoid recording a -> b -> a)
            copied = copies.pathcopies(base, wctx, matcher)
            if old.p2:
                copied.update(copies.pathcopies(old.p2(), wctx, matcher))

            # Prune files which were reverted by the updates: if old
            # introduced file X and the file was renamed in the working
            # copy, then those two files are the same and
            # we can discard X from our list of files. Likewise if X
            # was removed, it's no longer relevant. If X is missing (aka
            # deleted), old X must be preserved.
            files.update(filestoamend)
            files = [f for f in files if (not samefile(f, wctx, base)
                                          or f in wctx.deleted())]

            def filectxfn(repo, ctx_, path):
                try:
                    # If the file being considered is not amongst the files
                    # to be amended, we should return the file context from the
                    # old changeset. This avoids issues when only some files in
                    # the working copy are being amended but there are also
                    # changes to other files from the old changeset.
                    if path not in filestoamend:
                        return old.filectx(path)

                    # Return None for removed files.
                    if path in wctx.removed():
                        return None

                    fctx = wctx[path]
                    flags = fctx.flags()
                    mctx = context.memfilectx(repo, ctx_,
                                              fctx.path(), fctx.data(),
                                              islink='l' in flags,
                                              isexec='x' in flags,
                                              copied=copied.get(path))
                    return mctx
                except KeyError:
                    return None
        else:
            ui.note(_('copying changeset %s to %s\n') % (old, base))

            # Use version of files as in the old cset
            def filectxfn(repo, ctx_, path):
                try:
                    return old.filectx(path)
                except KeyError:
                    return None

        # See if we got a message from -m or -l, if not, open the editor with
        # the message of the changeset to amend.
        message = logmessage(ui, opts)

        editform = mergeeditform(old, 'commit.amend')
        editor = getcommiteditor(editform=editform,
                                 **pycompat.strkwargs(opts))

        if not message:
            editor = getcommiteditor(edit=True, editform=editform)
            message = old.description()

        pureextra = extra.copy()
        extra['amend_source'] = old.hex()

        new = context.memctx(repo,
                             parents=[base.node(), old.p2().node()],
                             text=message,
                             files=files,
                             filectxfn=filectxfn,
                             user=user,
                             date=date,
                             extra=extra,
                             editor=editor)

        newdesc = changelog.stripdesc(new.description())
        if ((not changes)
            and newdesc == old.description()
            and user == old.user()
            and date == old.date()
            and pureextra == old.extra()):
            # nothing changed. continuing here would create a new node
            # anyway because of the amend_source noise.
            #
            # This is not what we expect from amend.
            return old.node()

        if opts.get('secret'):
            commitphase = 'secret'
        else:
            commitphase = old.phase()
        overrides = {('phases', 'new-commit'): commitphase}
        with ui.configoverride(overrides, 'amend'):
            newid = repo.commitctx(new)

        # Reroute the working copy parent to the new changeset
        repo.setparents(newid, nullid)
        mapping = {old.node(): (newid,)}
        obsmetadata = None
        if opts.get('note'):
            obsmetadata = {'note': opts['note']}
        scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)

        # Fixing the dirstate because localrepo.commitctx does not update
        # it. This is rather convenient because we did not need to update
        # the dirstate for all the files in the new commit which commitctx
        # could have done if it updated the dirstate. Now, we can
        # selectively update the dirstate only for the amended files.
        dirstate = repo.dirstate

        # Update the state of the files which were added and
        # modified in the amend to "normal" in the dirstate.
        normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
        for f in normalfiles:
            dirstate.normal(f)

        # Update the state of files which were removed in the amend
        # to "removed" in the dirstate.
        removedfiles = set(wctx.removed()) & filestoamend
        for f in removedfiles:
            dirstate.drop(f)

    return newid

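At the command level the returned node is compared with the old one to detect the "nothing changed" case, roughly (a sketch, not the literal commands.py code):

    old = repo['.']
    node = amend(ui, repo, old, {}, pats, opts)
    if node == old.node():
        ui.status(_("nothing changed\n"))
        return 1
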
def commiteditor(repo, ctx, subs, editform=''):
    if ctx.description():
        return ctx.description()
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)

def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        ref = '.'.join(forms)
        if repo.ui.config('committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path, action='commit')
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text

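The while/else loop above searches [committemplate] keys from the most to the least specific form of the editform, so for editform 'commit.amend' the lookup order is changeset.commit.amend, then changeset.commit, then changeset. An illustrative hgrc entry (the template text itself is made up):

    [committemplate]
    changeset.commit.amend = {desc}\n
        HG: amending {node|short} on branch {branch}
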
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    ui = repo.ui
    spec = formatter.templatespec(ref, None, None)
    t = logcmdutil.changesettemplater(ui, repo, spec)
    t.t.cache.update((k, templater.unquotestring(v))
                     for k, v in repo.ui.configitems('committemplate'))

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()

def hgprefix(msg):
    return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])

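For instance, derived directly from the definition above:

    hgprefix("user: alice\nbranch merge\n")
    # -> "HG: user: alice\nHG: branch merge"
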
def buildcommittext(repo, ctx, subs, extramsg):
    edittext = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        edittext.append(ctx.description())
    edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(hgprefix(_("Enter commit message."
                               " Lines beginning with 'HG:' are removed.")))
    edittext.append(hgprefix(extramsg))
    edittext.append("HG: --")
    edittext.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        edittext.append(hgprefix(_("branch merge")))
    if ctx.branch():
        edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
    edittext.extend([hgprefix(_("added %s") % f) for f in added])
    edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
    edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
    if not added and not modified and not removed:
        edittext.append(hgprefix(_("no files changed")))
    edittext.append("")

    return "\n".join(edittext)

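The resulting editor text is the description followed by an 'HG:'-prefixed comment block, which commitforceeditor() later strips with its ^HG: regular expression; a typical (purely illustrative) result:

    Fix the frobnicator


    HG: Enter commit message. Lines beginning with 'HG:' are removed.
    HG: Leave message empty to abort commit.
    HG: --
    HG: user: Alice <alice@example.com>
    HG: branch 'default'
    HG: changed frobnicator.py
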
def commitstatus(repo, node, branch, bheads=None, opts=None):
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))

def postcommitstatus(repo, pats, opts):
    return repo.status(match=scmutil.match(repo[None], pats, opts))

2597 def revert(ui, repo, ctx, parents, *pats, **opts):
2595 def revert(ui, repo, ctx, parents, *pats, **opts):
2598 opts = pycompat.byteskwargs(opts)
2596 opts = pycompat.byteskwargs(opts)
2599 parent, p2 = parents
2597 parent, p2 = parents
2600 node = ctx.node()
2598 node = ctx.node()
2601
2599
2602 mf = ctx.manifest()
2600 mf = ctx.manifest()
2603 if node == p2:
2601 if node == p2:
2604 parent = p2
2602 parent = p2
2605
2603
2606 # need all matching names in dirstate and manifest of target rev,
2604 # need all matching names in dirstate and manifest of target rev,
2607 # so have to walk both. do not print errors if files exist in one
2605 # so have to walk both. do not print errors if files exist in one
2608 # but not other. in both cases, filesets should be evaluated against
2606 # but not other. in both cases, filesets should be evaluated against
2609 # workingctx to get consistent result (issue4497). this means 'set:**'
2607 # workingctx to get consistent result (issue4497). this means 'set:**'
2610 # cannot be used to select missing files from target rev.
2608 # cannot be used to select missing files from target rev.
2611
2609
2612 # `names` is a mapping for all elements in working copy and target revision
2610 # `names` is a mapping for all elements in working copy and target revision
2613 # The mapping is in the form:
2611 # The mapping is in the form:
2614 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2612 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2615 names = {}
2613 names = {}
2616
2614
2617 with repo.wlock():
2615 with repo.wlock():
2618 ## filling of the `names` mapping
2616 ## filling of the `names` mapping
2619 # walk dirstate to fill `names`
2617 # walk dirstate to fill `names`
2620
2618
2621 interactive = opts.get('interactive', False)
2619 interactive = opts.get('interactive', False)
2622 wctx = repo[None]
2620 wctx = repo[None]
2623 m = scmutil.match(wctx, pats, opts)
2621 m = scmutil.match(wctx, pats, opts)
2624
2622
2625 # we'll need this later
2623 # we'll need this later
2626 targetsubs = sorted(s for s in wctx.substate if m(s))
2624 targetsubs = sorted(s for s in wctx.substate if m(s))
2627
2625
2628 if not m.always():
2626 if not m.always():
2629 matcher = matchmod.badmatch(m, lambda x, y: False)
2627 matcher = matchmod.badmatch(m, lambda x, y: False)
2630 for abs in wctx.walk(matcher):
2628 for abs in wctx.walk(matcher):
2631 names[abs] = m.rel(abs), m.exact(abs)
2629 names[abs] = m.rel(abs), m.exact(abs)
2632
2630
2633 # walk target manifest to fill `names`
2631 # walk target manifest to fill `names`
2634
2632
2635 def badfn(path, msg):
2633 def badfn(path, msg):
2636 if path in names:
2634 if path in names:
2637 return
2635 return
2638 if path in ctx.substate:
2636 if path in ctx.substate:
2639 return
2637 return
2640 path_ = path + '/'
2638 path_ = path + '/'
2641 for f in names:
2639 for f in names:
2642 if f.startswith(path_):
2640 if f.startswith(path_):
2643 return
2641 return
2644 ui.warn("%s: %s\n" % (m.rel(path), msg))
2642 ui.warn("%s: %s\n" % (m.rel(path), msg))
2645
2643
2646 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2644 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2647 if abs not in names:
2645 if abs not in names:
2648 names[abs] = m.rel(abs), m.exact(abs)
2646 names[abs] = m.rel(abs), m.exact(abs)
2649
2647
2650 # Find status of all file in `names`.
2648 # Find status of all file in `names`.
2651 m = scmutil.matchfiles(repo, names)
2649 m = scmutil.matchfiles(repo, names)
2652
2650
2653 changes = repo.status(node1=node, match=m,
2651 changes = repo.status(node1=node, match=m,
2654 unknown=True, ignored=True, clean=True)
2652 unknown=True, ignored=True, clean=True)
2655 else:
2653 else:
2656 changes = repo.status(node1=node, match=m)
2654 changes = repo.status(node1=node, match=m)
2657 for kind in changes:
2655 for kind in changes:
2658 for abs in kind:
2656 for abs in kind:
2659 names[abs] = m.rel(abs), m.exact(abs)
2657 names[abs] = m.rel(abs), m.exact(abs)
2660
2658
2661 m = scmutil.matchfiles(repo, names)
2659 m = scmutil.matchfiles(repo, names)
2662
2660
2663 modified = set(changes.modified)
2661 modified = set(changes.modified)
2664 added = set(changes.added)
2662 added = set(changes.added)
2665 removed = set(changes.removed)
2663 removed = set(changes.removed)
2666 _deleted = set(changes.deleted)
2664 _deleted = set(changes.deleted)
2667 unknown = set(changes.unknown)
2665 unknown = set(changes.unknown)
2668 unknown.update(changes.ignored)
2666 unknown.update(changes.ignored)
2669 clean = set(changes.clean)
2667 clean = set(changes.clean)
2670 modadded = set()
2668 modadded = set()
2671
2669
2672 # We need to account for the state of the file in the dirstate,
2670 # We need to account for the state of the file in the dirstate,
2673 # even when we revert against something other than the parent. This will
2671 # even when we revert against something other than the parent. This will
2674 # slightly alter the behavior of revert (doing a backup or not, delete
2672 # slightly alter the behavior of revert (doing a backup or not, delete
2675 # or just forget etc).
2673 # or just forget etc).
2676 if parent == node:
2674 if parent == node:
2677 dsmodified = modified
2675 dsmodified = modified
2678 dsadded = added
2676 dsadded = added
2679 dsremoved = removed
2677 dsremoved = removed
2680 # store all local modifications, useful later for rename detection
2678 # store all local modifications, useful later for rename detection
2681 localchanges = dsmodified | dsadded
2679 localchanges = dsmodified | dsadded
2682 modified, added, removed = set(), set(), set()
2680 modified, added, removed = set(), set(), set()
2683 else:
2681 else:
2684 changes = repo.status(node1=parent, match=m)
2682 changes = repo.status(node1=parent, match=m)
2685 dsmodified = set(changes.modified)
2683 dsmodified = set(changes.modified)
2686 dsadded = set(changes.added)
2684 dsadded = set(changes.added)
2687 dsremoved = set(changes.removed)
2685 dsremoved = set(changes.removed)
2688 # store all local modifications, useful later for rename detection
2686 # store all local modifications, useful later for rename detection
2689 localchanges = dsmodified | dsadded
2687 localchanges = dsmodified | dsadded
2690
2688
2691 # only take into account removes between wc and target
2689 # only take into account removes between wc and target
2692 clean |= dsremoved - removed
2690 clean |= dsremoved - removed
2693 dsremoved &= removed
2691 dsremoved &= removed
2694 # distinguish between dirstate removes and others
2692 # distinguish between dirstate removes and others
2695 removed -= dsremoved
2693 removed -= dsremoved
2696
2694
2697 modadded = added & dsmodified
2695 modadded = added & dsmodified
2698 added -= modadded
2696 added -= modadded
2699
2697
2700 # tell newly modified files apart.
2698 # tell newly modified files apart.
2701 dsmodified &= modified
2699 dsmodified &= modified
2702 dsmodified |= modified & dsadded # dirstate added may need backup
2700 dsmodified |= modified & dsadded # dirstate added may need backup
2703 modified -= dsmodified
2701 modified -= dsmodified
2704
2702
2705 # We need to wait for some post-processing to update this set
2703 # We need to wait for some post-processing to update this set
2706 # before making the distinction. The dirstate will be used for
2704 # before making the distinction. The dirstate will be used for
2707 # that purpose.
2705 # that purpose.
2708 dsadded = added
2706 dsadded = added
2709
2707
2710 # in case of merge, files that are actually added can be reported as
2708 # in case of merge, files that are actually added can be reported as
2711 # modified, so we need to post-process the result
2709 # modified, so we need to post-process the result
2712 if p2 != nullid:
2710 if p2 != nullid:
2713 mergeadd = set(dsmodified)
2711 mergeadd = set(dsmodified)
2714 for path in dsmodified:
2712 for path in dsmodified:
2715 if path in mf:
2713 if path in mf:
2716 mergeadd.remove(path)
2714 mergeadd.remove(path)
2717 dsadded |= mergeadd
2715 dsadded |= mergeadd
2718 dsmodified -= mergeadd
2716 dsmodified -= mergeadd
2719
2717
2720 # if f is a rename, update `names` to also revert the source
2718 # if f is a rename, update `names` to also revert the source
2721 cwd = repo.getcwd()
2719 cwd = repo.getcwd()
2722 for f in localchanges:
2720 for f in localchanges:
2723 src = repo.dirstate.copied(f)
2721 src = repo.dirstate.copied(f)
2724 # XXX should we check for rename down to target node?
2722 # XXX should we check for rename down to target node?
2725 if src and src not in names and repo.dirstate[src] == 'r':
2723 if src and src not in names and repo.dirstate[src] == 'r':
2726 dsremoved.add(src)
2724 dsremoved.add(src)
2727 names[src] = (repo.pathto(src, cwd), True)
2725 names[src] = (repo.pathto(src, cwd), True)
2728
2726
2729 # determine the exact nature of the deleted files
2727 # determine the exact nature of the deleted files
2730 deladded = set(_deleted)
2728 deladded = set(_deleted)
2731 for path in _deleted:
2729 for path in _deleted:
2732 if path in mf:
2730 if path in mf:
2733 deladded.remove(path)
2731 deladded.remove(path)
2734 deleted = _deleted - deladded
2732 deleted = _deleted - deladded
2735
2733
2736 # distinguish between files to forget and the others
2734 # distinguish between files to forget and the others
2737 added = set()
2735 added = set()
2738 for abs in dsadded:
2736 for abs in dsadded:
2739 if repo.dirstate[abs] != 'a':
2737 if repo.dirstate[abs] != 'a':
2740 added.add(abs)
2738 added.add(abs)
2741 dsadded -= added
2739 dsadded -= added
2742
2740
2743 for abs in deladded:
2741 for abs in deladded:
2744 if repo.dirstate[abs] == 'a':
2742 if repo.dirstate[abs] == 'a':
2745 dsadded.add(abs)
2743 dsadded.add(abs)
2746 deladded -= dsadded
2744 deladded -= dsadded
2747
2745
2748 # For files marked as removed, we check if an unknown file is present at
2746 # For files marked as removed, we check if an unknown file is present at
2749 # the same path. If such a file exists, it may need to be backed up.
2747 # the same path. If such a file exists, it may need to be backed up.
2750 # Making the distinction at this stage helps keep the backup
2748 # Making the distinction at this stage helps keep the backup
2751 # logic simpler.
2749 # logic simpler.
2752 removunk = set()
2750 removunk = set()
2753 for abs in removed:
2751 for abs in removed:
2754 target = repo.wjoin(abs)
2752 target = repo.wjoin(abs)
2755 if os.path.lexists(target):
2753 if os.path.lexists(target):
2756 removunk.add(abs)
2754 removunk.add(abs)
2757 removed -= removunk
2755 removed -= removunk
2758
2756
2759 dsremovunk = set()
2757 dsremovunk = set()
2760 for abs in dsremoved:
2758 for abs in dsremoved:
2761 target = repo.wjoin(abs)
2759 target = repo.wjoin(abs)
2762 if os.path.lexists(target):
2760 if os.path.lexists(target):
2763 dsremovunk.add(abs)
2761 dsremovunk.add(abs)
2764 dsremoved -= dsremovunk
2762 dsremoved -= dsremovunk
2765
2763
2766 # actions to be actually performed by revert
2764 # actions to be actually performed by revert
2767 # (<list of files>, <message>) tuple
2765 # (<list of files>, <message>) tuple
2768 actions = {'revert': ([], _('reverting %s\n')),
2766 actions = {'revert': ([], _('reverting %s\n')),
2769 'add': ([], _('adding %s\n')),
2767 'add': ([], _('adding %s\n')),
2770 'remove': ([], _('removing %s\n')),
2768 'remove': ([], _('removing %s\n')),
2771 'drop': ([], _('removing %s\n')),
2769 'drop': ([], _('removing %s\n')),
2772 'forget': ([], _('forgetting %s\n')),
2770 'forget': ([], _('forgetting %s\n')),
2773 'undelete': ([], _('undeleting %s\n')),
2771 'undelete': ([], _('undeleting %s\n')),
2774 'noop': (None, _('no changes needed to %s\n')),
2772 'noop': (None, _('no changes needed to %s\n')),
2775 'unknown': (None, _('file not managed: %s\n')),
2773 'unknown': (None, _('file not managed: %s\n')),
2776 }
2774 }
2777
2775
2778 # "constant" that convey the backup strategy.
2776 # "constant" that convey the backup strategy.
2779 # All set to `discard` if `no-backup` is set do avoid checking
2777 # All set to `discard` if `no-backup` is set do avoid checking
2780 # no_backup lower in the code.
2778 # no_backup lower in the code.
2781 # These values are ordered for comparison purposes
2779 # These values are ordered for comparison purposes
2782 backupinteractive = 3 # do backup if interactively modified
2780 backupinteractive = 3 # do backup if interactively modified
2783 backup = 2 # unconditionally do backup
2781 backup = 2 # unconditionally do backup
2784 check = 1 # check if the existing file differs from target
2782 check = 1 # check if the existing file differs from target
2785 discard = 0 # never do backup
2783 discard = 0 # never do backup
2786 if opts.get('no_backup'):
2784 if opts.get('no_backup'):
2787 backupinteractive = backup = check = discard
2785 backupinteractive = backup = check = discard
2788 if interactive:
2786 if interactive:
2789 dsmodifiedbackup = backupinteractive
2787 dsmodifiedbackup = backupinteractive
2790 else:
2788 else:
2791 dsmodifiedbackup = backup
2789 dsmodifiedbackup = backup
2792 tobackup = set()
2790 tobackup = set()
2793
2791
2794 backupanddel = actions['remove']
2792 backupanddel = actions['remove']
2795 if not opts.get('no_backup'):
2793 if not opts.get('no_backup'):
2796 backupanddel = actions['drop']
2794 backupanddel = actions['drop']
2797
2795
2798 disptable = (
2796 disptable = (
2799 # dispatch table:
2797 # dispatch table:
2800 # file state
2798 # file state
2801 # action
2799 # action
2802 # make backup
2800 # make backup
2803
2801
2804 ## Sets that result in changes to files on disk
2802 ## Sets that result in changes to files on disk
2805 # Modified compared to target, no local change
2803 # Modified compared to target, no local change
2806 (modified, actions['revert'], discard),
2804 (modified, actions['revert'], discard),
2807 # Modified compared to target, but local file is deleted
2805 # Modified compared to target, but local file is deleted
2808 (deleted, actions['revert'], discard),
2806 (deleted, actions['revert'], discard),
2809 # Modified compared to target, local change
2807 # Modified compared to target, local change
2810 (dsmodified, actions['revert'], dsmodifiedbackup),
2808 (dsmodified, actions['revert'], dsmodifiedbackup),
2811 # Added since target
2809 # Added since target
2812 (added, actions['remove'], discard),
2810 (added, actions['remove'], discard),
2813 # Added in working directory
2811 # Added in working directory
2814 (dsadded, actions['forget'], discard),
2812 (dsadded, actions['forget'], discard),
2815 # Added since target, have local modification
2813 # Added since target, have local modification
2816 (modadded, backupanddel, backup),
2814 (modadded, backupanddel, backup),
2817 # Added since target but file is missing in working directory
2815 # Added since target but file is missing in working directory
2818 (deladded, actions['drop'], discard),
2816 (deladded, actions['drop'], discard),
2819 # Removed since target, before working copy parent
2817 # Removed since target, before working copy parent
2820 (removed, actions['add'], discard),
2818 (removed, actions['add'], discard),
2821 # Same as `removed` but an unknown file exists at the same path
2819 # Same as `removed` but an unknown file exists at the same path
2822 (removunk, actions['add'], check),
2820 (removunk, actions['add'], check),
2823 # Removed since target, marked as such in working copy parent
2821 # Removed since target, marked as such in working copy parent
2824 (dsremoved, actions['undelete'], discard),
2822 (dsremoved, actions['undelete'], discard),
2825 # Same as `dsremoved` but an unknown file exists at the same path
2823 # Same as `dsremoved` but an unknown file exists at the same path
2826 (dsremovunk, actions['undelete'], check),
2824 (dsremovunk, actions['undelete'], check),
2827 ## the following sets do not result in any file changes
2825 ## the following sets do not result in any file changes
2828 # File with no modification
2826 # File with no modification
2829 (clean, actions['noop'], discard),
2827 (clean, actions['noop'], discard),
2830 # Existing file, not tracked anywhere
2828 # Existing file, not tracked anywhere
2831 (unknown, actions['unknown'], discard),
2829 (unknown, actions['unknown'], discard),
2832 )
2830 )
2833
2831
2834 for abs, (rel, exact) in sorted(names.items()):
2832 for abs, (rel, exact) in sorted(names.items()):
2835 # target file to be touched on disk (relative to cwd)
2833 # target file to be touched on disk (relative to cwd)
2836 target = repo.wjoin(abs)
2834 target = repo.wjoin(abs)
2837 # search the entry in the dispatch table.
2835 # search the entry in the dispatch table.
2838 # if the file is in any of these sets, it was touched in the working
2836 # if the file is in any of these sets, it was touched in the working
2839 # directory parent and we are sure it needs to be reverted.
2837 # directory parent and we are sure it needs to be reverted.
2840 for table, (xlist, msg), dobackup in disptable:
2838 for table, (xlist, msg), dobackup in disptable:
2841 if abs not in table:
2839 if abs not in table:
2842 continue
2840 continue
2843 if xlist is not None:
2841 if xlist is not None:
2844 xlist.append(abs)
2842 xlist.append(abs)
2845 if dobackup:
2843 if dobackup:
2846 # If in interactive mode, don't automatically create
2844 # If in interactive mode, don't automatically create
2847 # .orig files (issue4793)
2845 # .orig files (issue4793)
2848 if dobackup == backupinteractive:
2846 if dobackup == backupinteractive:
2849 tobackup.add(abs)
2847 tobackup.add(abs)
2850 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
2848 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
2851 bakname = scmutil.origpath(ui, repo, rel)
2849 bakname = scmutil.origpath(ui, repo, rel)
2852 ui.note(_('saving current version of %s as %s\n') %
2850 ui.note(_('saving current version of %s as %s\n') %
2853 (rel, bakname))
2851 (rel, bakname))
2854 if not opts.get('dry_run'):
2852 if not opts.get('dry_run'):
2855 if interactive:
2853 if interactive:
2856 util.copyfile(target, bakname)
2854 util.copyfile(target, bakname)
2857 else:
2855 else:
2858 util.rename(target, bakname)
2856 util.rename(target, bakname)
2859 if ui.verbose or not exact:
2857 if ui.verbose or not exact:
2860 if not isinstance(msg, bytes):
2858 if not isinstance(msg, bytes):
2861 msg = msg(abs)
2859 msg = msg(abs)
2862 ui.status(msg % rel)
2860 ui.status(msg % rel)
2863 elif exact:
2861 elif exact:
2864 ui.warn(msg % rel)
2862 ui.warn(msg % rel)
2865 break
2863 break
2866
2864
2867 if not opts.get('dry_run'):
2865 if not opts.get('dry_run'):
2868 needdata = ('revert', 'add', 'undelete')
2866 needdata = ('revert', 'add', 'undelete')
2869 if _revertprefetch is not _revertprefetchstub:
2867 if _revertprefetch is not _revertprefetchstub:
2870 ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, use "
2868 ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, use "
2871 "'cmdutil._prefetchfiles'", '4.6', stacklevel=1)
2869 "'cmdutil._prefetchfiles'", '4.6', stacklevel=1)
2872 _revertprefetch(repo, ctx,
2870 _revertprefetch(repo, ctx,
2873 *[actions[name][0] for name in needdata])
2871 *[actions[name][0] for name in needdata])
2874 oplist = [actions[name][0] for name in needdata]
2872 oplist = [actions[name][0] for name in needdata]
2875 _prefetchfiles(repo, ctx,
2873 _prefetchfiles(repo, ctx,
2876 [f for sublist in oplist for f in sublist])
2874 [f for sublist in oplist for f in sublist])
2877 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
2875 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
2878
2876
2879 if targetsubs:
2877 if targetsubs:
2880 # Revert the subrepos on the revert list
2878 # Revert the subrepos on the revert list
2881 for sub in targetsubs:
2879 for sub in targetsubs:
2882 try:
2880 try:
2883 wctx.sub(sub).revert(ctx.substate[sub], *pats,
2881 wctx.sub(sub).revert(ctx.substate[sub], *pats,
2884 **pycompat.strkwargs(opts))
2882 **pycompat.strkwargs(opts))
2885 except KeyError:
2883 except KeyError:
2886 raise error.Abort("subrepository '%s' does not exist in %s!"
2884 raise error.Abort("subrepository '%s' does not exist in %s!"
2887 % (sub, short(ctx.node())))
2885 % (sub, short(ctx.node())))
2888
2886
2889 def _revertprefetchstub(repo, ctx, *files):
2887 def _revertprefetchstub(repo, ctx, *files):
2890 """Stub method for detecting extension wrapping of _revertprefetch(), to
2888 """Stub method for detecting extension wrapping of _revertprefetch(), to
2891 issue a deprecation warning."""
2889 issue a deprecation warning."""
2892
2890
2893 _revertprefetch = _revertprefetchstub
2891 _revertprefetch = _revertprefetchstub
2894
2892
2895 def _prefetchfiles(repo, ctx, files):
2893 def _prefetchfiles(repo, ctx, files):
2896 """Let extensions changing the storage layer prefetch content for any non
2894 """Let extensions changing the storage layer prefetch content for any non
2897 merge based command."""
2895 merge based command."""
2898
2896
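# A minimal sketch (hypothetical extension code, not part of this module) of
# how a storage extension might hook the prefetch point above; the
# `fetchfromserver` helper is assumed to be provided by the extension itself:
#
#     from mercurial import cmdutil, extensions
#
#     def _prefetchwrapper(orig, repo, ctx, files):
#         fetchfromserver(repo, ctx.hex(), files)  # warm the local store first
#         return orig(repo, ctx, files)
#
#     def extsetup(ui):
#         extensions.wrapfunction(cmdutil, '_prefetchfiles', _prefetchwrapper)
#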
2899 def _performrevert(repo, parents, ctx, actions, interactive=False,
2897 def _performrevert(repo, parents, ctx, actions, interactive=False,
2900 tobackup=None):
2898 tobackup=None):
2901 """function that actually perform all the actions computed for revert
2899 """function that actually perform all the actions computed for revert
2902
2900
2903 This is an independent function to let extension to plug in and react to
2901 This is an independent function to let extension to plug in and react to
2904 the imminent revert.
2902 the imminent revert.
2905
2903
2906 Make sure you have the working directory locked when calling this function.
2904 Make sure you have the working directory locked when calling this function.
2907 """
2905 """
2908 parent, p2 = parents
2906 parent, p2 = parents
2909 node = ctx.node()
2907 node = ctx.node()
2910 excluded_files = []
2908 excluded_files = []
2911 matcher_opts = {"exclude": excluded_files}
2909 matcher_opts = {"exclude": excluded_files}
2912
2910
2913 def checkout(f):
2911 def checkout(f):
2914 fc = ctx[f]
2912 fc = ctx[f]
2915 repo.wwrite(f, fc.data(), fc.flags())
2913 repo.wwrite(f, fc.data(), fc.flags())
2916
2914
2917 def doremove(f):
2915 def doremove(f):
2918 try:
2916 try:
2919 repo.wvfs.unlinkpath(f)
2917 repo.wvfs.unlinkpath(f)
2920 except OSError:
2918 except OSError:
2921 pass
2919 pass
2922 repo.dirstate.remove(f)
2920 repo.dirstate.remove(f)
2923
2921
2924 audit_path = pathutil.pathauditor(repo.root, cached=True)
2922 audit_path = pathutil.pathauditor(repo.root, cached=True)
2925 for f in actions['forget'][0]:
2923 for f in actions['forget'][0]:
2926 if interactive:
2924 if interactive:
2927 choice = repo.ui.promptchoice(
2925 choice = repo.ui.promptchoice(
2928 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
2926 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
2929 if choice == 0:
2927 if choice == 0:
2930 repo.dirstate.drop(f)
2928 repo.dirstate.drop(f)
2931 else:
2929 else:
2932 excluded_files.append(repo.wjoin(f))
2930 excluded_files.append(repo.wjoin(f))
2933 else:
2931 else:
2934 repo.dirstate.drop(f)
2932 repo.dirstate.drop(f)
2935 for f in actions['remove'][0]:
2933 for f in actions['remove'][0]:
2936 audit_path(f)
2934 audit_path(f)
2937 if interactive:
2935 if interactive:
2938 choice = repo.ui.promptchoice(
2936 choice = repo.ui.promptchoice(
2939 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
2937 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
2940 if choice == 0:
2938 if choice == 0:
2941 doremove(f)
2939 doremove(f)
2942 else:
2940 else:
2943 excluded_files.append(repo.wjoin(f))
2941 excluded_files.append(repo.wjoin(f))
2944 else:
2942 else:
2945 doremove(f)
2943 doremove(f)
2946 for f in actions['drop'][0]:
2944 for f in actions['drop'][0]:
2947 audit_path(f)
2945 audit_path(f)
2948 repo.dirstate.remove(f)
2946 repo.dirstate.remove(f)
2949
2947
2950 normal = None
2948 normal = None
2951 if node == parent:
2949 if node == parent:
2952 # We're reverting to our parent. If possible, we'd like status
2950 # We're reverting to our parent. If possible, we'd like status
2953 # to report the file as clean. We have to use normallookup for
2951 # to report the file as clean. We have to use normallookup for
2954 # merges to avoid losing information about merged/dirty files.
2952 # merges to avoid losing information about merged/dirty files.
2955 if p2 != nullid:
2953 if p2 != nullid:
2956 normal = repo.dirstate.normallookup
2954 normal = repo.dirstate.normallookup
2957 else:
2955 else:
2958 normal = repo.dirstate.normal
2956 normal = repo.dirstate.normal
2959
2957
2960 newlyaddedandmodifiedfiles = set()
2958 newlyaddedandmodifiedfiles = set()
2961 if interactive:
2959 if interactive:
2962 # Prompt the user for changes to revert
2960 # Prompt the user for changes to revert
2963 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
2961 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
2964 m = scmutil.match(ctx, torevert, matcher_opts)
2962 m = scmutil.match(ctx, torevert, matcher_opts)
2965 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
2963 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
2966 diffopts.nodates = True
2964 diffopts.nodates = True
2967 diffopts.git = True
2965 diffopts.git = True
2968 operation = 'discard'
2966 operation = 'discard'
2969 reversehunks = True
2967 reversehunks = True
2970 if node != parent:
2968 if node != parent:
2971 operation = 'apply'
2969 operation = 'apply'
2972 reversehunks = False
2970 reversehunks = False
2973 if reversehunks:
2971 if reversehunks:
2974 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
2972 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
2975 else:
2973 else:
2976 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
2974 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
2977 originalchunks = patch.parsepatch(diff)
2975 originalchunks = patch.parsepatch(diff)
2978
2976
2979 try:
2977 try:
2980
2978
2981 chunks, opts = recordfilter(repo.ui, originalchunks,
2979 chunks, opts = recordfilter(repo.ui, originalchunks,
2982 operation=operation)
2980 operation=operation)
2983 if reversehunks:
2981 if reversehunks:
2984 chunks = patch.reversehunks(chunks)
2982 chunks = patch.reversehunks(chunks)
2985
2983
2986 except error.PatchError as err:
2984 except error.PatchError as err:
2987 raise error.Abort(_('error parsing patch: %s') % err)
2985 raise error.Abort(_('error parsing patch: %s') % err)
2988
2986
2989 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
2987 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
2990 if tobackup is None:
2988 if tobackup is None:
2991 tobackup = set()
2989 tobackup = set()
2992 # Apply changes
2990 # Apply changes
2993 fp = stringio()
2991 fp = stringio()
2994 for c in chunks:
2992 for c in chunks:
2995 # Create a backup file only if this hunk should be backed up
2993 # Create a backup file only if this hunk should be backed up
2996 if ishunk(c) and c.header.filename() in tobackup:
2994 if ishunk(c) and c.header.filename() in tobackup:
2997 abs = c.header.filename()
2995 abs = c.header.filename()
2998 target = repo.wjoin(abs)
2996 target = repo.wjoin(abs)
2999 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
2997 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3000 util.copyfile(target, bakname)
2998 util.copyfile(target, bakname)
3001 tobackup.remove(abs)
2999 tobackup.remove(abs)
3002 c.write(fp)
3000 c.write(fp)
3003 dopatch = fp.tell()
3001 dopatch = fp.tell()
3004 fp.seek(0)
3002 fp.seek(0)
3005 if dopatch:
3003 if dopatch:
3006 try:
3004 try:
3007 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3005 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3008 except error.PatchError as err:
3006 except error.PatchError as err:
3009 raise error.Abort(str(err))
3007 raise error.Abort(str(err))
3010 del fp
3008 del fp
3011 else:
3009 else:
3012 for f in actions['revert'][0]:
3010 for f in actions['revert'][0]:
3013 checkout(f)
3011 checkout(f)
3014 if normal:
3012 if normal:
3015 normal(f)
3013 normal(f)
3016
3014
3017 for f in actions['add'][0]:
3015 for f in actions['add'][0]:
3018 # Don't checkout modified files, they are already created by the diff
3016 # Don't checkout modified files, they are already created by the diff
3019 if f not in newlyaddedandmodifiedfiles:
3017 if f not in newlyaddedandmodifiedfiles:
3020 checkout(f)
3018 checkout(f)
3021 repo.dirstate.add(f)
3019 repo.dirstate.add(f)
3022
3020
3023 normal = repo.dirstate.normallookup
3021 normal = repo.dirstate.normallookup
3024 if node == parent and p2 == nullid:
3022 if node == parent and p2 == nullid:
3025 normal = repo.dirstate.normal
3023 normal = repo.dirstate.normal
3026 for f in actions['undelete'][0]:
3024 for f in actions['undelete'][0]:
3027 checkout(f)
3025 checkout(f)
3028 normal(f)
3026 normal(f)
3029
3027
3030 copied = copies.pathcopies(repo[parent], ctx)
3028 copied = copies.pathcopies(repo[parent], ctx)
3031
3029
3032 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3030 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3033 if f in copied:
3031 if f in copied:
3034 repo.dirstate.copy(copied[f], f)
3032 repo.dirstate.copy(copied[f], f)
3035
3033
3036 class command(registrar.command):
3034 class command(registrar.command):
3037 """deprecated: used registrar.command instead"""
3035 """deprecated: used registrar.command instead"""
3038 def _doregister(self, func, name, *args, **kwargs):
3036 def _doregister(self, func, name, *args, **kwargs):
3039 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3037 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3040 return super(command, self)._doregister(func, name, *args, **kwargs)
3038 return super(command, self)._doregister(func, name, *args, **kwargs)
3041
3039
3042 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3040 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3043 # commands.outgoing. "missing" is "missing" of the result of
3041 # commands.outgoing. "missing" is "missing" of the result of
3044 # "findcommonoutgoing()"
3042 # "findcommonoutgoing()"
3045 outgoinghooks = util.hooks()
3043 outgoinghooks = util.hooks()
3046
3044
3047 # a list of (ui, repo) functions called by commands.summary
3045 # a list of (ui, repo) functions called by commands.summary
3048 summaryhooks = util.hooks()
3046 summaryhooks = util.hooks()
3049
3047
3050 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3048 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3051 #
3049 #
3052 # functions should return tuple of booleans below, if 'changes' is None:
3050 # functions should return tuple of booleans below, if 'changes' is None:
3053 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3051 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3054 #
3052 #
3055 # otherwise, 'changes' is a tuple of tuples below:
3053 # otherwise, 'changes' is a tuple of tuples below:
3056 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3054 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3057 # - (desturl, destbranch, destpeer, outgoing)
3055 # - (desturl, destbranch, destpeer, outgoing)
3058 summaryremotehooks = util.hooks()
3056 summaryremotehooks = util.hooks()
3059
3057
3060 # A list of state files kept by multistep operations like graft.
3058 # A list of state files kept by multistep operations like graft.
3061 # Since graft cannot be aborted, it is considered 'clearable' by update.
3059 # Since graft cannot be aborted, it is considered 'clearable' by update.
3062 # note: bisect is intentionally excluded
3060 # note: bisect is intentionally excluded
3063 # (state file, clearable, allowcommit, error, hint)
3061 # (state file, clearable, allowcommit, error, hint)
3064 unfinishedstates = [
3062 unfinishedstates = [
3065 ('graftstate', True, False, _('graft in progress'),
3063 ('graftstate', True, False, _('graft in progress'),
3066 _("use 'hg graft --continue' or 'hg update' to abort")),
3064 _("use 'hg graft --continue' or 'hg update' to abort")),
3067 ('updatestate', True, False, _('last update was interrupted'),
3065 ('updatestate', True, False, _('last update was interrupted'),
3068 _("use 'hg update' to get a consistent checkout"))
3066 _("use 'hg update' to get a consistent checkout"))
3069 ]
3067 ]
3070
3068
3071 def checkunfinished(repo, commit=False):
3069 def checkunfinished(repo, commit=False):
3072 '''Look for an unfinished multistep operation, like graft, and abort
3070 '''Look for an unfinished multistep operation, like graft, and abort
3073 if found. It's probably good to check this right before
3071 if found. It's probably good to check this right before
3074 bailifchanged().
3072 bailifchanged().
3075 '''
3073 '''
3076 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3074 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3077 if commit and allowcommit:
3075 if commit and allowcommit:
3078 continue
3076 continue
3079 if repo.vfs.exists(f):
3077 if repo.vfs.exists(f):
3080 raise error.Abort(msg, hint=hint)
3078 raise error.Abort(msg, hint=hint)
3081
3079
3082 def clearunfinished(repo):
3080 def clearunfinished(repo):
3083 '''Check for unfinished operations (as above), and clear the ones
3081 '''Check for unfinished operations (as above), and clear the ones
3084 that are clearable.
3082 that are clearable.
3085 '''
3083 '''
3086 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3084 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3087 if not clearable and repo.vfs.exists(f):
3085 if not clearable and repo.vfs.exists(f):
3088 raise error.Abort(msg, hint=hint)
3086 raise error.Abort(msg, hint=hint)
3089 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3087 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3090 if clearable and repo.vfs.exists(f):
3088 if clearable and repo.vfs.exists(f):
3091 util.unlink(repo.vfs.join(f))
3089 util.unlink(repo.vfs.join(f))
3092
3090
3093 afterresolvedstates = [
3091 afterresolvedstates = [
3094 ('graftstate',
3092 ('graftstate',
3095 _('hg graft --continue')),
3093 _('hg graft --continue')),
3096 ]
3094 ]
3097
3095
3098 def howtocontinue(repo):
3096 def howtocontinue(repo):
3099 '''Check for an unfinished operation and return the command to finish
3097 '''Check for an unfinished operation and return the command to finish
3100 it.
3098 it.
3101
3099
3102 afterresolvedstates tuples define a .hg/{file} and the corresponding
3100 afterresolvedstates tuples define a .hg/{file} and the corresponding
3103 command needed to finish it.
3101 command needed to finish it.
3104
3102
3105 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3103 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3106 a boolean.
3104 a boolean.
3107 '''
3105 '''
3108 contmsg = _("continue: %s")
3106 contmsg = _("continue: %s")
3109 for f, msg in afterresolvedstates:
3107 for f, msg in afterresolvedstates:
3110 if repo.vfs.exists(f):
3108 if repo.vfs.exists(f):
3111 return contmsg % msg, True
3109 return contmsg % msg, True
3112 if repo[None].dirty(missing=True, merge=False, branch=False):
3110 if repo[None].dirty(missing=True, merge=False, branch=False):
3113 return contmsg % _("hg commit"), False
3111 return contmsg % _("hg commit"), False
3114 return None, None
3112 return None, None
3115
3113
3116 def checkafterresolved(repo):
3114 def checkafterresolved(repo):
3117 '''Inform the user about the next action after completing hg resolve
3115 '''Inform the user about the next action after completing hg resolve
3118
3116
3119 If there's a matching afterresolvedstates, howtocontinue will yield
3117 If there's a matching afterresolvedstates, howtocontinue will yield
3120 repo.ui.warn as the reporter.
3118 repo.ui.warn as the reporter.
3121
3119
3122 Otherwise, it will yield repo.ui.note.
3120 Otherwise, it will yield repo.ui.note.
3123 '''
3121 '''
3124 msg, warning = howtocontinue(repo)
3122 msg, warning = howtocontinue(repo)
3125 if msg is not None:
3123 if msg is not None:
3126 if warning:
3124 if warning:
3127 repo.ui.warn("%s\n" % msg)
3125 repo.ui.warn("%s\n" % msg)
3128 else:
3126 else:
3129 repo.ui.note("%s\n" % msg)
3127 repo.ui.note("%s\n" % msg)
3130
3128
3131 def wrongtooltocontinue(repo, task):
3129 def wrongtooltocontinue(repo, task):
3132 '''Raise an abort suggesting how to properly continue if there is an
3130 '''Raise an abort suggesting how to properly continue if there is an
3133 active task.
3131 active task.
3134
3132
3135 Uses howtocontinue() to find the active task.
3133 Uses howtocontinue() to find the active task.
3136
3134
3137 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3135 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3138 a hint.
3136 a hint.
3139 '''
3137 '''
3140 after = howtocontinue(repo)
3138 after = howtocontinue(repo)
3141 hint = None
3139 hint = None
3142 if after[1]:
3140 if after[1]:
3143 hint = after[0]
3141 hint = after[0]
3144 raise error.Abort(_('no %s in progress') % task, hint=hint)
3142 raise error.Abort(_('no %s in progress') % task, hint=hint)
3145
3143
3146 class changeset_printer(logcmdutil.changesetprinter):
3144 class changeset_printer(logcmdutil.changesetprinter):
3147
3145
3148 def __init__(self, ui, *args, **kwargs):
3146 def __init__(self, ui, *args, **kwargs):
3149 msg = ("'cmdutil.changeset_printer' is deprecated, "
3147 msg = ("'cmdutil.changeset_printer' is deprecated, "
3150 "use 'logcmdutil.logcmdutil'")
3148 "use 'logcmdutil.logcmdutil'")
3151 ui.deprecwarn(msg, "4.6")
3149 ui.deprecwarn(msg, "4.6")
3152 super(changeset_printer, self).__init__(ui, *args, **kwargs)
3150 super(changeset_printer, self).__init__(ui, *args, **kwargs)
3153
3151
3154 def displaygraph(ui, *args, **kwargs):
3152 def displaygraph(ui, *args, **kwargs):
3155 msg = ("'cmdutil.displaygraph' is deprecated, "
3153 msg = ("'cmdutil.displaygraph' is deprecated, "
3156 "use 'logcmdutil.displaygraph'")
3154 "use 'logcmdutil.displaygraph'")
3157 ui.deprecwarn(msg, "4.6")
3155 ui.deprecwarn(msg, "4.6")
3158 return logcmdutil.displaygraph(ui, *args, **kwargs)
3156 return logcmdutil.displaygraph(ui, *args, **kwargs)
3159
3157
3160 def show_changeset(ui, *args, **kwargs):
3158 def show_changeset(ui, *args, **kwargs):
3161 msg = ("'cmdutil.show_changeset' is deprecated, "
3159 msg = ("'cmdutil.show_changeset' is deprecated, "
3162 "use 'logcmdutil.changesetdisplayer'")
3160 "use 'logcmdutil.changesetdisplayer'")
3163 ui.deprecwarn(msg, "4.6")
3161 ui.deprecwarn(msg, "4.6")
3164 return logcmdutil.changesetdisplayer(ui, *args, **kwargs)
3162 return logcmdutil.changesetdisplayer(ui, *args, **kwargs)
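# Migration sketch for the deprecation shims above (the keyword arguments are
# assumed to carry over unchanged from the pre-4.6 cmdutil API):
#
#     # before
#     displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
#     # after
#     displayer = logcmdutil.changesetdisplayer(ui, repo, opts, buffered=True)
#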
@@ -1,2071 +1,2072 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12 import shutil
12 import shutil
13 import struct
13 import struct
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 addednodeid,
17 addednodeid,
18 bin,
18 bin,
19 hex,
19 hex,
20 modifiednodeid,
20 modifiednodeid,
21 nullhex,
21 nullhex,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 )
24 )
25 from . import (
25 from . import (
26 copies,
26 copies,
27 error,
27 error,
28 extensions,
29 filemerge,
28 filemerge,
30 match as matchmod,
29 match as matchmod,
31 obsutil,
30 obsutil,
32 pycompat,
31 pycompat,
33 scmutil,
32 scmutil,
34 subrepoutil,
33 subrepoutil,
35 util,
34 util,
36 worker,
35 worker,
37 )
36 )
38
37
39 _pack = struct.pack
38 _pack = struct.pack
40 _unpack = struct.unpack
39 _unpack = struct.unpack
41
40
42 def _droponode(data):
41 def _droponode(data):
43 # used for compatibility with v1
42 # used for compatibility with v1
44 bits = data.split('\0')
43 bits = data.split('\0')
45 bits = bits[:-2] + bits[-1:]
44 bits = bits[:-2] + bits[-1:]
46 return '\0'.join(bits)
45 return '\0'.join(bits)
47
46
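# Doctest-style sketch of _droponode above; the field values are made up, the
# point is that the second-to-last '\0'-separated field (the "other node"
# entry that v1 does not know about) gets dropped:
#
#     >>> _droponode('file\x00state\x00hash\x00othernode\x00flags')
#     'file\x00state\x00hash\x00flags'
#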
48 class mergestate(object):
47 class mergestate(object):
49 '''track 3-way merge state of individual files
48 '''track 3-way merge state of individual files
50
49
51 The merge state is stored on disk when needed. Two files are used: one with
50 The merge state is stored on disk when needed. Two files are used: one with
52 an old format (version 1), and one with a new format (version 2). Version 2
51 an old format (version 1), and one with a new format (version 2). Version 2
53 stores a superset of the data in version 1, including new kinds of records
52 stores a superset of the data in version 1, including new kinds of records
54 in the future. For more about the new format, see the documentation for
53 in the future. For more about the new format, see the documentation for
55 `_readrecordsv2`.
54 `_readrecordsv2`.
56
55
57 Each record can contain arbitrary content, and has an associated type. This
56 Each record can contain arbitrary content, and has an associated type. This
58 `type` should be a letter. If `type` is uppercase, the record is mandatory:
57 `type` should be a letter. If `type` is uppercase, the record is mandatory:
59 versions of Mercurial that don't support it should abort. If `type` is
58 versions of Mercurial that don't support it should abort. If `type` is
60 lowercase, the record can be safely ignored.
59 lowercase, the record can be safely ignored.
61
60
62 Currently known records:
61 Currently known records:
63
62
64 L: the node of the "local" part of the merge (hexified version)
63 L: the node of the "local" part of the merge (hexified version)
65 O: the node of the "other" part of the merge (hexified version)
64 O: the node of the "other" part of the merge (hexified version)
66 F: an entry for a file to be merged
65 F: an entry for a file to be merged
67 C: a change/delete or delete/change conflict
66 C: a change/delete or delete/change conflict
68 D: a file that the external merge driver will merge internally
67 D: a file that the external merge driver will merge internally
69 (experimental)
68 (experimental)
70 P: a path conflict (file vs directory)
69 P: a path conflict (file vs directory)
71 m: the external merge driver defined for this merge plus its run state
70 m: the external merge driver defined for this merge plus its run state
72 (experimental)
71 (experimental)
73 f: a (filename, dictionary) tuple of optional values for a given file
72 f: a (filename, dictionary) tuple of optional values for a given file
74 X: unsupported mandatory record type (used in tests)
73 X: unsupported mandatory record type (used in tests)
75 x: unsupported advisory record type (used in tests)
74 x: unsupported advisory record type (used in tests)
76 l: the labels for the parts of the merge.
75 l: the labels for the parts of the merge.
77
76
78 Merge driver run states (experimental):
77 Merge driver run states (experimental):
79 u: driver-resolved files unmarked -- needs to be run next time we're about
78 u: driver-resolved files unmarked -- needs to be run next time we're about
80 to resolve or commit
79 to resolve or commit
81 m: driver-resolved files marked -- only needs to be run before commit
80 m: driver-resolved files marked -- only needs to be run before commit
82 s: success/skipped -- does not need to be run any more
81 s: success/skipped -- does not need to be run any more
83
82
84 Merge record states (stored in self._state, indexed by filename):
83 Merge record states (stored in self._state, indexed by filename):
85 u: unresolved conflict
84 u: unresolved conflict
86 r: resolved conflict
85 r: resolved conflict
87 pu: unresolved path conflict (file conflicts with directory)
86 pu: unresolved path conflict (file conflicts with directory)
88 pr: resolved path conflict
87 pr: resolved path conflict
89 d: driver-resolved conflict
88 d: driver-resolved conflict
90
89
91 The resolve command transitions between 'u' and 'r' for conflicts and
90 The resolve command transitions between 'u' and 'r' for conflicts and
92 'pu' and 'pr' for path conflicts.
91 'pu' and 'pr' for path conflicts.
93 '''
92 '''
94 statepathv1 = 'merge/state'
93 statepathv1 = 'merge/state'
95 statepathv2 = 'merge/state2'
94 statepathv2 = 'merge/state2'
96
95
97 @staticmethod
96 @staticmethod
98 def clean(repo, node=None, other=None, labels=None):
97 def clean(repo, node=None, other=None, labels=None):
99 """Initialize a brand new merge state, removing any existing state on
98 """Initialize a brand new merge state, removing any existing state on
100 disk."""
99 disk."""
101 ms = mergestate(repo)
100 ms = mergestate(repo)
102 ms.reset(node, other, labels)
101 ms.reset(node, other, labels)
103 return ms
102 return ms
104
103
105 @staticmethod
104 @staticmethod
106 def read(repo):
105 def read(repo):
107 """Initialize the merge state, reading it from disk."""
106 """Initialize the merge state, reading it from disk."""
108 ms = mergestate(repo)
107 ms = mergestate(repo)
109 ms._read()
108 ms._read()
110 return ms
109 return ms
111
110
112 def __init__(self, repo):
111 def __init__(self, repo):
113 """Initialize the merge state.
112 """Initialize the merge state.
114
113
115 Do not use this directly! Instead call read() or clean()."""
114 Do not use this directly! Instead call read() or clean()."""
116 self._repo = repo
115 self._repo = repo
117 self._dirty = False
116 self._dirty = False
118 self._labels = None
117 self._labels = None
119
118
120 def reset(self, node=None, other=None, labels=None):
119 def reset(self, node=None, other=None, labels=None):
121 self._state = {}
120 self._state = {}
122 self._stateextras = {}
121 self._stateextras = {}
123 self._local = None
122 self._local = None
124 self._other = None
123 self._other = None
125 self._labels = labels
124 self._labels = labels
126 for var in ('localctx', 'otherctx'):
125 for var in ('localctx', 'otherctx'):
127 if var in vars(self):
126 if var in vars(self):
128 delattr(self, var)
127 delattr(self, var)
129 if node:
128 if node:
130 self._local = node
129 self._local = node
131 self._other = other
130 self._other = other
132 self._readmergedriver = None
131 self._readmergedriver = None
133 if self.mergedriver:
132 if self.mergedriver:
134 self._mdstate = 's'
133 self._mdstate = 's'
135 else:
134 else:
136 self._mdstate = 'u'
135 self._mdstate = 'u'
137 shutil.rmtree(self._repo.vfs.join('merge'), True)
136 shutil.rmtree(self._repo.vfs.join('merge'), True)
138 self._results = {}
137 self._results = {}
139 self._dirty = False
138 self._dirty = False
140
139
141 def _read(self):
140 def _read(self):
142 """Analyse each record content to restore a serialized state from disk
141 """Analyse each record content to restore a serialized state from disk
143
142
144 This function processes "record" entries produced by the de-serialization
143 This function processes "record" entries produced by the de-serialization
145 of the on-disk file.
144 of the on-disk file.
146 """
145 """
147 self._state = {}
146 self._state = {}
148 self._stateextras = {}
147 self._stateextras = {}
149 self._local = None
148 self._local = None
150 self._other = None
149 self._other = None
151 for var in ('localctx', 'otherctx'):
150 for var in ('localctx', 'otherctx'):
152 if var in vars(self):
151 if var in vars(self):
153 delattr(self, var)
152 delattr(self, var)
154 self._readmergedriver = None
153 self._readmergedriver = None
155 self._mdstate = 's'
154 self._mdstate = 's'
156 unsupported = set()
155 unsupported = set()
157 records = self._readrecords()
156 records = self._readrecords()
158 for rtype, record in records:
157 for rtype, record in records:
159 if rtype == 'L':
158 if rtype == 'L':
160 self._local = bin(record)
159 self._local = bin(record)
161 elif rtype == 'O':
160 elif rtype == 'O':
162 self._other = bin(record)
161 self._other = bin(record)
163 elif rtype == 'm':
162 elif rtype == 'm':
164 bits = record.split('\0', 1)
163 bits = record.split('\0', 1)
165 mdstate = bits[1]
164 mdstate = bits[1]
166 if len(mdstate) != 1 or mdstate not in 'ums':
165 if len(mdstate) != 1 or mdstate not in 'ums':
167 # the merge driver should be idempotent, so just rerun it
166 # the merge driver should be idempotent, so just rerun it
168 mdstate = 'u'
167 mdstate = 'u'
169
168
170 self._readmergedriver = bits[0]
169 self._readmergedriver = bits[0]
171 self._mdstate = mdstate
170 self._mdstate = mdstate
172 elif rtype in 'FDCP':
171 elif rtype in 'FDCP':
173 bits = record.split('\0')
172 bits = record.split('\0')
174 self._state[bits[0]] = bits[1:]
173 self._state[bits[0]] = bits[1:]
175 elif rtype == 'f':
174 elif rtype == 'f':
176 filename, rawextras = record.split('\0', 1)
175 filename, rawextras = record.split('\0', 1)
177 extraparts = rawextras.split('\0')
176 extraparts = rawextras.split('\0')
178 extras = {}
177 extras = {}
179 i = 0
178 i = 0
180 while i < len(extraparts):
179 while i < len(extraparts):
181 extras[extraparts[i]] = extraparts[i + 1]
180 extras[extraparts[i]] = extraparts[i + 1]
182 i += 2
181 i += 2
183
182
184 self._stateextras[filename] = extras
183 self._stateextras[filename] = extras
185 elif rtype == 'l':
184 elif rtype == 'l':
186 labels = record.split('\0', 2)
185 labels = record.split('\0', 2)
187 self._labels = [l for l in labels if len(l) > 0]
186 self._labels = [l for l in labels if len(l) > 0]
188 elif not rtype.islower():
187 elif not rtype.islower():
189 unsupported.add(rtype)
188 unsupported.add(rtype)
190 self._results = {}
189 self._results = {}
191 self._dirty = False
190 self._dirty = False
192
191
193 if unsupported:
192 if unsupported:
194 raise error.UnsupportedMergeRecords(unsupported)
193 raise error.UnsupportedMergeRecords(unsupported)
195
194
196 def _readrecords(self):
195 def _readrecords(self):
197 """Read merge state from disk and return a list of record (TYPE, data)
196 """Read merge state from disk and return a list of record (TYPE, data)
198
197
199 We read data from both v1 and v2 files and decide which one to use.
198 We read data from both v1 and v2 files and decide which one to use.
200
199
201 V1 has been used by versions prior to 2.9.1 and contains less data than
200 V1 has been used by versions prior to 2.9.1 and contains less data than
202 v2. We read both versions and check if no data in v2 contradicts
201 v2. We read both versions and check if no data in v2 contradicts
203 v1. If there is no contradiction we can safely assume that both v1
202 v1. If there is no contradiction we can safely assume that both v1
204 and v2 were written at the same time and use the extra data in v2. If
203 and v2 were written at the same time and use the extra data in v2. If
205 there is a contradiction we ignore v2 content as we assume an old version
204 there is a contradiction we ignore v2 content as we assume an old version
206 of Mercurial has overwritten the mergestate file and left an old v2
205 of Mercurial has overwritten the mergestate file and left an old v2
207 file around.
206 file around.
208
207
209 returns list of record [(TYPE, data), ...]"""
208 returns list of record [(TYPE, data), ...]"""
210 v1records = self._readrecordsv1()
209 v1records = self._readrecordsv1()
211 v2records = self._readrecordsv2()
210 v2records = self._readrecordsv2()
212 if self._v1v2match(v1records, v2records):
211 if self._v1v2match(v1records, v2records):
213 return v2records
212 return v2records
214 else:
213 else:
215 # v1 file is newer than v2 file, use it
214 # v1 file is newer than v2 file, use it
216 # we have to infer the "other" changeset of the merge
215 # we have to infer the "other" changeset of the merge
217 # we cannot do better than that with v1 of the format
216 # we cannot do better than that with v1 of the format
218 mctx = self._repo[None].parents()[-1]
217 mctx = self._repo[None].parents()[-1]
219 v1records.append(('O', mctx.hex()))
218 v1records.append(('O', mctx.hex()))
220 # add placeholder "other" file node information
219 # add placeholder "other" file node information
221 # nobody is using it yet so we do not need to fetch the data
220 # nobody is using it yet so we do not need to fetch the data
222 # if mctx was wrong `mctx[bits[-2]]` may fail.
221 # if mctx was wrong `mctx[bits[-2]]` may fail.
223 for idx, r in enumerate(v1records):
222 for idx, r in enumerate(v1records):
224 if r[0] == 'F':
223 if r[0] == 'F':
225 bits = r[1].split('\0')
224 bits = r[1].split('\0')
226 bits.insert(-2, '')
225 bits.insert(-2, '')
227 v1records[idx] = (r[0], '\0'.join(bits))
226 v1records[idx] = (r[0], '\0'.join(bits))
228 return v1records
227 return v1records
229
228
230 def _v1v2match(self, v1records, v2records):
229 def _v1v2match(self, v1records, v2records):
231 oldv2 = set() # old format version of v2 record
230 oldv2 = set() # old format version of v2 record
232 for rec in v2records:
231 for rec in v2records:
233 if rec[0] == 'L':
232 if rec[0] == 'L':
234 oldv2.add(rec)
233 oldv2.add(rec)
235 elif rec[0] == 'F':
234 elif rec[0] == 'F':
236 # drop the onode data (not contained in v1)
235 # drop the onode data (not contained in v1)
237 oldv2.add(('F', _droponode(rec[1])))
236 oldv2.add(('F', _droponode(rec[1])))
238 for rec in v1records:
237 for rec in v1records:
239 if rec not in oldv2:
238 if rec not in oldv2:
240 return False
239 return False
241 else:
240 else:
242 return True
241 return True
243
242
244 def _readrecordsv1(self):
243 def _readrecordsv1(self):
245 """read on disk merge state for version 1 file
244 """read on disk merge state for version 1 file
246
245
247 returns list of record [(TYPE, data), ...]
246 returns list of record [(TYPE, data), ...]
248
247
249 Note: the "F" data from this file are one entry short
248 Note: the "F" data from this file are one entry short
250 (no "other file node" entry)
249 (no "other file node" entry)
251 """
250 """
252 records = []
251 records = []
253 try:
252 try:
254 f = self._repo.vfs(self.statepathv1)
253 f = self._repo.vfs(self.statepathv1)
255 for i, l in enumerate(f):
254 for i, l in enumerate(f):
256 if i == 0:
255 if i == 0:
257 records.append(('L', l[:-1]))
256 records.append(('L', l[:-1]))
258 else:
257 else:
259 records.append(('F', l[:-1]))
258 records.append(('F', l[:-1]))
260 f.close()
259 f.close()
261 except IOError as err:
260 except IOError as err:
262 if err.errno != errno.ENOENT:
261 if err.errno != errno.ENOENT:
263 raise
262 raise
264 return records
263 return records
265
264
266 def _readrecordsv2(self):
265 def _readrecordsv2(self):
267 """read on disk merge state for version 2 file
266 """read on disk merge state for version 2 file
268
267
269 This format is a list of arbitrary records of the form:
268 This format is a list of arbitrary records of the form:
270
269
271 [type][length][content]
270 [type][length][content]
272
271
273 `type` is a single character, `length` is a 4 byte integer, and
272 `type` is a single character, `length` is a 4 byte integer, and
274 `content` is an arbitrary byte sequence of length `length`.
273 `content` is an arbitrary byte sequence of length `length`.
275
274
276 Mercurial versions prior to 3.7 have a bug where if there are
275 Mercurial versions prior to 3.7 have a bug where if there are
277 unsupported mandatory merge records, attempting to clear out the merge
276 unsupported mandatory merge records, attempting to clear out the merge
278 state with hg update --clean or similar aborts. The 't' record type
277 state with hg update --clean or similar aborts. The 't' record type
279 works around that by writing out what those versions treat as an
278 works around that by writing out what those versions treat as an
280 advisory record, but which later versions interpret as special: the first
279 advisory record, but which later versions interpret as special: the first
281 character is the 'real' record type and everything onwards is the data.
280 character is the 'real' record type and everything onwards is the data.
282
281
283 Returns list of records [(TYPE, data), ...]."""
282 Returns list of records [(TYPE, data), ...]."""
284 records = []
283 records = []
285 try:
284 try:
286 f = self._repo.vfs(self.statepathv2)
285 f = self._repo.vfs(self.statepathv2)
287 data = f.read()
286 data = f.read()
288 off = 0
287 off = 0
289 end = len(data)
288 end = len(data)
290 while off < end:
289 while off < end:
291 rtype = data[off]
290 rtype = data[off]
292 off += 1
291 off += 1
293 length = _unpack('>I', data[off:(off + 4)])[0]
292 length = _unpack('>I', data[off:(off + 4)])[0]
294 off += 4
293 off += 4
295 record = data[off:(off + length)]
294 record = data[off:(off + length)]
296 off += length
295 off += length
297 if rtype == 't':
296 if rtype == 't':
298 rtype, record = record[0], record[1:]
297 rtype, record = record[0], record[1:]
299 records.append((rtype, record))
298 records.append((rtype, record))
300 f.close()
299 f.close()
301 except IOError as err:
300 except IOError as err:
302 if err.errno != errno.ENOENT:
301 if err.errno != errno.ENOENT:
303 raise
302 raise
304 return records
303 return records
305
304
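# Framing sketch for the v2 record layout parsed above; the record type and
# payload are made up, while the '>I' big-endian length prefix is the one
# used in _readrecordsv2:
#
#     >>> import struct
#     >>> content = 'local\x00other'
#     >>> 'l' + struct.pack('>I', len(content)) + content
#     'l\x00\x00\x00\x0blocal\x00other'
#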
306 @util.propertycache
305 @util.propertycache
307 def mergedriver(self):
306 def mergedriver(self):
308 # protect against the following:
307 # protect against the following:
309 # - A configures a malicious merge driver in their hgrc, then
308 # - A configures a malicious merge driver in their hgrc, then
310 # pauses the merge
309 # pauses the merge
311 # - A edits their hgrc to remove references to the merge driver
310 # - A edits their hgrc to remove references to the merge driver
312 # - A gives a copy of their entire repo, including .hg, to B
311 # - A gives a copy of their entire repo, including .hg, to B
313 # - B inspects .hgrc and finds it to be clean
312 # - B inspects .hgrc and finds it to be clean
314 # - B then continues the merge and the malicious merge driver
313 # - B then continues the merge and the malicious merge driver
315 # gets invoked
314 # gets invoked
316 configmergedriver = self._repo.ui.config('experimental', 'mergedriver')
315 configmergedriver = self._repo.ui.config('experimental', 'mergedriver')
317 if (self._readmergedriver is not None
316 if (self._readmergedriver is not None
318 and self._readmergedriver != configmergedriver):
317 and self._readmergedriver != configmergedriver):
319 raise error.ConfigError(
318 raise error.ConfigError(
320 _("merge driver changed since merge started"),
319 _("merge driver changed since merge started"),
321 hint=_("revert merge driver change or abort merge"))
320 hint=_("revert merge driver change or abort merge"))
322
321
323 return configmergedriver
322 return configmergedriver
324
323
325 @util.propertycache
324 @util.propertycache
326 def localctx(self):
325 def localctx(self):
327 if self._local is None:
326 if self._local is None:
328 msg = "localctx accessed but self._local isn't set"
327 msg = "localctx accessed but self._local isn't set"
329 raise error.ProgrammingError(msg)
328 raise error.ProgrammingError(msg)
330 return self._repo[self._local]
329 return self._repo[self._local]
331
330
332 @util.propertycache
331 @util.propertycache
333 def otherctx(self):
332 def otherctx(self):
334 if self._other is None:
333 if self._other is None:
335 msg = "otherctx accessed but self._other isn't set"
334 msg = "otherctx accessed but self._other isn't set"
336 raise error.ProgrammingError(msg)
335 raise error.ProgrammingError(msg)
337 return self._repo[self._other]
336 return self._repo[self._other]
338
337
339 def active(self):
338 def active(self):
340 """Whether mergestate is active.
339 """Whether mergestate is active.
341
340
342 Returns True if there appears to be mergestate. This is a rough proxy
341 Returns True if there appears to be mergestate. This is a rough proxy
343 for "is a merge in progress."
342 for "is a merge in progress."
344 """
343 """
345 # Check local variables before looking at filesystem for performance
344 # Check local variables before looking at filesystem for performance
346 # reasons.
345 # reasons.
347 return bool(self._local) or bool(self._state) or \
346 return bool(self._local) or bool(self._state) or \
348 self._repo.vfs.exists(self.statepathv1) or \
347 self._repo.vfs.exists(self.statepathv1) or \
349 self._repo.vfs.exists(self.statepathv2)
348 self._repo.vfs.exists(self.statepathv2)
350
349
351 def commit(self):
350 def commit(self):
352 """Write current state on disk (if necessary)"""
351 """Write current state on disk (if necessary)"""
353 if self._dirty:
352 if self._dirty:
354 records = self._makerecords()
353 records = self._makerecords()
355 self._writerecords(records)
354 self._writerecords(records)
356 self._dirty = False
355 self._dirty = False
357
356
358 def _makerecords(self):
357 def _makerecords(self):
359 records = []
358 records = []
360 records.append(('L', hex(self._local)))
359 records.append(('L', hex(self._local)))
361 records.append(('O', hex(self._other)))
360 records.append(('O', hex(self._other)))
362 if self.mergedriver:
361 if self.mergedriver:
363 records.append(('m', '\0'.join([
362 records.append(('m', '\0'.join([
364 self.mergedriver, self._mdstate])))
363 self.mergedriver, self._mdstate])))
365 # Write out state items. In all cases, the value of the state map entry
364 # Write out state items. In all cases, the value of the state map entry
366 # is written as the contents of the record. The record type depends on
365 # is written as the contents of the record. The record type depends on
367 # the type of state that is stored, and capital-letter records are used
366 # the type of state that is stored, and capital-letter records are used
368 # to prevent older versions of Mercurial that do not support the feature
367 # to prevent older versions of Mercurial that do not support the feature
369 # from loading them.
368 # from loading them.
370 for filename, v in self._state.iteritems():
369 for filename, v in self._state.iteritems():
371 if v[0] == 'd':
370 if v[0] == 'd':
372 # Driver-resolved merge. These are stored in 'D' records.
371 # Driver-resolved merge. These are stored in 'D' records.
373 records.append(('D', '\0'.join([filename] + v)))
372 records.append(('D', '\0'.join([filename] + v)))
374 elif v[0] in ('pu', 'pr'):
373 elif v[0] in ('pu', 'pr'):
375 # Path conflicts. These are stored in 'P' records. The current
374 # Path conflicts. These are stored in 'P' records. The current
376 # resolution state ('pu' or 'pr') is stored within the record.
375 # resolution state ('pu' or 'pr') is stored within the record.
377 records.append(('P', '\0'.join([filename] + v)))
376 records.append(('P', '\0'.join([filename] + v)))
378 elif v[1] == nullhex or v[6] == nullhex:
377 elif v[1] == nullhex or v[6] == nullhex:
379 # Change/Delete or Delete/Change conflicts. These are stored in
378 # Change/Delete or Delete/Change conflicts. These are stored in
380 # 'C' records. v[1] is the local file, and is nullhex when the
379 # 'C' records. v[1] is the local file, and is nullhex when the
381 # file is deleted locally ('dc'). v[6] is the remote file, and
380 # file is deleted locally ('dc'). v[6] is the remote file, and
382 # is nullhex when the file is deleted remotely ('cd').
381 # is nullhex when the file is deleted remotely ('cd').
383 records.append(('C', '\0'.join([filename] + v)))
382 records.append(('C', '\0'.join([filename] + v)))
384 else:
383 else:
385 # Normal files. These are stored in 'F' records.
384 # Normal files. These are stored in 'F' records.
386 records.append(('F', '\0'.join([filename] + v)))
385 records.append(('F', '\0'.join([filename] + v)))
387 for filename, extras in sorted(self._stateextras.iteritems()):
386 for filename, extras in sorted(self._stateextras.iteritems()):
388 rawextras = '\0'.join('%s\0%s' % (k, v) for k, v in
387 rawextras = '\0'.join('%s\0%s' % (k, v) for k, v in
389 extras.iteritems())
388 extras.iteritems())
390 records.append(('f', '%s\0%s' % (filename, rawextras)))
389 records.append(('f', '%s\0%s' % (filename, rawextras)))
391 if self._labels is not None:
390 if self._labels is not None:
392 labels = '\0'.join(self._labels)
391 labels = '\0'.join(self._labels)
393 records.append(('l', labels))
392 records.append(('l', labels))
394 return records
393 return records
395
394
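    # Editor's note (illustrative only, not part of this module): a normal 'F'
    # record written above is the filename joined with its state list by NUL
    # bytes.  Based on the list built in add() below, a record payload looks
    # roughly like:
    #
    #   "path/in/repo\0u\0<sha1 of local>\0<local path>\0<ancestor path>\0"
    #   "<ancestor filenode hex>\0<other path>\0<other filenode hex>\0<flags>"
    #
    # The values in angle brackets are hypothetical placeholders, not real
    # hashes or paths.
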
    def _writerecords(self, records):
        """Write current state on disk (both v1 and v2)"""
        self._writerecordsv1(records)
        self._writerecordsv2(records)

    def _writerecordsv1(self, records):
        """Write current state on disk in a version 1 file"""
        f = self._repo.vfs(self.statepathv1, 'w')
        irecords = iter(records)
        lrecords = next(irecords)
        assert lrecords[0] == 'L'
        f.write(hex(self._local) + '\n')
        for rtype, data in irecords:
            if rtype == 'F':
                f.write('%s\n' % _droponode(data))
        f.close()

    def _writerecordsv2(self, records):
        """Write current state on disk in a version 2 file

        See the docstring for _readrecordsv2 for why we use 't'."""
        # these are the records that all version 2 clients can read
        whitelist = 'LOF'
        f = self._repo.vfs(self.statepathv2, 'w')
        for key, data in records:
            assert len(key) == 1
            if key not in whitelist:
                key, data = 't', '%s%s' % (key, data)
            format = '>sI%is' % len(data)
            f.write(_pack(format, key, len(data), data))
        f.close()

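    # Editor's note (illustrative sketch, not part of mergestate): each v2
    # record is framed as [type][length][content] by the '>sI%is' format
    # above -- a one-byte type, a big-endian 32-bit payload length, then the
    # payload itself.  In a standalone Python 2 session:
    #
    #   >>> import struct
    #   >>> data = 'example\x00payload'
    #   >>> blob = struct.pack('>sI%is' % len(data), 'X', len(data), data)
    #   >>> blob[0], struct.unpack('>I', blob[1:5])[0]
    #   ('X', 15)
    #
    # 'X' and the payload are made-up values used only for illustration.
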
    def add(self, fcl, fco, fca, fd):
        """add a new (potentially?) conflicting file to the merge state
        fcl: file context for local,
        fco: file context for remote,
        fca: file context for ancestors,
        fd: file path of the resulting merge.

        note: also write the local version to the `.hg/merge` directory.
        """
        if fcl.isabsent():
            hash = nullhex
        else:
            hash = hex(hashlib.sha1(fcl.path()).digest())
            self._repo.vfs.write('merge/' + hash, fcl.data())
        self._state[fd] = ['u', hash, fcl.path(),
                           fca.path(), hex(fca.filenode()),
                           fco.path(), hex(fco.filenode()),
                           fcl.flags()]
        self._stateextras[fd] = {'ancestorlinknode': hex(fca.node())}
        self._dirty = True

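    # Editor's note (illustrative only): the local version saved above lands
    # in .hg/merge/<sha1>, where <sha1> is the hex SHA-1 of the file *path*,
    # not of its contents, so re-resolving the same path reuses the same stash
    # file.  A minimal sketch of the key computation, with a hypothetical path:
    #
    #   >>> import hashlib
    #   >>> key = hashlib.sha1('foo/bar.txt').hexdigest()
    #   >>> len(key)
    #   40
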
    def addpath(self, path, frename, forigin):
        """add a new conflicting path to the merge state
        path: the path that conflicts
        frename: the filename the conflicting file was renamed to
        forigin: origin of the file ('l' or 'r' for local/remote)
        """
        self._state[path] = ['pu', frename, forigin]
        self._dirty = True

    def __contains__(self, dfile):
        return dfile in self._state

    def __getitem__(self, dfile):
        return self._state[dfile][0]

    def __iter__(self):
        return iter(sorted(self._state))

    def files(self):
        return self._state.keys()

    def mark(self, dfile, state):
        self._state[dfile][0] = state
        self._dirty = True

    def mdstate(self):
        return self._mdstate

    def unresolved(self):
        """Obtain the paths of unresolved files."""

        for f, entry in self._state.iteritems():
            if entry[0] in ('u', 'pu'):
                yield f

    def driverresolved(self):
        """Obtain the paths of driver-resolved files."""

        for f, entry in self._state.items():
            if entry[0] == 'd':
                yield f

    def extras(self, filename):
        return self._stateextras.setdefault(filename, {})

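    # Editor's note (summary inferred from the methods above): the first
    # element of each _state entry is a small status code -- 'u' unresolved,
    # 'r' resolved, 'd' driver-resolved, 'pu' unresolved path conflict and
    # 'pr' resolved path conflict.  unresolved() reports 'u'/'pu' entries and
    # driverresolved() reports 'd' entries.
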
    def _resolve(self, preresolve, dfile, wctx):
        """rerun merge process for file path `dfile`"""
        if self[dfile] in 'rd':
            return True, 0
        stateentry = self._state[dfile]
        state, hash, lfile, afile, anode, ofile, onode, flags = stateentry
        octx = self._repo[self._other]
        extras = self.extras(dfile)
        anccommitnode = extras.get('ancestorlinknode')
        if anccommitnode:
            actx = self._repo[anccommitnode]
        else:
            actx = None
        fcd = self._filectxorabsent(hash, wctx, dfile)
        fco = self._filectxorabsent(onode, octx, ofile)
        # TODO: move this to filectxorabsent
        fca = self._repo.filectx(afile, fileid=anode, changeid=actx)
        # "premerge" x flags
        flo = fco.flags()
        fla = fca.flags()
        if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
            if fca.node() == nullid and flags != flo:
                if preresolve:
                    self._repo.ui.warn(
                        _('warning: cannot merge flags for %s '
                          'without common ancestor - keeping local flags\n')
                        % afile)
            elif flags == fla:
                flags = flo
        if preresolve:
            # restore local
            if hash != nullhex:
                f = self._repo.vfs('merge/' + hash)
                wctx[dfile].write(f.read(), flags)
                f.close()
            else:
                wctx[dfile].remove(ignoremissing=True)
            complete, r, deleted = filemerge.premerge(self._repo, wctx,
                                                      self._local, lfile, fcd,
                                                      fco, fca,
                                                      labels=self._labels)
        else:
            complete, r, deleted = filemerge.filemerge(self._repo, wctx,
                                                       self._local, lfile, fcd,
                                                       fco, fca,
                                                       labels=self._labels)
        if r is None:
            # no real conflict
            del self._state[dfile]
            self._stateextras.pop(dfile, None)
            self._dirty = True
        elif not r:
            self.mark(dfile, 'r')

        if complete:
            action = None
            if deleted:
                if fcd.isabsent():
                    # dc: local picked. Need to drop if present, which may
                    # happen on re-resolves.
                    action = 'f'
                else:
                    # cd: remote picked (or otherwise deleted)
                    action = 'r'
            else:
                if fcd.isabsent(): # dc: remote picked
                    action = 'g'
                elif fco.isabsent(): # cd: local picked
                    if dfile in self.localctx:
                        action = 'am'
                    else:
                        action = 'a'
                # else: regular merges (no action necessary)
            self._results[dfile] = r, action

        return complete, r

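    # Editor's note (summary of the bookkeeping above): for each file that has
    # been through _resolve(), _results holds (r, action).  r is None when no
    # real conflict was found, 0 on a successful merge and non-zero on
    # failure; action is one of 'r' (remove), 'f' (forget), 'g' (get),
    # 'a'/'am' (add) or None, and is later replayed against the dirstate by
    # recordactions().
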
    def _filectxorabsent(self, hexnode, ctx, f):
        if hexnode == nullhex:
            return filemerge.absentfilectx(ctx, f)
        else:
            return ctx[f]

    def preresolve(self, dfile, wctx):
        """run premerge process for dfile

        Returns whether the merge is complete, and the exit code."""
        return self._resolve(True, dfile, wctx)

    def resolve(self, dfile, wctx):
        """run merge process (assuming premerge was run) for dfile

        Returns the exit code of the merge."""
        return self._resolve(False, dfile, wctx)[1]

    def counts(self):
        """return counts for updated, merged and removed files in this
        session"""
        updated, merged, removed = 0, 0, 0
        for r, action in self._results.itervalues():
            if r is None:
                updated += 1
            elif r == 0:
                if action == 'r':
                    removed += 1
                else:
                    merged += 1
        return updated, merged, removed

    def unresolvedcount(self):
        """get unresolved count for this merge (persistent)"""
        return len(list(self.unresolved()))

    def actions(self):
        """return lists of actions to perform on the dirstate"""
        actions = {'r': [], 'f': [], 'a': [], 'am': [], 'g': []}
        for f, (r, action) in self._results.iteritems():
            if action is not None:
                actions[action].append((f, None, "merge result"))
        return actions

    def recordactions(self):
        """record remove/add/get actions in the dirstate"""
        branchmerge = self._repo.dirstate.p2() != nullid
        recordupdates(self._repo, self.actions(), branchmerge)

    def queueremove(self, f):
        """queues a file to be removed from the dirstate

        Meant for use by custom merge drivers."""
        self._results[f] = 0, 'r'

    def queueadd(self, f):
        """queues a file to be added to the dirstate

        Meant for use by custom merge drivers."""
        self._results[f] = 0, 'a'

    def queueget(self, f):
        """queues a file to be marked modified in the dirstate

        Meant for use by custom merge drivers."""
        self._results[f] = 0, 'g'

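# Editor's note (hedged usage sketch, not part of this module's API surface):
# a caller typically iterates the unresolved files, runs preresolve() and then
# resolve() on each, and finally commit()s the state, roughly:
#
#   ms = mergestate.read(repo)
#   for f in ms.unresolved():
#       ms.preresolve(f, repo[None])
#       ms.resolve(f, repo[None])
#   ms.commit()
#
# See the resolve command for the real driver loop.
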
def _getcheckunknownconfig(repo, section, name):
    config = repo.ui.config(section, name)
    valid = ['abort', 'ignore', 'warn']
    if config not in valid:
        validstr = ', '.join(["'" + v + "'" for v in valid])
        raise error.ConfigError(_("%s.%s not valid "
                                  "('%s' is none of %s)")
                                % (section, name, config, validstr))
    return config

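# Editor's note (illustrative config, values taken from the checks above): the
# callers below read merge.checkunknown and merge.checkignored, each of which
# must be 'abort', 'ignore' or 'warn'.  For example, in an hgrc:
#
#   [merge]
#   checkunknown = warn
#   checkignored = abort
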
def _checkunknownfile(repo, wctx, mctx, f, f2=None):
    if wctx.isinmemory():
        # Nothing to do in IMM because nothing in the "working copy" can be an
        # unknown file.
        #
        # Note that we should bail out here, not in ``_checkunknownfiles()``,
        # because that function does other useful work.
        return False

    if f2 is None:
        f2 = f
    return (repo.wvfs.audit.check(f)
            and repo.wvfs.isfileorlink(f)
            and repo.dirstate.normalize(f) not in repo.dirstate
            and mctx[f2].cmp(wctx[f]))

class _unknowndirschecker(object):
    """
    Look for any unknown files or directories that may have a path conflict
    with a file.  If any path prefix of the file exists as a file or link,
    then it conflicts.  If the file itself is a directory that contains any
    file that is not tracked, then it conflicts.

    Returns the shortest path at which a conflict occurs, or None if there is
    no conflict.
    """
    def __init__(self):
        # A set of paths known to be good.  This prevents repeated checking of
        # dirs.  It will be updated with any new dirs that are checked and
        # found to be safe.
        self._unknowndircache = set()

        # A set of paths that are known to be absent.  This prevents repeated
        # checking of subdirectories that are known not to exist. It will be
        # updated with any new dirs that are checked and found to be absent.
        self._missingdircache = set()

    def __call__(self, repo, wctx, f):
        if wctx.isinmemory():
            # Nothing to do in IMM for the same reason as ``_checkunknownfile``.
            return False

        # Check for path prefixes that exist as unknown files.
        for p in reversed(list(util.finddirs(f))):
            if p in self._missingdircache:
                return
            if p in self._unknowndircache:
                continue
            if repo.wvfs.audit.check(p):
                if (repo.wvfs.isfileorlink(p)
                        and repo.dirstate.normalize(p) not in repo.dirstate):
                    return p
                if not repo.wvfs.lexists(p):
                    self._missingdircache.add(p)
                    return
                self._unknowndircache.add(p)

        # Check if the file conflicts with a directory containing unknown files.
        if repo.wvfs.audit.check(f) and repo.wvfs.isdir(f):
            # Does the directory contain any files that are not in the dirstate?
            for p, dirs, files in repo.wvfs.walk(f):
                for fn in files:
                    relf = repo.dirstate.normalize(repo.wvfs.reljoin(p, fn))
                    if relf not in repo.dirstate:
                        return f
        return None

def _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce):
    """
    Considers any actions that care about the presence of conflicting unknown
    files. For some actions, the result is to abort; for others, it is to
    choose a different action.
    """
    fileconflicts = set()
    pathconflicts = set()
    warnconflicts = set()
    abortconflicts = set()
    unknownconfig = _getcheckunknownconfig(repo, 'merge', 'checkunknown')
    ignoredconfig = _getcheckunknownconfig(repo, 'merge', 'checkignored')
    pathconfig = repo.ui.configbool('experimental', 'merge.checkpathconflicts')
    if not force:
        def collectconflicts(conflicts, config):
            if config == 'abort':
                abortconflicts.update(conflicts)
            elif config == 'warn':
                warnconflicts.update(conflicts)

        checkunknowndirs = _unknowndirschecker()
        for f, (m, args, msg) in actions.iteritems():
            if m in ('c', 'dc'):
                if _checkunknownfile(repo, wctx, mctx, f):
                    fileconflicts.add(f)
                elif pathconfig and f not in wctx:
                    path = checkunknowndirs(repo, wctx, f)
                    if path is not None:
                        pathconflicts.add(path)
            elif m == 'dg':
                if _checkunknownfile(repo, wctx, mctx, f, args[0]):
                    fileconflicts.add(f)

        allconflicts = fileconflicts | pathconflicts
        ignoredconflicts = set([c for c in allconflicts
                                if repo.dirstate._ignore(c)])
        unknownconflicts = allconflicts - ignoredconflicts
        collectconflicts(ignoredconflicts, ignoredconfig)
        collectconflicts(unknownconflicts, unknownconfig)
    else:
        for f, (m, args, msg) in actions.iteritems():
            if m == 'cm':
                fl2, anc = args
                different = _checkunknownfile(repo, wctx, mctx, f)
                if repo.dirstate._ignore(f):
                    config = ignoredconfig
                else:
                    config = unknownconfig

                # The behavior when force is True is described by this table:
                #  config  different  mergeforce  |    action    backup
                #    *         n          *       |      get        n
                #    *         y          y       |     merge       -
                #   abort      y          n       |     merge       -   (1)
                #   warn       y          n       |  warn + get     y
                #  ignore      y          n       |      get        y
                #
                # (1) this is probably the wrong behavior here -- we should
                #     probably abort, but some actions like rebases currently
                #     don't like an abort happening in the middle of
                #     merge.update.
                if not different:
                    actions[f] = ('g', (fl2, False), "remote created")
                elif mergeforce or config == 'abort':
                    actions[f] = ('m', (f, f, None, False, anc),
                                  "remote differs from untracked local")
                elif config == 'abort':
                    abortconflicts.add(f)
                else:
                    if config == 'warn':
                        warnconflicts.add(f)
                    actions[f] = ('g', (fl2, True), "remote created")

    for f in sorted(abortconflicts):
        warn = repo.ui.warn
        if f in pathconflicts:
            if repo.wvfs.isfileorlink(f):
                warn(_("%s: untracked file conflicts with directory\n") % f)
            else:
                warn(_("%s: untracked directory conflicts with file\n") % f)
        else:
            warn(_("%s: untracked file differs\n") % f)
    if abortconflicts:
        raise error.Abort(_("untracked files in working directory "
                            "differ from files in requested revision"))

    for f in sorted(warnconflicts):
        if repo.wvfs.isfileorlink(f):
            repo.ui.warn(_("%s: replacing untracked file\n") % f)
        else:
            repo.ui.warn(_("%s: replacing untracked files in directory\n") % f)

    for f, (m, args, msg) in actions.iteritems():
        if m == 'c':
            backup = (f in fileconflicts or f in pathconflicts or
                      any(p in pathconflicts for p in util.finddirs(f)))
            flags, = args
            actions[f] = ('g', (flags, backup), msg)

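# Editor's note (worked example of the table above, hedged): with force=True
# and mergeforce=False, an untracked local file whose content differs from the
# incoming remote file is backed up and overwritten when merge.checkunknown is
# 'warn' or 'ignore' (action 'g' with backup=True), and is merged in place
# when the config is 'abort'.
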
def _forgetremoved(wctx, mctx, branchmerge):
    """
    Forget removed files

    If we're jumping between revisions (as opposed to merging), and if
    neither the working directory nor the target rev has the file,
    then we need to remove it from the dirstate, to prevent the
    dirstate from listing the file when it is no longer in the
    manifest.

    If we're merging, and the other revision has removed a file
    that is not present in the working directory, we need to mark it
    as removed.
    """

    actions = {}
    m = 'f'
    if branchmerge:
        m = 'r'
    for f in wctx.deleted():
        if f not in mctx:
            actions[f] = m, None, "forget deleted"

    if not branchmerge:
        for f in wctx.removed():
            if f not in mctx:
                actions[f] = 'f', None, "forget removed"

    return actions

def _checkcollision(repo, wmf, actions):
    # build provisional merged manifest up
    pmmf = set(wmf)

    if actions:
        # k, dr, e and rd are no-op
        for m in 'a', 'am', 'f', 'g', 'cd', 'dc':
            for f, args, msg in actions[m]:
                pmmf.add(f)
        for f, args, msg in actions['r']:
            pmmf.discard(f)
        for f, args, msg in actions['dm']:
            f2, flags = args
            pmmf.discard(f2)
            pmmf.add(f)
        for f, args, msg in actions['dg']:
            pmmf.add(f)
        for f, args, msg in actions['m']:
            f1, f2, fa, move, anc = args
            if move:
                pmmf.discard(f1)
            pmmf.add(f)

    # check case-folding collision in provisional merged manifest
    foldmap = {}
    for f in pmmf:
        fold = util.normcase(f)
        if fold in foldmap:
            raise error.Abort(_("case-folding collision between %s and %s")
                              % (f, foldmap[fold]))
        foldmap[fold] = f

    # check case-folding of directories
    foldprefix = unfoldprefix = lastfull = ''
    for fold, f in sorted(foldmap.items()):
        if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
            # the folded prefix matches but actual casing is different
            raise error.Abort(_("case-folding collision between "
                                "%s and directory of %s") % (lastfull, f))
        foldprefix = fold + '/'
        unfoldprefix = f + '/'
        lastfull = f

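# Editor's note (illustrative example, made-up names): if the provisional
# manifest contains both 'README' and 'readme', they fold to the same name and
# the first check above aborts; with a file 'dir' and a file 'DIR/x', the
# directory check trips because 'dir/x' folds under 'dir/' while the actual
# casing 'DIR/x' does not.
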
def driverpreprocess(repo, ms, wctx, labels=None):
    """run the preprocess step of the merge driver, if any

    This is currently not implemented -- it's an extension point."""
    return True

def driverconclude(repo, ms, wctx, labels=None):
    """run the conclude step of the merge driver, if any

    This is currently not implemented -- it's an extension point."""
    return True

def _filesindirs(repo, manifest, dirs):
    """
    Generator that yields pairs of all the files in the manifest that are found
    inside the directories listed in dirs, and which directory they are found
    in.
    """
    for f in manifest:
        for p in util.finddirs(f):
            if p in dirs:
                yield f, p
                break

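# Editor's note (tiny example, hypothetical names): with a manifest containing
# 'a/b.txt' and 'c.txt', _filesindirs(repo, manifest, {'a'}) yields exactly
# ('a/b.txt', 'a'); files outside the listed directories are skipped.
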
def checkpathconflicts(repo, wctx, mctx, actions):
    """
    Check if any actions introduce path conflicts in the repository, updating
    actions to record or handle the path conflict accordingly.
    """
    mf = wctx.manifest()

    # The set of local files that conflict with a remote directory.
    localconflicts = set()

    # The set of directories that conflict with a remote file, and so may cause
    # conflicts if they still contain any files after the merge.
    remoteconflicts = set()

    # The set of directories that appear as both a file and a directory in the
    # remote manifest.  These indicate an invalid remote manifest, which
    # can't be updated to cleanly.
    invalidconflicts = set()

    # The set of directories that contain files that are being created.
    createdfiledirs = set()

    # The set of files deleted by all the actions.
    deletedfiles = set()

    for f, (m, args, msg) in actions.items():
        if m in ('c', 'dc', 'm', 'cm'):
            # This action may create a new local file.
            createdfiledirs.update(util.finddirs(f))
            if mf.hasdir(f):
                # The file aliases a local directory.  This might be ok if all
                # the files in the local directory are being deleted.  This
                # will be checked once we know what all the deleted files are.
                remoteconflicts.add(f)
        # Track the names of all deleted files.
        if m == 'r':
            deletedfiles.add(f)
        if m == 'm':
            f1, f2, fa, move, anc = args
            if move:
                deletedfiles.add(f1)
        if m == 'dm':
            f2, flags = args
            deletedfiles.add(f2)

    # Check all directories that contain created files for path conflicts.
    for p in createdfiledirs:
        if p in mf:
            if p in mctx:
                # A file is in a directory which aliases both a local
                # and a remote file.  This is an internal inconsistency
                # within the remote manifest.
                invalidconflicts.add(p)
            else:
                # A file is in a directory which aliases a local file.
                # We will need to rename the local file.
                localconflicts.add(p)
        if p in actions and actions[p][0] in ('c', 'dc', 'm', 'cm'):
            # The file is in a directory which aliases a remote file.
            # This is an internal inconsistency within the remote
            # manifest.
            invalidconflicts.add(p)

    # Rename all local conflicting files that have not been deleted.
    for p in localconflicts:
        if p not in deletedfiles:
            ctxname = str(wctx).rstrip('+')
            pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
            actions[pnew] = ('pr', (p,), "local path conflict")
            actions[p] = ('p', (pnew, 'l'), "path conflict")

    if remoteconflicts:
        # Check if all files in the conflicting directories have been removed.
        ctxname = str(mctx).rstrip('+')
        for f, p in _filesindirs(repo, mf, remoteconflicts):
            if f not in deletedfiles:
                m, args, msg = actions[p]
                pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
                if m in ('dc', 'm'):
                    # Action was merge, just update target.
                    actions[pnew] = (m, args, msg)
                else:
                    # Action was create, change to renamed get action.
                    fl = args[0]
                    actions[pnew] = ('dg', (p, fl), "remote path conflict")
                actions[p] = ('p', (pnew, 'r'), "path conflict")
                remoteconflicts.remove(p)
                break

    if invalidconflicts:
        for p in invalidconflicts:
            repo.ui.warn(_("%s: is both a file and a directory\n") % p)
        raise error.Abort(_("destination manifest contains path conflicts"))

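# Editor's note (worked example, hypothetical paths): if the working copy has
# a tracked file 'a' and the remote side introduces 'a/b', then 'a' becomes a
# local path conflict -- it is renamed to a safe name via util.safename() and
# marked with a 'p'/'pr' action pair so the merge can proceed and the conflict
# can be reported and resolved later.
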
1005 def manifestmerge(repo, wctx, p2, pa, branchmerge, force, matcher,
1004 def manifestmerge(repo, wctx, p2, pa, branchmerge, force, matcher,
1006 acceptremote, followcopies, forcefulldiff=False):
1005 acceptremote, followcopies, forcefulldiff=False):
1007 """
1006 """
1008 Merge wctx and p2 with ancestor pa and generate merge action list
1007 Merge wctx and p2 with ancestor pa and generate merge action list
1009
1008
1010 branchmerge and force are as passed in to update
1009 branchmerge and force are as passed in to update
1011 matcher = matcher to filter file lists
1010 matcher = matcher to filter file lists
1012 acceptremote = accept the incoming changes without prompting
1011 acceptremote = accept the incoming changes without prompting
1013 """
1012 """
1014 if matcher is not None and matcher.always():
1013 if matcher is not None and matcher.always():
1015 matcher = None
1014 matcher = None
1016
1015
1017 copy, movewithdir, diverge, renamedelete, dirmove = {}, {}, {}, {}, {}
1016 copy, movewithdir, diverge, renamedelete, dirmove = {}, {}, {}, {}, {}
1018
1017
1019 # manifests fetched in order are going to be faster, so prime the caches
1018 # manifests fetched in order are going to be faster, so prime the caches
1020 [x.manifest() for x in
1019 [x.manifest() for x in
1021 sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)]
1020 sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)]
1022
1021
1023 if followcopies:
1022 if followcopies:
1024 ret = copies.mergecopies(repo, wctx, p2, pa)
1023 ret = copies.mergecopies(repo, wctx, p2, pa)
1025 copy, movewithdir, diverge, renamedelete, dirmove = ret
1024 copy, movewithdir, diverge, renamedelete, dirmove = ret
1026
1025
1027 boolbm = pycompat.bytestr(bool(branchmerge))
1026 boolbm = pycompat.bytestr(bool(branchmerge))
1028 boolf = pycompat.bytestr(bool(force))
1027 boolf = pycompat.bytestr(bool(force))
1029 boolm = pycompat.bytestr(bool(matcher))
1028 boolm = pycompat.bytestr(bool(matcher))
1030 repo.ui.note(_("resolving manifests\n"))
1029 repo.ui.note(_("resolving manifests\n"))
1031 repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
1030 repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
1032 % (boolbm, boolf, boolm))
1031 % (boolbm, boolf, boolm))
1033 repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
1032 repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
1034
1033
1035 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
1034 m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
1036 copied = set(copy.values())
1035 copied = set(copy.values())
1037 copied.update(movewithdir.values())
1036 copied.update(movewithdir.values())
1038
1037
1039 if '.hgsubstate' in m1:
1038 if '.hgsubstate' in m1:
1040 # check whether sub state is modified
1039 # check whether sub state is modified
1041 if any(wctx.sub(s).dirty() for s in wctx.substate):
1040 if any(wctx.sub(s).dirty() for s in wctx.substate):
1042 m1['.hgsubstate'] = modifiednodeid
1041 m1['.hgsubstate'] = modifiednodeid
1043
1042
1044 # Don't use m2-vs-ma optimization if:
1043 # Don't use m2-vs-ma optimization if:
1045 # - ma is the same as m1 or m2, which we're just going to diff again later
1044 # - ma is the same as m1 or m2, which we're just going to diff again later
1046 # - The caller specifically asks for a full diff, which is useful during bid
1045 # - The caller specifically asks for a full diff, which is useful during bid
1047 # merge.
1046 # merge.
1048 if (pa not in ([wctx, p2] + wctx.parents()) and not forcefulldiff):
1047 if (pa not in ([wctx, p2] + wctx.parents()) and not forcefulldiff):
1049 # Identify which files are relevant to the merge, so we can limit the
1048 # Identify which files are relevant to the merge, so we can limit the
1050 # total m1-vs-m2 diff to just those files. This has significant
1049 # total m1-vs-m2 diff to just those files. This has significant
1051 # performance benefits in large repositories.
1050 # performance benefits in large repositories.
1052 relevantfiles = set(ma.diff(m2).keys())
1051 relevantfiles = set(ma.diff(m2).keys())
1053
1052
1054 # For copied and moved files, we need to add the source file too.
1053 # For copied and moved files, we need to add the source file too.
1055 for copykey, copyvalue in copy.iteritems():
1054 for copykey, copyvalue in copy.iteritems():
1056 if copyvalue in relevantfiles:
1055 if copyvalue in relevantfiles:
1057 relevantfiles.add(copykey)
1056 relevantfiles.add(copykey)
1058 for movedirkey in movewithdir:
1057 for movedirkey in movewithdir:
1059 relevantfiles.add(movedirkey)
1058 relevantfiles.add(movedirkey)
1060 filesmatcher = scmutil.matchfiles(repo, relevantfiles)
1059 filesmatcher = scmutil.matchfiles(repo, relevantfiles)
1061 matcher = matchmod.intersectmatchers(matcher, filesmatcher)
1060 matcher = matchmod.intersectmatchers(matcher, filesmatcher)
1062
1061
1063 diff = m1.diff(m2, match=matcher)
1062 diff = m1.diff(m2, match=matcher)
1064
1063
1065 if matcher is None:
1064 if matcher is None:
1066 matcher = matchmod.always('', '')
1065 matcher = matchmod.always('', '')
1067
1066
1068 actions = {}
1067 actions = {}
1069 for f, ((n1, fl1), (n2, fl2)) in diff.iteritems():
1068 for f, ((n1, fl1), (n2, fl2)) in diff.iteritems():
1070 if n1 and n2: # file exists on both local and remote side
1069 if n1 and n2: # file exists on both local and remote side
1071 if f not in ma:
1070 if f not in ma:
1072 fa = copy.get(f, None)
1071 fa = copy.get(f, None)
1073 if fa is not None:
1072 if fa is not None:
1074 actions[f] = ('m', (f, f, fa, False, pa.node()),
1073 actions[f] = ('m', (f, f, fa, False, pa.node()),
1075 "both renamed from " + fa)
1074 "both renamed from " + fa)
1076 else:
1075 else:
1077 actions[f] = ('m', (f, f, None, False, pa.node()),
1076 actions[f] = ('m', (f, f, None, False, pa.node()),
1078 "both created")
1077 "both created")
1079 else:
1078 else:
1080 a = ma[f]
1079 a = ma[f]
1081 fla = ma.flags(f)
1080 fla = ma.flags(f)
1082 nol = 'l' not in fl1 + fl2 + fla
1081 nol = 'l' not in fl1 + fl2 + fla
1083 if n2 == a and fl2 == fla:
1082 if n2 == a and fl2 == fla:
1084 actions[f] = ('k', (), "remote unchanged")
1083 actions[f] = ('k', (), "remote unchanged")
1085 elif n1 == a and fl1 == fla: # local unchanged - use remote
1084 elif n1 == a and fl1 == fla: # local unchanged - use remote
1086 if n1 == n2: # optimization: keep local content
1085 if n1 == n2: # optimization: keep local content
1087 actions[f] = ('e', (fl2,), "update permissions")
1086 actions[f] = ('e', (fl2,), "update permissions")
1088 else:
1087 else:
1089 actions[f] = ('g', (fl2, False), "remote is newer")
1088 actions[f] = ('g', (fl2, False), "remote is newer")
1090 elif nol and n2 == a: # remote only changed 'x'
1089 elif nol and n2 == a: # remote only changed 'x'
1091 actions[f] = ('e', (fl2,), "update permissions")
1090 actions[f] = ('e', (fl2,), "update permissions")
1092 elif nol and n1 == a: # local only changed 'x'
1091 elif nol and n1 == a: # local only changed 'x'
1093 actions[f] = ('g', (fl1, False), "remote is newer")
1092 actions[f] = ('g', (fl1, False), "remote is newer")
1094 else: # both changed something
1093 else: # both changed something
1095 actions[f] = ('m', (f, f, f, False, pa.node()),
1094 actions[f] = ('m', (f, f, f, False, pa.node()),
1096 "versions differ")
1095 "versions differ")
1097 elif n1: # file exists only on local side
1096 elif n1: # file exists only on local side
1098 if f in copied:
1097 if f in copied:
1099 pass # we'll deal with it on m2 side
1098 pass # we'll deal with it on m2 side
1100 elif f in movewithdir: # directory rename, move local
1099 elif f in movewithdir: # directory rename, move local
1101 f2 = movewithdir[f]
1100 f2 = movewithdir[f]
1102 if f2 in m2:
1101 if f2 in m2:
1103 actions[f2] = ('m', (f, f2, None, True, pa.node()),
1102 actions[f2] = ('m', (f, f2, None, True, pa.node()),
1104 "remote directory rename, both created")
1103 "remote directory rename, both created")
1105 else:
1104 else:
1106 actions[f2] = ('dm', (f, fl1),
1105 actions[f2] = ('dm', (f, fl1),
1107 "remote directory rename - move from " + f)
1106 "remote directory rename - move from " + f)
1108 elif f in copy:
1107 elif f in copy:
1109 f2 = copy[f]
1108 f2 = copy[f]
1110 actions[f] = ('m', (f, f2, f2, False, pa.node()),
1109 actions[f] = ('m', (f, f2, f2, False, pa.node()),
1111 "local copied/moved from " + f2)
1110 "local copied/moved from " + f2)
1112 elif f in ma: # clean, a different, no remote
1111 elif f in ma: # clean, a different, no remote
1113 if n1 != ma[f]:
1112 if n1 != ma[f]:
1114 if acceptremote:
1113 if acceptremote:
1115 actions[f] = ('r', None, "remote delete")
1114 actions[f] = ('r', None, "remote delete")
1116 else:
1115 else:
1117 actions[f] = ('cd', (f, None, f, False, pa.node()),
1116 actions[f] = ('cd', (f, None, f, False, pa.node()),
1118 "prompt changed/deleted")
1117 "prompt changed/deleted")
1119 elif n1 == addednodeid:
1118 elif n1 == addednodeid:
1120 # This extra 'a' is added by working copy manifest to mark
1119 # This extra 'a' is added by working copy manifest to mark
1121 # the file as locally added. We should forget it instead of
1120 # the file as locally added. We should forget it instead of
1122 # deleting it.
1121 # deleting it.
1123 actions[f] = ('f', None, "remote deleted")
1122 actions[f] = ('f', None, "remote deleted")
1124 else:
1123 else:
1125 actions[f] = ('r', None, "other deleted")
1124 actions[f] = ('r', None, "other deleted")
1126 elif n2: # file exists only on remote side
1125 elif n2: # file exists only on remote side
1127 if f in copied:
1126 if f in copied:
1128 pass # we'll deal with it on m1 side
1127 pass # we'll deal with it on m1 side
1129 elif f in movewithdir:
1128 elif f in movewithdir:
1130 f2 = movewithdir[f]
1129 f2 = movewithdir[f]
1131 if f2 in m1:
1130 if f2 in m1:
1132 actions[f2] = ('m', (f2, f, None, False, pa.node()),
1131 actions[f2] = ('m', (f2, f, None, False, pa.node()),
1133 "local directory rename, both created")
1132 "local directory rename, both created")
1134 else:
1133 else:
1135 actions[f2] = ('dg', (f, fl2),
1134 actions[f2] = ('dg', (f, fl2),
1136 "local directory rename - get from " + f)
1135 "local directory rename - get from " + f)
1137 elif f in copy:
1136 elif f in copy:
1138 f2 = copy[f]
1137 f2 = copy[f]
1139 if f2 in m2:
1138 if f2 in m2:
1140 actions[f] = ('m', (f2, f, f2, False, pa.node()),
1139 actions[f] = ('m', (f2, f, f2, False, pa.node()),
1141 "remote copied from " + f2)
1140 "remote copied from " + f2)
1142 else:
1141 else:
1143 actions[f] = ('m', (f2, f, f2, True, pa.node()),
1142 actions[f] = ('m', (f2, f, f2, True, pa.node()),
1144 "remote moved from " + f2)
1143 "remote moved from " + f2)
1145 elif f not in ma:
1144 elif f not in ma:
1146 # local unknown, remote created: the logic is described by the
1145 # local unknown, remote created: the logic is described by the
1147 # following table:
1146 # following table:
1148 #
1147 #
1149 # force branchmerge different | action
1148 # force branchmerge different | action
1150 # n * * | create
1149 # n * * | create
1151 # y n * | create
1150 # y n * | create
1152 # y y n | create
1151 # y y n | create
1153 # y y y | merge
1152 # y y y | merge
1154 #
1153 #
1155 # Checking whether the files are different is expensive, so we
1154 # Checking whether the files are different is expensive, so we
1156 # don't do that when we can avoid it.
1155 # don't do that when we can avoid it.
1157 if not force:
1156 if not force:
1158 actions[f] = ('c', (fl2,), "remote created")
1157 actions[f] = ('c', (fl2,), "remote created")
1159 elif not branchmerge:
1158 elif not branchmerge:
1160 actions[f] = ('c', (fl2,), "remote created")
1159 actions[f] = ('c', (fl2,), "remote created")
1161 else:
1160 else:
1162 actions[f] = ('cm', (fl2, pa.node()),
1161 actions[f] = ('cm', (fl2, pa.node()),
1163 "remote created, get or merge")
1162 "remote created, get or merge")
1164 elif n2 != ma[f]:
1163 elif n2 != ma[f]:
1165 df = None
1164 df = None
1166 for d in dirmove:
1165 for d in dirmove:
1167 if f.startswith(d):
1166 if f.startswith(d):
1168 # new file added in a directory that was moved
1167 # new file added in a directory that was moved
1169 df = dirmove[d] + f[len(d):]
1168 df = dirmove[d] + f[len(d):]
1170 break
1169 break
1171 if df is not None and df in m1:
1170 if df is not None and df in m1:
1172 actions[df] = ('m', (df, f, f, False, pa.node()),
1171 actions[df] = ('m', (df, f, f, False, pa.node()),
1173 "local directory rename - respect move from " + f)
1172 "local directory rename - respect move from " + f)
1174 elif acceptremote:
1173 elif acceptremote:
1175 actions[f] = ('c', (fl2,), "remote recreating")
1174 actions[f] = ('c', (fl2,), "remote recreating")
1176 else:
1175 else:
1177 actions[f] = ('dc', (None, f, f, False, pa.node()),
1176 actions[f] = ('dc', (None, f, f, False, pa.node()),
1178 "prompt deleted/changed")
1177 "prompt deleted/changed")
1179
1178
1180 if repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
1179 if repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
1181 # If we are merging, look for path conflicts.
1180 # If we are merging, look for path conflicts.
1182 checkpathconflicts(repo, wctx, p2, actions)
1181 checkpathconflicts(repo, wctx, p2, actions)
1183
1182
1184 return actions, diverge, renamedelete
1183 return actions, diverge, renamedelete
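To make the shape of the returned `actions` mapping concrete, here is a minimal standalone sketch (plain Python, not part of merge.py; the filenames, args and ancestor placeholder are invented for illustration). It builds a small dict of the same form, filename -> (action type, type-specific args, reason), and groups it by action type:

# Illustrative sketch only: mirrors the {file: (action, args, msg)} shape
# that manifestmerge() returns, using made-up entries.
actions = {
    'a.txt': ('g', ('', False), "remote created"),                    # get
    'b.txt': ('m', ('b.txt', 'b.txt', 'b.txt', False, '<anc-node>'),  # merge
              "versions differ"),
    'c.txt': ('r', None, "other deleted"),                            # remove
}

def groupbytype(actions):
    """Group a manifestmerge-style action dict by action type."""
    bytype = {}
    for f, (m, args, msg) in sorted(actions.items()):
        bytype.setdefault(m, []).append((f, msg))
    return bytype

for m, entries in sorted(groupbytype(actions).items()):
    print(m, entries)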
1185
1184
1186 def _resolvetrivial(repo, wctx, mctx, ancestor, actions):
1185 def _resolvetrivial(repo, wctx, mctx, ancestor, actions):
1187 """Resolves false conflicts where the nodeid changed but the content
1186 """Resolves false conflicts where the nodeid changed but the content
1188 remained the same."""
1187 remained the same."""
1189
1188
1190 for f, (m, args, msg) in actions.items():
1189 for f, (m, args, msg) in actions.items():
1191 if m == 'cd' and f in ancestor and not wctx[f].cmp(ancestor[f]):
1190 if m == 'cd' and f in ancestor and not wctx[f].cmp(ancestor[f]):
1192 # local did change but ended up with same content
1191 # local did change but ended up with same content
1193 actions[f] = 'r', None, "prompt same"
1192 actions[f] = 'r', None, "prompt same"
1194 elif m == 'dc' and f in ancestor and not mctx[f].cmp(ancestor[f]):
1193 elif m == 'dc' and f in ancestor and not mctx[f].cmp(ancestor[f]):
1195 # remote did change but ended up with same content
1194 # remote did change but ended up with same content
1196 del actions[f] # don't get = keep local deleted
1195 del actions[f] # don't get = keep local deleted
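A standalone toy version of the idea in _resolvetrivial(), using plain dicts of strings in place of contexts and filectx comparisons (the helper name and the data are invented for illustration):

def resolvetrivial(actions, local, remote, ancestor):
    # 'cd': local changed / remote deleted; if the local "change" matches the
    # ancestor content, it was trivial, so just remove the file.
    # 'dc': remote changed / local deleted; if the remote "change" matches the
    # ancestor content, keep the local deletion by dropping the action.
    for f, (m, args, msg) in list(actions.items()):
        if m == 'cd' and f in ancestor and local.get(f) == ancestor[f]:
            actions[f] = ('r', None, "prompt same")
        elif m == 'dc' and f in ancestor and remote.get(f) == ancestor[f]:
            del actions[f]

actions = {'x': ('dc', None, "prompt deleted/changed")}
resolvetrivial(actions, local={}, remote={'x': 'v1'}, ancestor={'x': 'v1'})
print(actions)  # {} -- the remote change was trivial, so the deletion stands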
1197
1196
1198 def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force,
1197 def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force,
1199 acceptremote, followcopies, matcher=None,
1198 acceptremote, followcopies, matcher=None,
1200 mergeforce=False):
1199 mergeforce=False):
1201 """Calculate the actions needed to merge mctx into wctx using ancestors"""
1200 """Calculate the actions needed to merge mctx into wctx using ancestors"""
1202 # Avoid cycle.
1201 # Avoid cycle.
1203 from . import sparse
1202 from . import sparse
1204
1203
1205 if len(ancestors) == 1: # default
1204 if len(ancestors) == 1: # default
1206 actions, diverge, renamedelete = manifestmerge(
1205 actions, diverge, renamedelete = manifestmerge(
1207 repo, wctx, mctx, ancestors[0], branchmerge, force, matcher,
1206 repo, wctx, mctx, ancestors[0], branchmerge, force, matcher,
1208 acceptremote, followcopies)
1207 acceptremote, followcopies)
1209 _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
1208 _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
1210
1209
1211 else: # only when merge.preferancestor=* - the default
1210 else: # only when merge.preferancestor=* - the default
1212 repo.ui.note(
1211 repo.ui.note(
1213 _("note: merging %s and %s using bids from ancestors %s\n") %
1212 _("note: merging %s and %s using bids from ancestors %s\n") %
1214 (wctx, mctx, _(' and ').join(pycompat.bytestr(anc)
1213 (wctx, mctx, _(' and ').join(pycompat.bytestr(anc)
1215 for anc in ancestors)))
1214 for anc in ancestors)))
1216
1215
1217 # Call for bids
1216 # Call for bids
1218 fbids = {} # mapping filename to bids (action method to list of actions)
1217 fbids = {} # mapping filename to bids (action method to list of actions)
1219 diverge, renamedelete = None, None
1218 diverge, renamedelete = None, None
1220 for ancestor in ancestors:
1219 for ancestor in ancestors:
1221 repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
1220 repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
1222 actions, diverge1, renamedelete1 = manifestmerge(
1221 actions, diverge1, renamedelete1 = manifestmerge(
1223 repo, wctx, mctx, ancestor, branchmerge, force, matcher,
1222 repo, wctx, mctx, ancestor, branchmerge, force, matcher,
1224 acceptremote, followcopies, forcefulldiff=True)
1223 acceptremote, followcopies, forcefulldiff=True)
1225 _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
1224 _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
1226
1225
1227 # Track the shortest set of warnings on the theory that bid
1226 # Track the shortest set of warnings on the theory that bid
1228 # merge will correctly incorporate more information
1227 # merge will correctly incorporate more information
1229 if diverge is None or len(diverge1) < len(diverge):
1228 if diverge is None or len(diverge1) < len(diverge):
1230 diverge = diverge1
1229 diverge = diverge1
1231 if renamedelete is None or len(renamedelete) < len(renamedelete1):
1230 if renamedelete is None or len(renamedelete) < len(renamedelete1):
1232 renamedelete = renamedelete1
1231 renamedelete = renamedelete1
1233
1232
1234 for f, a in sorted(actions.iteritems()):
1233 for f, a in sorted(actions.iteritems()):
1235 m, args, msg = a
1234 m, args, msg = a
1236 repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
1235 repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m))
1237 if f in fbids:
1236 if f in fbids:
1238 d = fbids[f]
1237 d = fbids[f]
1239 if m in d:
1238 if m in d:
1240 d[m].append(a)
1239 d[m].append(a)
1241 else:
1240 else:
1242 d[m] = [a]
1241 d[m] = [a]
1243 else:
1242 else:
1244 fbids[f] = {m: [a]}
1243 fbids[f] = {m: [a]}
1245
1244
1246 # Pick the best bid for each file
1245 # Pick the best bid for each file
1247 repo.ui.note(_('\nauction for merging merge bids\n'))
1246 repo.ui.note(_('\nauction for merging merge bids\n'))
1248 actions = {}
1247 actions = {}
1249 dms = [] # filenames that have dm actions
1248 dms = [] # filenames that have dm actions
1250 for f, bids in sorted(fbids.items()):
1249 for f, bids in sorted(fbids.items()):
1251 # bids is a mapping from action method to a list of actions
1250 # bids is a mapping from action method to a list of actions
1252 # Consensus?
1251 # Consensus?
1253 if len(bids) == 1: # all bids are the same kind of method
1252 if len(bids) == 1: # all bids are the same kind of method
1254 m, l = list(bids.items())[0]
1253 m, l = list(bids.items())[0]
1255 if all(a == l[0] for a in l[1:]): # several ancestors may bid this way; check the bids agree
1254 if all(a == l[0] for a in l[1:]): # several ancestors may bid this way; check the bids agree
1256 repo.ui.note(_(" %s: consensus for %s\n") % (f, m))
1255 repo.ui.note(_(" %s: consensus for %s\n") % (f, m))
1257 actions[f] = l[0]
1256 actions[f] = l[0]
1258 if m == 'dm':
1257 if m == 'dm':
1259 dms.append(f)
1258 dms.append(f)
1260 continue
1259 continue
1261 # If keep is an option, just do it.
1260 # If keep is an option, just do it.
1262 if 'k' in bids:
1261 if 'k' in bids:
1263 repo.ui.note(_(" %s: picking 'keep' action\n") % f)
1262 repo.ui.note(_(" %s: picking 'keep' action\n") % f)
1264 actions[f] = bids['k'][0]
1263 actions[f] = bids['k'][0]
1265 continue
1264 continue
1266 # If there are gets and they all agree [how could they not?], do it.
1265 # If there are gets and they all agree [how could they not?], do it.
1267 if 'g' in bids:
1266 if 'g' in bids:
1268 ga0 = bids['g'][0]
1267 ga0 = bids['g'][0]
1269 if all(a == ga0 for a in bids['g'][1:]):
1268 if all(a == ga0 for a in bids['g'][1:]):
1270 repo.ui.note(_(" %s: picking 'get' action\n") % f)
1269 repo.ui.note(_(" %s: picking 'get' action\n") % f)
1271 actions[f] = ga0
1270 actions[f] = ga0
1272 continue
1271 continue
1273 # TODO: Consider other simple actions such as mode changes
1272 # TODO: Consider other simple actions such as mode changes
1274 # Handle inefficient democrazy.
1273 # Handle inefficient democrazy.
1275 repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
1274 repo.ui.note(_(' %s: multiple bids for merge action:\n') % f)
1276 for m, l in sorted(bids.items()):
1275 for m, l in sorted(bids.items()):
1277 for _f, args, msg in l:
1276 for _f, args, msg in l:
1278 repo.ui.note(' %s -> %s\n' % (msg, m))
1277 repo.ui.note(' %s -> %s\n' % (msg, m))
1279 # Pick random action. TODO: Instead, prompt user when resolving
1278 # Pick random action. TODO: Instead, prompt user when resolving
1280 m, l = list(bids.items())[0]
1279 m, l = list(bids.items())[0]
1281 repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
1280 repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
1282 (f, m))
1281 (f, m))
1283 actions[f] = l[0]
1282 actions[f] = l[0]
1284 if m == 'dm':
1283 if m == 'dm':
1285 dms.append(f)
1284 dms.append(f)
1286 continue
1285 continue
1287 # Work around 'dm' actions, which can cause multiple actions for the same file
1286 # Work around 'dm' actions, which can cause multiple actions for the same file
1288 for f in dms:
1287 for f in dms:
1289 dm, (f0, flags), msg = actions[f]
1288 dm, (f0, flags), msg = actions[f]
1290 assert dm == 'dm', dm
1289 assert dm == 'dm', dm
1291 if f0 in actions and actions[f0][0] == 'r':
1290 if f0 in actions and actions[f0][0] == 'r':
1292 # We have one bid for removing a file and another for moving it.
1291 # We have one bid for removing a file and another for moving it.
1293 # These two could be merged as first move and then delete ...
1292 # These two could be merged as first move and then delete ...
1294 # but instead drop moving and just delete.
1293 # but instead drop moving and just delete.
1295 del actions[f]
1294 del actions[f]
1296 repo.ui.note(_('end of auction\n\n'))
1295 repo.ui.note(_('end of auction\n\n'))
1297
1296
1298 _resolvetrivial(repo, wctx, mctx, ancestors[0], actions)
1297 _resolvetrivial(repo, wctx, mctx, ancestors[0], actions)
1299
1298
1300 if wctx.rev() is None:
1299 if wctx.rev() is None:
1301 fractions = _forgetremoved(wctx, mctx, branchmerge)
1300 fractions = _forgetremoved(wctx, mctx, branchmerge)
1302 actions.update(fractions)
1301 actions.update(fractions)
1303
1302
1304 prunedactions = sparse.filterupdatesactions(repo, wctx, mctx, branchmerge,
1303 prunedactions = sparse.filterupdatesactions(repo, wctx, mctx, branchmerge,
1305 actions)
1304 actions)
1306
1305
1307 return prunedactions, diverge, renamedelete
1306 return prunedactions, diverge, renamedelete
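The bid-merge auction above can be hard to picture from the code alone. Here is a minimal standalone sketch of the consensus / "keep wins" logic, with short strings standing in for real action tuples (all names and values are invented; the real code also handles 'g' agreement and ambiguity warnings):

fbids = {
    'a.txt': {'g': ['get-from-remote', 'get-from-remote']},   # consensus
    'b.txt': {'k': ['keep-local'], 'g': ['get-from-remote']}, # 'k' wins
}

chosen = {}
for f, bids in sorted(fbids.items()):
    if len(bids) == 1:                 # every ancestor proposed the same kind
        m, l = list(bids.items())[0]
        chosen[f] = (m, l[0])
    elif 'k' in bids:                  # keeping the local file is always safe
        chosen[f] = ('k', bids['k'][0])
    else:                              # ambiguous; the real code warns and picks one
        m, l = sorted(bids.items())[0]
        chosen[f] = (m, l[0])
print(chosen)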
1308
1307
1309 def _getcwd():
1308 def _getcwd():
1310 try:
1309 try:
1311 return pycompat.getcwd()
1310 return pycompat.getcwd()
1312 except OSError as err:
1311 except OSError as err:
1313 if err.errno == errno.ENOENT:
1312 if err.errno == errno.ENOENT:
1314 return None
1313 return None
1315 raise
1314 raise
1316
1315
1317 def batchremove(repo, wctx, actions):
1316 def batchremove(repo, wctx, actions):
1318 """apply removes to the working directory
1317 """apply removes to the working directory
1319
1318
1320 yields tuples for progress updates
1319 yields tuples for progress updates
1321 """
1320 """
1322 verbose = repo.ui.verbose
1321 verbose = repo.ui.verbose
1323 cwd = _getcwd()
1322 cwd = _getcwd()
1324 i = 0
1323 i = 0
1325 for f, args, msg in actions:
1324 for f, args, msg in actions:
1326 repo.ui.debug(" %s: %s -> r\n" % (f, msg))
1325 repo.ui.debug(" %s: %s -> r\n" % (f, msg))
1327 if verbose:
1326 if verbose:
1328 repo.ui.note(_("removing %s\n") % f)
1327 repo.ui.note(_("removing %s\n") % f)
1329 wctx[f].audit()
1328 wctx[f].audit()
1330 try:
1329 try:
1331 wctx[f].remove(ignoremissing=True)
1330 wctx[f].remove(ignoremissing=True)
1332 except OSError as inst:
1331 except OSError as inst:
1333 repo.ui.warn(_("update failed to remove %s: %s!\n") %
1332 repo.ui.warn(_("update failed to remove %s: %s!\n") %
1334 (f, inst.strerror))
1333 (f, inst.strerror))
1335 if i == 100:
1334 if i == 100:
1336 yield i, f
1335 yield i, f
1337 i = 0
1336 i = 0
1338 i += 1
1337 i += 1
1339 if i > 0:
1338 if i > 0:
1340 yield i, f
1339 yield i, f
1341
1340
1342 if cwd and not _getcwd():
1341 if cwd and not _getcwd():
1343 # cwd was removed in the course of removing files; print a helpful
1342 # cwd was removed in the course of removing files; print a helpful
1344 # warning.
1343 # warning.
1345 repo.ui.warn(_("current directory was removed\n"
1344 repo.ui.warn(_("current directory was removed\n"
1346 "(consider changing to repo root: %s)\n") % repo.root)
1345 "(consider changing to repo root: %s)\n") % repo.root)
1347
1346
1348 def batchget(repo, mctx, wctx, actions):
1347 def batchget(repo, mctx, wctx, actions):
1349 """apply gets to the working directory
1348 """apply gets to the working directory
1350
1349
1351 mctx is the context to get from
1350 mctx is the context to get from
1352
1351
1353 yields tuples for progress updates
1352 yields tuples for progress updates
1354 """
1353 """
1355 verbose = repo.ui.verbose
1354 verbose = repo.ui.verbose
1356 fctx = mctx.filectx
1355 fctx = mctx.filectx
1357 ui = repo.ui
1356 ui = repo.ui
1358 i = 0
1357 i = 0
1359 with repo.wvfs.backgroundclosing(ui, expectedcount=len(actions)):
1358 with repo.wvfs.backgroundclosing(ui, expectedcount=len(actions)):
1360 for f, (flags, backup), msg in actions:
1359 for f, (flags, backup), msg in actions:
1361 repo.ui.debug(" %s: %s -> g\n" % (f, msg))
1360 repo.ui.debug(" %s: %s -> g\n" % (f, msg))
1362 if verbose:
1361 if verbose:
1363 repo.ui.note(_("getting %s\n") % f)
1362 repo.ui.note(_("getting %s\n") % f)
1364
1363
1365 if backup:
1364 if backup:
1366 # If a file or directory exists with the same name, back that
1365 # If a file or directory exists with the same name, back that
1367 # up. Otherwise, look to see if there is a file that conflicts
1366 # up. Otherwise, look to see if there is a file that conflicts
1368 # with a directory this file is in, and if so, back that up.
1367 # with a directory this file is in, and if so, back that up.
1369 absf = repo.wjoin(f)
1368 absf = repo.wjoin(f)
1370 if not repo.wvfs.lexists(f):
1369 if not repo.wvfs.lexists(f):
1371 for p in util.finddirs(f):
1370 for p in util.finddirs(f):
1372 if repo.wvfs.isfileorlink(p):
1371 if repo.wvfs.isfileorlink(p):
1373 absf = repo.wjoin(p)
1372 absf = repo.wjoin(p)
1374 break
1373 break
1375 orig = scmutil.origpath(ui, repo, absf)
1374 orig = scmutil.origpath(ui, repo, absf)
1376 if repo.wvfs.lexists(absf):
1375 if repo.wvfs.lexists(absf):
1377 util.rename(absf, orig)
1376 util.rename(absf, orig)
1378 wctx[f].clearunknown()
1377 wctx[f].clearunknown()
1379 atomictemp = ui.configbool("experimental", "update.atomic-file")
1378 atomictemp = ui.configbool("experimental", "update.atomic-file")
1380 wctx[f].write(fctx(f).data(), flags, backgroundclose=True,
1379 wctx[f].write(fctx(f).data(), flags, backgroundclose=True,
1381 atomictemp=atomictemp)
1380 atomictemp=atomictemp)
1382 if i == 100:
1381 if i == 100:
1383 yield i, f
1382 yield i, f
1384 i = 0
1383 i = 0
1385 i += 1
1384 i += 1
1386 if i > 0:
1385 if i > 0:
1387 yield i, f
1386 yield i, f
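The backup step in batchget() renames whatever is in the way of the incoming file to an .orig-style location: either the target path itself, or a file that occupies one of the target's parent directories. A standalone sketch of that decision using os.path (parentdirs/backuptarget are invented helpers, not Mercurial API, and origpath handling is omitted):

import os

def parentdirs(path):
    """Yield ancestor directories of path, closest first ('a/b/c' -> 'a/b', 'a')."""
    while True:
        path = os.path.dirname(path)
        if not path:
            return
        yield path

def backuptarget(root, f):
    """Return the path that would need to be moved aside before writing f."""
    absf = os.path.join(root, f)
    if not os.path.lexists(absf):
        for p in parentdirs(f):
            candidate = os.path.join(root, p)
            if os.path.isfile(candidate) or os.path.islink(candidate):
                return candidate   # a file blocks a needed directory
        return None                # nothing in the way
    return absf                    # the path itself exists and gets backed up

print(backuptarget('/tmp', 'newdir/newfile'))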
1388
1387
1389
1388
1390 def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None):
1389 def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None):
1391 """apply the merge action list to the working directory
1390 """apply the merge action list to the working directory
1392
1391
1393 wctx is the working copy context
1392 wctx is the working copy context
1394 mctx is the context to be merged into the working copy
1393 mctx is the context to be merged into the working copy
1395
1394
1396 Return a tuple of counts (updated, merged, removed, unresolved) that
1395 Return a tuple of counts (updated, merged, removed, unresolved) that
1397 describes how many files were affected by the update.
1396 describes how many files were affected by the update.
1398 """
1397 """
1399
1398
1400 updated, merged, removed = 0, 0, 0
1399 updated, merged, removed = 0, 0, 0
1401 ms = mergestate.clean(repo, wctx.p1().node(), mctx.node(), labels)
1400 ms = mergestate.clean(repo, wctx.p1().node(), mctx.node(), labels)
1402 moves = []
1401 moves = []
1403 for m, l in actions.items():
1402 for m, l in actions.items():
1404 l.sort()
1403 l.sort()
1405
1404
1406 # 'cd' and 'dc' actions are treated like other merge conflicts
1405 # 'cd' and 'dc' actions are treated like other merge conflicts
1407 mergeactions = sorted(actions['cd'])
1406 mergeactions = sorted(actions['cd'])
1408 mergeactions.extend(sorted(actions['dc']))
1407 mergeactions.extend(sorted(actions['dc']))
1409 mergeactions.extend(actions['m'])
1408 mergeactions.extend(actions['m'])
1410 for f, args, msg in mergeactions:
1409 for f, args, msg in mergeactions:
1411 f1, f2, fa, move, anc = args
1410 f1, f2, fa, move, anc = args
1412 if f == '.hgsubstate': # merged internally
1411 if f == '.hgsubstate': # merged internally
1413 continue
1412 continue
1414 if f1 is None:
1413 if f1 is None:
1415 fcl = filemerge.absentfilectx(wctx, fa)
1414 fcl = filemerge.absentfilectx(wctx, fa)
1416 else:
1415 else:
1417 repo.ui.debug(" preserving %s for resolve of %s\n" % (f1, f))
1416 repo.ui.debug(" preserving %s for resolve of %s\n" % (f1, f))
1418 fcl = wctx[f1]
1417 fcl = wctx[f1]
1419 if f2 is None:
1418 if f2 is None:
1420 fco = filemerge.absentfilectx(mctx, fa)
1419 fco = filemerge.absentfilectx(mctx, fa)
1421 else:
1420 else:
1422 fco = mctx[f2]
1421 fco = mctx[f2]
1423 actx = repo[anc]
1422 actx = repo[anc]
1424 if fa in actx:
1423 if fa in actx:
1425 fca = actx[fa]
1424 fca = actx[fa]
1426 else:
1425 else:
1427 # TODO: move to absentfilectx
1426 # TODO: move to absentfilectx
1428 fca = repo.filectx(f1, fileid=nullrev)
1427 fca = repo.filectx(f1, fileid=nullrev)
1429 ms.add(fcl, fco, fca, f)
1428 ms.add(fcl, fco, fca, f)
1430 if f1 != f and move:
1429 if f1 != f and move:
1431 moves.append(f1)
1430 moves.append(f1)
1432
1431
1433 _updating = _('updating')
1432 _updating = _('updating')
1434 _files = _('files')
1433 _files = _('files')
1435 progress = repo.ui.progress
1434 progress = repo.ui.progress
1436
1435
1437 # remove renamed files after safely stored
1436 # remove renamed files after safely stored
1438 for f in moves:
1437 for f in moves:
1439 if wctx[f].lexists():
1438 if wctx[f].lexists():
1440 repo.ui.debug("removing %s\n" % f)
1439 repo.ui.debug("removing %s\n" % f)
1441 wctx[f].audit()
1440 wctx[f].audit()
1442 wctx[f].remove()
1441 wctx[f].remove()
1443
1442
1444 numupdates = sum(len(l) for m, l in actions.items() if m != 'k')
1443 numupdates = sum(len(l) for m, l in actions.items() if m != 'k')
1445 z = 0
1444 z = 0
1446
1445
1447 if [a for a in actions['r'] if a[0] == '.hgsubstate']:
1446 if [a for a in actions['r'] if a[0] == '.hgsubstate']:
1448 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1447 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1449
1448
1450 # record path conflicts
1449 # record path conflicts
1451 for f, args, msg in actions['p']:
1450 for f, args, msg in actions['p']:
1452 f1, fo = args
1451 f1, fo = args
1453 s = repo.ui.status
1452 s = repo.ui.status
1454 s(_("%s: path conflict - a file or link has the same name as a "
1453 s(_("%s: path conflict - a file or link has the same name as a "
1455 "directory\n") % f)
1454 "directory\n") % f)
1456 if fo == 'l':
1455 if fo == 'l':
1457 s(_("the local file has been renamed to %s\n") % f1)
1456 s(_("the local file has been renamed to %s\n") % f1)
1458 else:
1457 else:
1459 s(_("the remote file has been renamed to %s\n") % f1)
1458 s(_("the remote file has been renamed to %s\n") % f1)
1460 s(_("resolve manually then use 'hg resolve --mark %s'\n") % f)
1459 s(_("resolve manually then use 'hg resolve --mark %s'\n") % f)
1461 ms.addpath(f, f1, fo)
1460 ms.addpath(f, f1, fo)
1462 z += 1
1461 z += 1
1463 progress(_updating, z, item=f, total=numupdates, unit=_files)
1462 progress(_updating, z, item=f, total=numupdates, unit=_files)
1464
1463
1465 # When merging in-memory, we can't support worker processes, so set the
1464 # When merging in-memory, we can't support worker processes, so set the
1466 # per-item cost at 0 in that case.
1465 # per-item cost at 0 in that case.
1467 cost = 0 if wctx.isinmemory() else 0.001
1466 cost = 0 if wctx.isinmemory() else 0.001
1468
1467
1469 # remove in parallel (must come before resolving path conflicts and getting)
1468 # remove in parallel (must come before resolving path conflicts and getting)
1470 prog = worker.worker(repo.ui, cost, batchremove, (repo, wctx),
1469 prog = worker.worker(repo.ui, cost, batchremove, (repo, wctx),
1471 actions['r'])
1470 actions['r'])
1472 for i, item in prog:
1471 for i, item in prog:
1473 z += i
1472 z += i
1474 progress(_updating, z, item=item, total=numupdates, unit=_files)
1473 progress(_updating, z, item=item, total=numupdates, unit=_files)
1475 removed = len(actions['r'])
1474 removed = len(actions['r'])
1476
1475
1477 # resolve path conflicts (must come before getting)
1476 # resolve path conflicts (must come before getting)
1478 for f, args, msg in actions['pr']:
1477 for f, args, msg in actions['pr']:
1479 repo.ui.debug(" %s: %s -> pr\n" % (f, msg))
1478 repo.ui.debug(" %s: %s -> pr\n" % (f, msg))
1480 f0, = args
1479 f0, = args
1481 if wctx[f0].lexists():
1480 if wctx[f0].lexists():
1482 repo.ui.note(_("moving %s to %s\n") % (f0, f))
1481 repo.ui.note(_("moving %s to %s\n") % (f0, f))
1483 wctx[f].audit()
1482 wctx[f].audit()
1484 wctx[f].write(wctx.filectx(f0).data(), wctx.filectx(f0).flags())
1483 wctx[f].write(wctx.filectx(f0).data(), wctx.filectx(f0).flags())
1485 wctx[f0].remove()
1484 wctx[f0].remove()
1486 z += 1
1485 z += 1
1487 progress(_updating, z, item=f, total=numupdates, unit=_files)
1486 progress(_updating, z, item=f, total=numupdates, unit=_files)
1488
1487
1489 # get in parallel
1488 # get in parallel
1490 prog = worker.worker(repo.ui, cost, batchget, (repo, mctx, wctx),
1489 prog = worker.worker(repo.ui, cost, batchget, (repo, mctx, wctx),
1491 actions['g'])
1490 actions['g'])
1492 for i, item in prog:
1491 for i, item in prog:
1493 z += i
1492 z += i
1494 progress(_updating, z, item=item, total=numupdates, unit=_files)
1493 progress(_updating, z, item=item, total=numupdates, unit=_files)
1495 updated = len(actions['g'])
1494 updated = len(actions['g'])
1496
1495
1497 if [a for a in actions['g'] if a[0] == '.hgsubstate']:
1496 if [a for a in actions['g'] if a[0] == '.hgsubstate']:
1498 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1497 subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
1499
1498
1500 # forget (manifest only, just log it) (must come first)
1499 # forget (manifest only, just log it) (must come first)
1501 for f, args, msg in actions['f']:
1500 for f, args, msg in actions['f']:
1502 repo.ui.debug(" %s: %s -> f\n" % (f, msg))
1501 repo.ui.debug(" %s: %s -> f\n" % (f, msg))
1503 z += 1
1502 z += 1
1504 progress(_updating, z, item=f, total=numupdates, unit=_files)
1503 progress(_updating, z, item=f, total=numupdates, unit=_files)
1505
1504
1506 # re-add (manifest only, just log it)
1505 # re-add (manifest only, just log it)
1507 for f, args, msg in actions['a']:
1506 for f, args, msg in actions['a']:
1508 repo.ui.debug(" %s: %s -> a\n" % (f, msg))
1507 repo.ui.debug(" %s: %s -> a\n" % (f, msg))
1509 z += 1
1508 z += 1
1510 progress(_updating, z, item=f, total=numupdates, unit=_files)
1509 progress(_updating, z, item=f, total=numupdates, unit=_files)
1511
1510
1512 # re-add/mark as modified (manifest only, just log it)
1511 # re-add/mark as modified (manifest only, just log it)
1513 for f, args, msg in actions['am']:
1512 for f, args, msg in actions['am']:
1514 repo.ui.debug(" %s: %s -> am\n" % (f, msg))
1513 repo.ui.debug(" %s: %s -> am\n" % (f, msg))
1515 z += 1
1514 z += 1
1516 progress(_updating, z, item=f, total=numupdates, unit=_files)
1515 progress(_updating, z, item=f, total=numupdates, unit=_files)
1517
1516
1518 # keep (noop, just log it)
1517 # keep (noop, just log it)
1519 for f, args, msg in actions['k']:
1518 for f, args, msg in actions['k']:
1520 repo.ui.debug(" %s: %s -> k\n" % (f, msg))
1519 repo.ui.debug(" %s: %s -> k\n" % (f, msg))
1521 # no progress
1520 # no progress
1522
1521
1523 # directory rename, move local
1522 # directory rename, move local
1524 for f, args, msg in actions['dm']:
1523 for f, args, msg in actions['dm']:
1525 repo.ui.debug(" %s: %s -> dm\n" % (f, msg))
1524 repo.ui.debug(" %s: %s -> dm\n" % (f, msg))
1526 z += 1
1525 z += 1
1527 progress(_updating, z, item=f, total=numupdates, unit=_files)
1526 progress(_updating, z, item=f, total=numupdates, unit=_files)
1528 f0, flags = args
1527 f0, flags = args
1529 repo.ui.note(_("moving %s to %s\n") % (f0, f))
1528 repo.ui.note(_("moving %s to %s\n") % (f0, f))
1530 wctx[f].audit()
1529 wctx[f].audit()
1531 wctx[f].write(wctx.filectx(f0).data(), flags)
1530 wctx[f].write(wctx.filectx(f0).data(), flags)
1532 wctx[f0].remove()
1531 wctx[f0].remove()
1533 updated += 1
1532 updated += 1
1534
1533
1535 # local directory rename, get
1534 # local directory rename, get
1536 for f, args, msg in actions['dg']:
1535 for f, args, msg in actions['dg']:
1537 repo.ui.debug(" %s: %s -> dg\n" % (f, msg))
1536 repo.ui.debug(" %s: %s -> dg\n" % (f, msg))
1538 z += 1
1537 z += 1
1539 progress(_updating, z, item=f, total=numupdates, unit=_files)
1538 progress(_updating, z, item=f, total=numupdates, unit=_files)
1540 f0, flags = args
1539 f0, flags = args
1541 repo.ui.note(_("getting %s to %s\n") % (f0, f))
1540 repo.ui.note(_("getting %s to %s\n") % (f0, f))
1542 wctx[f].write(mctx.filectx(f0).data(), flags)
1541 wctx[f].write(mctx.filectx(f0).data(), flags)
1543 updated += 1
1542 updated += 1
1544
1543
1545 # exec
1544 # exec
1546 for f, args, msg in actions['e']:
1545 for f, args, msg in actions['e']:
1547 repo.ui.debug(" %s: %s -> e\n" % (f, msg))
1546 repo.ui.debug(" %s: %s -> e\n" % (f, msg))
1548 z += 1
1547 z += 1
1549 progress(_updating, z, item=f, total=numupdates, unit=_files)
1548 progress(_updating, z, item=f, total=numupdates, unit=_files)
1550 flags, = args
1549 flags, = args
1551 wctx[f].audit()
1550 wctx[f].audit()
1552 wctx[f].setflags('l' in flags, 'x' in flags)
1551 wctx[f].setflags('l' in flags, 'x' in flags)
1553 updated += 1
1552 updated += 1
1554
1553
1555 # the ordering is important here -- ms.mergedriver will raise if the merge
1554 # the ordering is important here -- ms.mergedriver will raise if the merge
1556 # driver has changed, and we want to be able to bypass it when overwrite is
1555 # driver has changed, and we want to be able to bypass it when overwrite is
1557 # True
1556 # True
1558 usemergedriver = not overwrite and mergeactions and ms.mergedriver
1557 usemergedriver = not overwrite and mergeactions and ms.mergedriver
1559
1558
1560 if usemergedriver:
1559 if usemergedriver:
1561 if wctx.isinmemory():
1560 if wctx.isinmemory():
1562 raise error.InMemoryMergeConflictsError("in-memory merge does not "
1561 raise error.InMemoryMergeConflictsError("in-memory merge does not "
1563 "support mergedriver")
1562 "support mergedriver")
1564 ms.commit()
1563 ms.commit()
1565 proceed = driverpreprocess(repo, ms, wctx, labels=labels)
1564 proceed = driverpreprocess(repo, ms, wctx, labels=labels)
1566 # the driver might leave some files unresolved
1565 # the driver might leave some files unresolved
1567 unresolvedf = set(ms.unresolved())
1566 unresolvedf = set(ms.unresolved())
1568 if not proceed:
1567 if not proceed:
1569 # XXX setting unresolved to at least 1 is a hack to make sure we
1568 # XXX setting unresolved to at least 1 is a hack to make sure we
1570 # error out
1569 # error out
1571 return updated, merged, removed, max(len(unresolvedf), 1)
1570 return updated, merged, removed, max(len(unresolvedf), 1)
1572 newactions = []
1571 newactions = []
1573 for f, args, msg in mergeactions:
1572 for f, args, msg in mergeactions:
1574 if f in unresolvedf:
1573 if f in unresolvedf:
1575 newactions.append((f, args, msg))
1574 newactions.append((f, args, msg))
1576 mergeactions = newactions
1575 mergeactions = newactions
1577
1576
1578 try:
1577 try:
1579 # premerge
1578 # premerge
1580 tocomplete = []
1579 tocomplete = []
1581 for f, args, msg in mergeactions:
1580 for f, args, msg in mergeactions:
1582 repo.ui.debug(" %s: %s -> m (premerge)\n" % (f, msg))
1581 repo.ui.debug(" %s: %s -> m (premerge)\n" % (f, msg))
1583 z += 1
1582 z += 1
1584 progress(_updating, z, item=f, total=numupdates, unit=_files)
1583 progress(_updating, z, item=f, total=numupdates, unit=_files)
1585 if f == '.hgsubstate': # subrepo states need updating
1584 if f == '.hgsubstate': # subrepo states need updating
1586 subrepoutil.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
1585 subrepoutil.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
1587 overwrite, labels)
1586 overwrite, labels)
1588 continue
1587 continue
1589 wctx[f].audit()
1588 wctx[f].audit()
1590 complete, r = ms.preresolve(f, wctx)
1589 complete, r = ms.preresolve(f, wctx)
1591 if not complete:
1590 if not complete:
1592 numupdates += 1
1591 numupdates += 1
1593 tocomplete.append((f, args, msg))
1592 tocomplete.append((f, args, msg))
1594
1593
1595 # merge
1594 # merge
1596 for f, args, msg in tocomplete:
1595 for f, args, msg in tocomplete:
1597 repo.ui.debug(" %s: %s -> m (merge)\n" % (f, msg))
1596 repo.ui.debug(" %s: %s -> m (merge)\n" % (f, msg))
1598 z += 1
1597 z += 1
1599 progress(_updating, z, item=f, total=numupdates, unit=_files)
1598 progress(_updating, z, item=f, total=numupdates, unit=_files)
1600 ms.resolve(f, wctx)
1599 ms.resolve(f, wctx)
1601
1600
1602 finally:
1601 finally:
1603 ms.commit()
1602 ms.commit()
1604
1603
1605 unresolved = ms.unresolvedcount()
1604 unresolved = ms.unresolvedcount()
1606
1605
1607 if usemergedriver and not unresolved and ms.mdstate() != 's':
1606 if usemergedriver and not unresolved and ms.mdstate() != 's':
1608 if not driverconclude(repo, ms, wctx, labels=labels):
1607 if not driverconclude(repo, ms, wctx, labels=labels):
1609 # XXX setting unresolved to at least 1 is a hack to make sure we
1608 # XXX setting unresolved to at least 1 is a hack to make sure we
1610 # error out
1609 # error out
1611 unresolved = max(unresolved, 1)
1610 unresolved = max(unresolved, 1)
1612
1611
1613 ms.commit()
1612 ms.commit()
1614
1613
1615 msupdated, msmerged, msremoved = ms.counts()
1614 msupdated, msmerged, msremoved = ms.counts()
1616 updated += msupdated
1615 updated += msupdated
1617 merged += msmerged
1616 merged += msmerged
1618 removed += msremoved
1617 removed += msremoved
1619
1618
1620 extraactions = ms.actions()
1619 extraactions = ms.actions()
1621 if extraactions:
1620 if extraactions:
1622 mfiles = set(a[0] for a in actions['m'])
1621 mfiles = set(a[0] for a in actions['m'])
1623 for k, acts in extraactions.iteritems():
1622 for k, acts in extraactions.iteritems():
1624 actions[k].extend(acts)
1623 actions[k].extend(acts)
1625 # Remove these files from actions['m'] as well. This is important
1624 # Remove these files from actions['m'] as well. This is important
1626 # because in recordupdates, files in actions['m'] are processed
1625 # because in recordupdates, files in actions['m'] are processed
1627 # after files in other actions, and the merge driver might add
1626 # after files in other actions, and the merge driver might add
1628 # files to those actions via extraactions above. This can lead to a
1627 # files to those actions via extraactions above. This can lead to a
1629 # file being recorded twice, with poor results. This is especially
1628 # file being recorded twice, with poor results. This is especially
1630 # problematic for actions['r'] (currently only possible with the
1629 # problematic for actions['r'] (currently only possible with the
1631 # merge driver in the initial merge process; interrupted merges
1630 # merge driver in the initial merge process; interrupted merges
1632 # don't go through this flow).
1631 # don't go through this flow).
1633 #
1632 #
1634 # The real fix here is to have indexes by both file and action so
1633 # The real fix here is to have indexes by both file and action so
1635 # that when the action for a file is changed it is automatically
1634 # that when the action for a file is changed it is automatically
1636 # reflected in the other action lists. But that involves a more
1635 # reflected in the other action lists. But that involves a more
1637 # complex data structure, so this will do for now.
1636 # complex data structure, so this will do for now.
1638 #
1637 #
1639 # We don't need to do the same operation for 'dc' and 'cd' because
1638 # We don't need to do the same operation for 'dc' and 'cd' because
1640 # those lists aren't consulted again.
1639 # those lists aren't consulted again.
1641 mfiles.difference_update(a[0] for a in acts)
1640 mfiles.difference_update(a[0] for a in acts)
1642
1641
1643 actions['m'] = [a for a in actions['m'] if a[0] in mfiles]
1642 actions['m'] = [a for a in actions['m'] if a[0] in mfiles]
1644
1643
1645 progress(_updating, None, total=numupdates, unit=_files)
1644 progress(_updating, None, total=numupdates, unit=_files)
1646
1645
1647 return updated, merged, removed, unresolved
1646 return updated, merged, removed, unresolved
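applyupdates() reports its work as an (updated, merged, removed, unresolved) tuple. A tiny standalone helper showing how such a tuple is typically rendered as a status line (the helper itself is illustrative, not Mercurial API):

def summarize(stats):
    updated, merged, removed, unresolved = stats
    return ("%d files updated, %d files merged, "
            "%d files removed, %d files unresolved"
            % (updated, merged, removed, unresolved))

print(summarize((3, 1, 0, 0)))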
1648
1647
1649 def recordupdates(repo, actions, branchmerge):
1648 def recordupdates(repo, actions, branchmerge):
1650 "record merge actions to the dirstate"
1649 "record merge actions to the dirstate"
1651 # remove (must come first)
1650 # remove (must come first)
1652 for f, args, msg in actions.get('r', []):
1651 for f, args, msg in actions.get('r', []):
1653 if branchmerge:
1652 if branchmerge:
1654 repo.dirstate.remove(f)
1653 repo.dirstate.remove(f)
1655 else:
1654 else:
1656 repo.dirstate.drop(f)
1655 repo.dirstate.drop(f)
1657
1656
1658 # forget (must come first)
1657 # forget (must come first)
1659 for f, args, msg in actions.get('f', []):
1658 for f, args, msg in actions.get('f', []):
1660 repo.dirstate.drop(f)
1659 repo.dirstate.drop(f)
1661
1660
1662 # resolve path conflicts
1661 # resolve path conflicts
1663 for f, args, msg in actions.get('pr', []):
1662 for f, args, msg in actions.get('pr', []):
1664 f0, = args
1663 f0, = args
1665 origf0 = repo.dirstate.copied(f0) or f0
1664 origf0 = repo.dirstate.copied(f0) or f0
1666 repo.dirstate.add(f)
1665 repo.dirstate.add(f)
1667 repo.dirstate.copy(origf0, f)
1666 repo.dirstate.copy(origf0, f)
1668 if f0 == origf0:
1667 if f0 == origf0:
1669 repo.dirstate.remove(f0)
1668 repo.dirstate.remove(f0)
1670 else:
1669 else:
1671 repo.dirstate.drop(f0)
1670 repo.dirstate.drop(f0)
1672
1671
1673 # re-add
1672 # re-add
1674 for f, args, msg in actions.get('a', []):
1673 for f, args, msg in actions.get('a', []):
1675 repo.dirstate.add(f)
1674 repo.dirstate.add(f)
1676
1675
1677 # re-add/mark as modified
1676 # re-add/mark as modified
1678 for f, args, msg in actions.get('am', []):
1677 for f, args, msg in actions.get('am', []):
1679 if branchmerge:
1678 if branchmerge:
1680 repo.dirstate.normallookup(f)
1679 repo.dirstate.normallookup(f)
1681 else:
1680 else:
1682 repo.dirstate.add(f)
1681 repo.dirstate.add(f)
1683
1682
1684 # exec change
1683 # exec change
1685 for f, args, msg in actions.get('e', []):
1684 for f, args, msg in actions.get('e', []):
1686 repo.dirstate.normallookup(f)
1685 repo.dirstate.normallookup(f)
1687
1686
1688 # keep
1687 # keep
1689 for f, args, msg in actions.get('k', []):
1688 for f, args, msg in actions.get('k', []):
1690 pass
1689 pass
1691
1690
1692 # get
1691 # get
1693 for f, args, msg in actions.get('g', []):
1692 for f, args, msg in actions.get('g', []):
1694 if branchmerge:
1693 if branchmerge:
1695 repo.dirstate.otherparent(f)
1694 repo.dirstate.otherparent(f)
1696 else:
1695 else:
1697 repo.dirstate.normal(f)
1696 repo.dirstate.normal(f)
1698
1697
1699 # merge
1698 # merge
1700 for f, args, msg in actions.get('m', []):
1699 for f, args, msg in actions.get('m', []):
1701 f1, f2, fa, move, anc = args
1700 f1, f2, fa, move, anc = args
1702 if branchmerge:
1701 if branchmerge:
1703 # We've done a branch merge, mark this file as merged
1702 # We've done a branch merge, mark this file as merged
1704 # so that we properly record the merger later
1703 # so that we properly record the merger later
1705 repo.dirstate.merge(f)
1704 repo.dirstate.merge(f)
1706 if f1 != f2: # copy/rename
1705 if f1 != f2: # copy/rename
1707 if move:
1706 if move:
1708 repo.dirstate.remove(f1)
1707 repo.dirstate.remove(f1)
1709 if f1 != f:
1708 if f1 != f:
1710 repo.dirstate.copy(f1, f)
1709 repo.dirstate.copy(f1, f)
1711 else:
1710 else:
1712 repo.dirstate.copy(f2, f)
1711 repo.dirstate.copy(f2, f)
1713 else:
1712 else:
1714 # We've update-merged a locally modified file, so
1713 # We've update-merged a locally modified file, so
1715 # we set the dirstate to emulate a normal checkout
1714 # we set the dirstate to emulate a normal checkout
1716 # of that file some time in the past. Thus our
1715 # of that file some time in the past. Thus our
1717 # merge will appear as a normal local file
1716 # merge will appear as a normal local file
1718 # modification.
1717 # modification.
1719 if f2 == f: # file not locally copied/moved
1718 if f2 == f: # file not locally copied/moved
1720 repo.dirstate.normallookup(f)
1719 repo.dirstate.normallookup(f)
1721 if move:
1720 if move:
1722 repo.dirstate.drop(f1)
1721 repo.dirstate.drop(f1)
1723
1722
1724 # directory rename, move local
1723 # directory rename, move local
1725 for f, args, msg in actions.get('dm', []):
1724 for f, args, msg in actions.get('dm', []):
1726 f0, flag = args
1725 f0, flag = args
1727 if branchmerge:
1726 if branchmerge:
1728 repo.dirstate.add(f)
1727 repo.dirstate.add(f)
1729 repo.dirstate.remove(f0)
1728 repo.dirstate.remove(f0)
1730 repo.dirstate.copy(f0, f)
1729 repo.dirstate.copy(f0, f)
1731 else:
1730 else:
1732 repo.dirstate.normal(f)
1731 repo.dirstate.normal(f)
1733 repo.dirstate.drop(f0)
1732 repo.dirstate.drop(f0)
1734
1733
1735 # directory rename, get
1734 # directory rename, get
1736 for f, args, msg in actions.get('dg', []):
1735 for f, args, msg in actions.get('dg', []):
1737 f0, flag = args
1736 f0, flag = args
1738 if branchmerge:
1737 if branchmerge:
1739 repo.dirstate.add(f)
1738 repo.dirstate.add(f)
1740 repo.dirstate.copy(f0, f)
1739 repo.dirstate.copy(f0, f)
1741 else:
1740 else:
1742 repo.dirstate.normal(f)
1741 repo.dirstate.normal(f)
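recordupdates() only touches dirstate bookkeeping, and the same action is recorded differently for a branch merge than for a plain update. A standalone sketch of the 'g' (get) case, with a plain dict standing in for the dirstate object (state names are illustrative):

def record_get(dirstate, f, branchmerge):
    if branchmerge:
        dirstate[f] = 'otherparent'   # content came from the second parent
    else:
        dirstate[f] = 'normal'        # plain update: file is clean at the new rev

state = {}
record_get(state, 'a.txt', branchmerge=False)
record_get(state, 'b.txt', branchmerge=True)
print(state)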
1743
1742
1744 def update(repo, node, branchmerge, force, ancestor=None,
1743 def update(repo, node, branchmerge, force, ancestor=None,
1745 mergeancestor=False, labels=None, matcher=None, mergeforce=False,
1744 mergeancestor=False, labels=None, matcher=None, mergeforce=False,
1746 updatecheck=None, wc=None):
1745 updatecheck=None, wc=None):
1747 """
1746 """
1748 Perform a merge between the working directory and the given node
1747 Perform a merge between the working directory and the given node
1749
1748
1750 node = the node to update to
1749 node = the node to update to
1751 branchmerge = whether to merge between branches
1750 branchmerge = whether to merge between branches
1752 force = whether to force branch merging or file overwriting
1751 force = whether to force branch merging or file overwriting
1753 matcher = a matcher to filter file lists (dirstate not updated)
1752 matcher = a matcher to filter file lists (dirstate not updated)
1754 mergeancestor = whether it is merging with an ancestor. If true,
1753 mergeancestor = whether it is merging with an ancestor. If true,
1755 we should accept the incoming changes for any prompts that occur.
1754 we should accept the incoming changes for any prompts that occur.
1756 If false, merging with an ancestor (fast-forward) is only allowed
1755 If false, merging with an ancestor (fast-forward) is only allowed
1757 between different named branches. This flag is used by the rebase extension
1756 between different named branches. This flag is used by the rebase extension
1758 as a temporary fix and should be avoided in general.
1757 as a temporary fix and should be avoided in general.
1759 labels = labels to use for base, local and other
1758 labels = labels to use for base, local and other
1760 mergeforce = whether the merge was run with 'merge --force' (deprecated): if
1759 mergeforce = whether the merge was run with 'merge --force' (deprecated): if
1761 this is True, then 'force' should be True as well.
1760 this is True, then 'force' should be True as well.
1762
1761
1763 The table below shows all the behaviors of the update command given the
1762 The table below shows all the behaviors of the update command given the
1764 -c/--check and -C/--clean or no options, whether the working directory is
1763 -c/--check and -C/--clean or no options, whether the working directory is
1765 dirty, whether a revision is specified, and the relationship of the parent
1764 dirty, whether a revision is specified, and the relationship of the parent
1766 rev to the target rev (linear or not). Match from top first. The -n
1765 rev to the target rev (linear or not). Match from top first. The -n
1767 option doesn't exist on the command line, but represents the
1766 option doesn't exist on the command line, but represents the
1768 experimental.updatecheck=noconflict option.
1767 experimental.updatecheck=noconflict option.
1769
1768
1770 This logic is tested by test-update-branches.t.
1769 This logic is tested by test-update-branches.t.
1771
1770
1772 -c -C -n -m dirty rev linear | result
1771 -c -C -n -m dirty rev linear | result
1773 y y * * * * * | (1)
1772 y y * * * * * | (1)
1774 y * y * * * * | (1)
1773 y * y * * * * | (1)
1775 y * * y * * * | (1)
1774 y * * y * * * | (1)
1776 * y y * * * * | (1)
1775 * y y * * * * | (1)
1777 * y * y * * * | (1)
1776 * y * y * * * | (1)
1778 * * y y * * * | (1)
1777 * * y y * * * | (1)
1779 * * * * * n n | x
1778 * * * * * n n | x
1780 * * * * n * * | ok
1779 * * * * n * * | ok
1781 n n n n y * y | merge
1780 n n n n y * y | merge
1782 n n n n y y n | (2)
1781 n n n n y y n | (2)
1783 n n n y y * * | merge
1782 n n n y y * * | merge
1784 n n y n y * * | merge if no conflict
1783 n n y n y * * | merge if no conflict
1785 n y n n y * * | discard
1784 n y n n y * * | discard
1786 y n n n y * * | (3)
1785 y n n n y * * | (3)
1787
1786
1788 x = can't happen
1787 x = can't happen
1789 * = don't-care
1788 * = don't-care
1790 1 = incompatible options (checked in commands.py)
1789 1 = incompatible options (checked in commands.py)
1791 2 = abort: uncommitted changes (commit or update --clean to discard changes)
1790 2 = abort: uncommitted changes (commit or update --clean to discard changes)
1792 3 = abort: uncommitted changes (checked in commands.py)
1791 3 = abort: uncommitted changes (checked in commands.py)
1793
1792
1794 The merge is performed inside ``wc``, a workingctx-like object. It defaults
1793 The merge is performed inside ``wc``, a workingctx-like object. It defaults
1795 to repo[None] if None is passed.
1794 to repo[None] if None is passed.
1796
1795
1797 Return the same tuple as applyupdates().
1796 Return the same tuple as applyupdates().
1798 """
1797 """
1799 # Avoid cycle.
1798 # Avoid cycle.
1800 from . import sparse
1799 from . import sparse
1801
1800
1802 # This function used to find the default destination if node was None, but
1801 # This function used to find the default destination if node was None, but
1803 # that's now in destutil.py.
1802 # that's now in destutil.py.
1804 assert node is not None
1803 assert node is not None
1805 if not branchmerge and not force:
1804 if not branchmerge and not force:
1806 # TODO: remove the default once all callers that pass branchmerge=False
1805 # TODO: remove the default once all callers that pass branchmerge=False
1807 # and force=False pass a value for updatecheck. We may want to allow
1806 # and force=False pass a value for updatecheck. We may want to allow
1808 # updatecheck='abort' to better support some of these callers.
1807 # updatecheck='abort' to better support some of these callers.
1809 if updatecheck is None:
1808 if updatecheck is None:
1810 updatecheck = 'linear'
1809 updatecheck = 'linear'
1811 assert updatecheck in ('none', 'linear', 'noconflict')
1810 assert updatecheck in ('none', 'linear', 'noconflict')
1812 # If we're doing a partial update, we need to skip updating
1811 # If we're doing a partial update, we need to skip updating
1813 # the dirstate, so make a note of any partial-ness to the
1812 # the dirstate, so make a note of any partial-ness to the
1814 # update here.
1813 # update here.
1815 if matcher is None or matcher.always():
1814 if matcher is None or matcher.always():
1816 partial = False
1815 partial = False
1817 else:
1816 else:
1818 partial = True
1817 partial = True
1819 with repo.wlock():
1818 with repo.wlock():
1820 if wc is None:
1819 if wc is None:
1821 wc = repo[None]
1820 wc = repo[None]
1822 pl = wc.parents()
1821 pl = wc.parents()
1823 p1 = pl[0]
1822 p1 = pl[0]
1824 pas = [None]
1823 pas = [None]
1825 if ancestor is not None:
1824 if ancestor is not None:
1826 pas = [repo[ancestor]]
1825 pas = [repo[ancestor]]
1827
1826
1828 overwrite = force and not branchmerge
1827 overwrite = force and not branchmerge
1829
1828
1830 p2 = repo[node]
1829 p2 = repo[node]
1831 if pas[0] is None:
1830 if pas[0] is None:
1832 if repo.ui.configlist('merge', 'preferancestor') == ['*']:
1831 if repo.ui.configlist('merge', 'preferancestor') == ['*']:
1833 cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
1832 cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node())
1834 pas = [repo[anc] for anc in (sorted(cahs) or [nullid])]
1833 pas = [repo[anc] for anc in (sorted(cahs) or [nullid])]
1835 else:
1834 else:
1836 pas = [p1.ancestor(p2, warn=branchmerge)]
1835 pas = [p1.ancestor(p2, warn=branchmerge)]
1837
1836
1838 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
1837 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
1839
1838
1840 ### check phase
1839 ### check phase
1841 if not overwrite:
1840 if not overwrite:
1842 if len(pl) > 1:
1841 if len(pl) > 1:
1843 raise error.Abort(_("outstanding uncommitted merge"))
1842 raise error.Abort(_("outstanding uncommitted merge"))
1844 ms = mergestate.read(repo)
1843 ms = mergestate.read(repo)
1845 if list(ms.unresolved()):
1844 if list(ms.unresolved()):
1846 raise error.Abort(_("outstanding merge conflicts"))
1845 raise error.Abort(_("outstanding merge conflicts"))
1847 if branchmerge:
1846 if branchmerge:
1848 if pas == [p2]:
1847 if pas == [p2]:
1849 raise error.Abort(_("merging with a working directory ancestor"
1848 raise error.Abort(_("merging with a working directory ancestor"
1850 " has no effect"))
1849 " has no effect"))
1851 elif pas == [p1]:
1850 elif pas == [p1]:
1852 if not mergeancestor and wc.branch() == p2.branch():
1851 if not mergeancestor and wc.branch() == p2.branch():
1853 raise error.Abort(_("nothing to merge"),
1852 raise error.Abort(_("nothing to merge"),
1854 hint=_("use 'hg update' "
1853 hint=_("use 'hg update' "
1855 "or check 'hg heads'"))
1854 "or check 'hg heads'"))
1856 if not force and (wc.files() or wc.deleted()):
1855 if not force and (wc.files() or wc.deleted()):
1857 raise error.Abort(_("uncommitted changes"),
1856 raise error.Abort(_("uncommitted changes"),
1858 hint=_("use 'hg status' to list changes"))
1857 hint=_("use 'hg status' to list changes"))
1859 if not wc.isinmemory():
1858 if not wc.isinmemory():
1860 for s in sorted(wc.substate):
1859 for s in sorted(wc.substate):
1861 wc.sub(s).bailifchanged()
1860 wc.sub(s).bailifchanged()
1862
1861
1863 elif not overwrite:
1862 elif not overwrite:
1864 if p1 == p2: # no-op update
1863 if p1 == p2: # no-op update
1865 # call the hooks and exit early
1864 # call the hooks and exit early
1866 repo.hook('preupdate', throw=True, parent1=xp2, parent2='')
1865 repo.hook('preupdate', throw=True, parent1=xp2, parent2='')
1867 repo.hook('update', parent1=xp2, parent2='', error=0)
1866 repo.hook('update', parent1=xp2, parent2='', error=0)
1868 return 0, 0, 0, 0
1867 return 0, 0, 0, 0
1869
1868
1870 if (updatecheck == 'linear' and
1869 if (updatecheck == 'linear' and
1871 pas not in ([p1], [p2])): # nonlinear
1870 pas not in ([p1], [p2])): # nonlinear
1872 dirty = wc.dirty(missing=True)
1871 dirty = wc.dirty(missing=True)
1873 if dirty:
1872 if dirty:
1874 # Branching is a bit strange to ensure we do the minimal
1873 # Branching is a bit strange to ensure we do the minimal
1875 # amount of call to obsutil.foreground.
1874 # amount of call to obsutil.foreground.
1876 foreground = obsutil.foreground(repo, [p1.node()])
1875 foreground = obsutil.foreground(repo, [p1.node()])
1877 # note: the <node> variable contains a random identifier
1876 # note: the <node> variable contains a random identifier
1878 if repo[node].node() in foreground:
1877 if repo[node].node() in foreground:
1879 pass # allow updating to successors
1878 pass # allow updating to successors
1880 else:
1879 else:
1881 msg = _("uncommitted changes")
1880 msg = _("uncommitted changes")
1882 hint = _("commit or update --clean to discard changes")
1881 hint = _("commit or update --clean to discard changes")
1883 raise error.UpdateAbort(msg, hint=hint)
1882 raise error.UpdateAbort(msg, hint=hint)
1884 else:
1883 else:
1885 # Allow jumping branches if clean and specific rev given
1884 # Allow jumping branches if clean and specific rev given
1886 pass
1885 pass
1887
1886
1888 if overwrite:
1887 if overwrite:
1889 pas = [wc]
1888 pas = [wc]
1890 elif not branchmerge:
1889 elif not branchmerge:
1891 pas = [p1]
1890 pas = [p1]
1892
1891
1893 # deprecated config: merge.followcopies
1892 # deprecated config: merge.followcopies
1894 followcopies = repo.ui.configbool('merge', 'followcopies')
1893 followcopies = repo.ui.configbool('merge', 'followcopies')
1895 if overwrite:
1894 if overwrite:
1896 followcopies = False
1895 followcopies = False
1897 elif not pas[0]:
1896 elif not pas[0]:
1898 followcopies = False
1897 followcopies = False
1899 if not branchmerge and not wc.dirty(missing=True):
1898 if not branchmerge and not wc.dirty(missing=True):
1900 followcopies = False
1899 followcopies = False
1901
1900
1902 ### calculate phase
1901 ### calculate phase
1903 actionbyfile, diverge, renamedelete = calculateupdates(
1902 actionbyfile, diverge, renamedelete = calculateupdates(
1904 repo, wc, p2, pas, branchmerge, force, mergeancestor,
1903 repo, wc, p2, pas, branchmerge, force, mergeancestor,
1905 followcopies, matcher=matcher, mergeforce=mergeforce)
1904 followcopies, matcher=matcher, mergeforce=mergeforce)
1906
1905
1907 if updatecheck == 'noconflict':
1906 if updatecheck == 'noconflict':
1908 for f, (m, args, msg) in actionbyfile.iteritems():
1907 for f, (m, args, msg) in actionbyfile.iteritems():
1909 if m not in ('g', 'k', 'e', 'r', 'pr'):
1908 if m not in ('g', 'k', 'e', 'r', 'pr'):
1910 msg = _("conflicting changes")
1909 msg = _("conflicting changes")
1911 hint = _("commit or update --clean to discard changes")
1910 hint = _("commit or update --clean to discard changes")
1912 raise error.Abort(msg, hint=hint)
1911 raise error.Abort(msg, hint=hint)
1913
1912
1914 # Prompt and create actions. Most of this is in the resolve phase
1913 # Prompt and create actions. Most of this is in the resolve phase
1915 # already, but we can't handle .hgsubstate in filemerge or
1914 # already, but we can't handle .hgsubstate in filemerge or
1916 # subrepoutil.submerge yet so we have to keep prompting for it.
1915 # subrepoutil.submerge yet so we have to keep prompting for it.
1917 if '.hgsubstate' in actionbyfile:
1916 if '.hgsubstate' in actionbyfile:
1918 f = '.hgsubstate'
1917 f = '.hgsubstate'
1919 m, args, msg = actionbyfile[f]
1918 m, args, msg = actionbyfile[f]
1920 prompts = filemerge.partextras(labels)
1919 prompts = filemerge.partextras(labels)
1921 prompts['f'] = f
1920 prompts['f'] = f
1922 if m == 'cd':
1921 if m == 'cd':
1923 if repo.ui.promptchoice(
1922 if repo.ui.promptchoice(
1924 _("local%(l)s changed %(f)s which other%(o)s deleted\n"
1923 _("local%(l)s changed %(f)s which other%(o)s deleted\n"
1925 "use (c)hanged version or (d)elete?"
1924 "use (c)hanged version or (d)elete?"
1926 "$$ &Changed $$ &Delete") % prompts, 0):
1925 "$$ &Changed $$ &Delete") % prompts, 0):
1927 actionbyfile[f] = ('r', None, "prompt delete")
1926 actionbyfile[f] = ('r', None, "prompt delete")
1928 elif f in p1:
1927 elif f in p1:
1929 actionbyfile[f] = ('am', None, "prompt keep")
1928 actionbyfile[f] = ('am', None, "prompt keep")
1930 else:
1929 else:
1931 actionbyfile[f] = ('a', None, "prompt keep")
1930 actionbyfile[f] = ('a', None, "prompt keep")
1932 elif m == 'dc':
1931 elif m == 'dc':
1933 f1, f2, fa, move, anc = args
1932 f1, f2, fa, move, anc = args
1934 flags = p2[f2].flags()
1933 flags = p2[f2].flags()
1935 if repo.ui.promptchoice(
1934 if repo.ui.promptchoice(
1936 _("other%(o)s changed %(f)s which local%(l)s deleted\n"
1935 _("other%(o)s changed %(f)s which local%(l)s deleted\n"
1937 "use (c)hanged version or leave (d)eleted?"
1936 "use (c)hanged version or leave (d)eleted?"
1938 "$$ &Changed $$ &Deleted") % prompts, 0) == 0:
1937 "$$ &Changed $$ &Deleted") % prompts, 0) == 0:
1939 actionbyfile[f] = ('g', (flags, False), "prompt recreating")
1938 actionbyfile[f] = ('g', (flags, False), "prompt recreating")
1940 else:
1939 else:
1941 del actionbyfile[f]
1940 del actionbyfile[f]
1942
1941
1943 # Convert to dictionary-of-lists format
1942 # Convert to dictionary-of-lists format
1944 actions = dict((m, [])
1943 actions = dict((m, [])
1945 for m in 'a am f g cd dc r dm dg m e k p pr'.split())
1944 for m in 'a am f g cd dc r dm dg m e k p pr'.split())
1946 for f, (m, args, msg) in actionbyfile.iteritems():
1945 for f, (m, args, msg) in actionbyfile.iteritems():
1947 if m not in actions:
1946 if m not in actions:
1948 actions[m] = []
1947 actions[m] = []
1949 actions[m].append((f, args, msg))
1948 actions[m].append((f, args, msg))
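The conversion above turns the per-file mapping into a per-action-type mapping of lists. A standalone sketch of the same reshaping with plain dicts (the entries are invented for illustration):

actionbyfile = {
    'a.txt': ('g', ('', False), "remote created"),
    'b.txt': ('r', None, "other deleted"),
    'c.txt': ('g', ('x', False), "remote is newer"),
}
actions = {m: [] for m in 'a am f g cd dc r dm dg m e k p pr'.split()}
for f, (m, args, msg) in actionbyfile.items():
    actions.setdefault(m, []).append((f, args, msg))
print(actions['g'])  # the two 'g' entries, in insertion order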
1950
1949
1951 if not util.fscasesensitive(repo.path):
1950 if not util.fscasesensitive(repo.path):
1952 # check collision between files only in p2 for clean update
1951 # check collision between files only in p2 for clean update
1953 if (not branchmerge and
1952 if (not branchmerge and
1954 (force or not wc.dirty(missing=True, branch=False))):
1953 (force or not wc.dirty(missing=True, branch=False))):
1955 _checkcollision(repo, p2.manifest(), None)
1954 _checkcollision(repo, p2.manifest(), None)
1956 else:
1955 else:
1957 _checkcollision(repo, wc.manifest(), actions)
1956 _checkcollision(repo, wc.manifest(), actions)
1958
1957
1959 # divergent renames
1958 # divergent renames
1960 for f, fl in sorted(diverge.iteritems()):
1959 for f, fl in sorted(diverge.iteritems()):
1961 repo.ui.warn(_("note: possible conflict - %s was renamed "
1960 repo.ui.warn(_("note: possible conflict - %s was renamed "
1962 "multiple times to:\n") % f)
1961 "multiple times to:\n") % f)
1963 for nf in fl:
1962 for nf in fl:
1964 repo.ui.warn(" %s\n" % nf)
1963 repo.ui.warn(" %s\n" % nf)
1965
1964
1966 # rename and delete
1965 # rename and delete
1967 for f, fl in sorted(renamedelete.iteritems()):
1966 for f, fl in sorted(renamedelete.iteritems()):
1968 repo.ui.warn(_("note: possible conflict - %s was deleted "
1967 repo.ui.warn(_("note: possible conflict - %s was deleted "
1969 "and renamed to:\n") % f)
1968 "and renamed to:\n") % f)
1970 for nf in fl:
1969 for nf in fl:
1971 repo.ui.warn(" %s\n" % nf)
1970 repo.ui.warn(" %s\n" % nf)
1972
1971
1973 ### apply phase
1972 ### apply phase
1974 if not branchmerge: # just jump to the new rev
1973 if not branchmerge: # just jump to the new rev
1975 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
1974 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
1976 if not partial and not wc.isinmemory():
1975 if not partial and not wc.isinmemory():
1977 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
1976 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
1978 # note that we're in the middle of an update
1977 # note that we're in the middle of an update
1979 repo.vfs.write('updatestate', p2.hex())
1978 repo.vfs.write('updatestate', p2.hex())
1980
1979
1981 # Advertise fsmonitor when its presence could be useful.
1980 # Advertise fsmonitor when its presence could be useful.
1982 #
1981 #
1983 # We only advertise when performing an update from an empty working
1982 # We only advertise when performing an update from an empty working
1984 # directory. This typically only occurs during initial clone.
1983 # directory. This typically only occurs during initial clone.
1985 #
1984 #
1986 # We give users a mechanism to disable the warning in case it is
1985 # We give users a mechanism to disable the warning in case it is
1987 # annoying.
1986 # annoying.
1988 #
1987 #
1989 # We only warn on Linux and macOS because that's where fsmonitor is
1988 # We only warn on Linux and macOS because that's where fsmonitor is
1990 # considered stable.
1989 # considered stable.
1991 fsmonitorwarning = repo.ui.configbool('fsmonitor', 'warn_when_unused')
1990 fsmonitorwarning = repo.ui.configbool('fsmonitor', 'warn_when_unused')
1992 fsmonitorthreshold = repo.ui.configint('fsmonitor',
1991 fsmonitorthreshold = repo.ui.configint('fsmonitor',
1993 'warn_update_file_count')
1992 'warn_update_file_count')
1994 try:
1993 try:
1994 # avoid cycle: extensions -> cmdutil -> merge
1995 from . import extensions
1995 extensions.find('fsmonitor')
1996 extensions.find('fsmonitor')
1996 fsmonitorenabled = repo.ui.config('fsmonitor', 'mode') != 'off'
1997 fsmonitorenabled = repo.ui.config('fsmonitor', 'mode') != 'off'
1997 # We intentionally don't look at whether fsmonitor has disabled
1998 # We intentionally don't look at whether fsmonitor has disabled
1998 # itself because a) fsmonitor may have already printed a warning
1999 # itself because a) fsmonitor may have already printed a warning
1999 # b) we only care about the config state here.
2000 # b) we only care about the config state here.
2000 except KeyError:
2001 except KeyError:
2001 fsmonitorenabled = False
2002 fsmonitorenabled = False
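
The two added lines are the point of this changeset: per the new comment, a module-level import of extensions here would re-enter the extensions -> cmdutil -> merge chain and close an import cycle, so the import is pushed down into the function and only runs at call time. A generic sketch of the same deferred-import pattern (module and function names are hypothetical):

    # Hypothetical module pkg/low.py: a top-level "from . import high" would
    # complete the cycle low -> high -> low, so the import is deferred into the
    # function body and executes only once both modules have finished loading.
    def probe():
        from . import high   # deferred import, mirroring the hunk above
        return high.helper()
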
2003
2004 if (fsmonitorwarning
2005 and not fsmonitorenabled
2006 and p1.node() == nullid
2007 and len(actions['g']) >= fsmonitorthreshold
2008 and pycompat.sysplatform.startswith(('linux', 'darwin'))):
2009 repo.ui.warn(
2010 _('(warning: large working directory being used without '
2011 'fsmonitor enabled; enable fsmonitor to improve performance; '
2012 'see "hg help -e fsmonitor")\n'))
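
The advisory is driven entirely by the configuration read above (fsmonitor.warn_when_unused, fsmonitor.warn_update_file_count, and fsmonitor.mode). A hypothetical hgrc fragment exercising those knobs (the values are illustrative, not the defaults):

    [fsmonitor]
    # silence the advisory entirely
    warn_when_unused = false
    # or keep it, but only warn for much larger checkouts
    warn_update_file_count = 100000
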
2013
2014 stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels)
2015
2016 if not partial and not wc.isinmemory():
2017 with repo.dirstate.parentchange():
2018 repo.setparents(fp1, fp2)
2019 recordupdates(repo, actions, branchmerge)
2020 # update completed, clear state
2021 util.unlink(repo.vfs.join('updatestate'))
2022
2023 if not branchmerge:
2024 repo.dirstate.setbranch(p2.branch())
2025
2026 # If we're updating to a location, clean up any stale temporary includes
2027 # (ex: this happens during hg rebase --abort).
2028 if not branchmerge:
2029 sparse.prunetemporaryincludes(repo)
2030
2031 if not partial:
2032 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
2033 return stats
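
The counters returned here come straight from applyupdates(); the hook call above already relies on stats[3] being the error/unresolved count. A hedged sketch of a caller consuming the value, assuming the conventional (updated, merged, removed, unresolved) layout:

    # Sketch only: a plain update (no branch merge, no force) to some node.
    stats = update(repo, node, False, False)
    updated, merged, removed, unresolved = stats   # assumed 4-tuple layout
    if unresolved:
        repo.ui.warn('%d files still have unresolved conflicts\n' % unresolved)
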
2034
2035 def graft(repo, ctx, pctx, labels, keepparent=False):
2036 """Do a graft-like merge.
2037
2038 This is a merge where the merge ancestor is chosen such that one
2039 or more changesets are grafted onto the current changeset. In
2040 addition to the merge, this fixes up the dirstate to include only
2041 a single parent (if keepparent is False) and tries to duplicate any
2042 renames/copies appropriately.
2043
2044 ctx - changeset to rebase
2045 pctx - merge base, usually ctx.p1()
2046 labels - merge labels eg ['local', 'graft']
2047 keepparent - keep second parent if any
2048
2049 """
2050 # If we're grafting a descendant onto an ancestor, be sure to pass
2051 # mergeancestor=True to update. This does two things: 1) allows the merge if
2052 # the destination is the same as the parent of the ctx (so we can use graft
2053 # to copy commits), and 2) informs update that the incoming changes are
2054 # newer than the destination so it doesn't prompt about "remote changed foo
2055 # which local deleted".
2056 mergeancestor = repo.changelog.isancestor(repo['.'].node(), ctx.node())
2057
2058 stats = update(repo, ctx.node(), True, True, pctx.node(),
2059 mergeancestor=mergeancestor, labels=labels)
2060
2061 pother = nullid
2062 parents = ctx.parents()
2063 if keepparent and len(parents) == 2 and pctx in parents:
2064 parents.remove(pctx)
2065 pother = parents[0].node()
2066
2067 with repo.dirstate.parentchange():
2068 repo.setparents(repo['.'].node(), pother)
2069 repo.dirstate.write(repo.currenttransaction())
2070 # fix up dirstate for copies and renames
2071 copies.duplicatecopies(repo, repo[None], ctx.rev(), pctx.rev())
2072 return stats
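
A hedged usage sketch for graft(), following the docstring's own conventions (pctx is usually ctx.p1(), labels such as ['local', 'graft']); the revision number is hypothetical:

    # Sketch only: graft revision 42 onto the current working directory parent.
    ctx = repo[42]
    stats = graft(repo, ctx, ctx.p1(), ['local', 'graft'])
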