export: invoke the file prefetch hook...
Matt Harbison
r37781:b54404d6 default
@@ -1,3268 +1,3272 @@
# cmdutil.py - help for command processing in mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import os
import re
import tempfile

from .i18n import _
from .node import (
    hex,
    nullid,
    nullrev,
    short,
)

from . import (
    bookmarks,
    changelog,
    copies,
    crecord as crecordmod,
    dirstateguard,
    encoding,
    error,
    formatter,
    logcmdutil,
    match as matchmod,
    merge as mergemod,
    mergeutil,
    obsolete,
    patch,
    pathutil,
    pycompat,
    registrar,
    revlog,
    rewriteutil,
    scmutil,
    smartset,
    subrepoutil,
    templatekw,
    templater,
    util,
    vfs as vfsmod,
)

from .utils import (
    dateutil,
    stringutil,
)

stringio = util.stringio

# templates of common command options
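# (each entry below is an option tuple: short flag, long flag, default value,
# help text, and optionally the placeholder shown for the option's argument)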

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

confirmopts = [
    ('', 'confirm', None,
     _('ask before applying actions')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('Z', 'ignore-space-at-eol', None,
     _('ignore changes in whitespace at EOL')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"

def ishunk(x):
    hunkclasses = (crecordmod.uihunk, patch.recordhunk)
    return isinstance(x, hunkclasses)

def newandmodified(chunks, originalchunks):
    newlyaddedandmodifiedfiles = set()
    for chunk in chunks:
        if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
                originalchunks:
            newlyaddedandmodifiedfiles.add(chunk.header.filename())
    return newlyaddedandmodifiedfiles

def parsealiases(cmd):
    return cmd.lstrip("^").split("|")
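# e.g. a command table key like "^commit|ci" is parsed by parsealiases() into
# ['commit', 'ci']; the leading "^" only marks the command for the short help
# list.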

def setupwrapcolorwrite(ui):
    # wrap ui.write so diff output can be labeled/colorized
    def wrapwrite(orig, *args, **kw):
        label = kw.pop(r'label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    setattr(ui, 'write', wrap)
    return oldwrite

def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    if usecurses:
        if testfile:
            recordfn = crecordmod.testdecorator(testfile,
                                                crecordmod.testchunkselector)
        else:
            recordfn = crecordmod.chunkselector

        return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)

    else:
        return patch.filterpatch(ui, originalhunks, operation)

def recordfilter(ui, originalhunks, operation=None):
    """ Prompts the user to filter the originalhunks and returns a list of
    selected hunks.
    *operation* is used to build ui messages indicating to the user what kind
    of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest')
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, operation)
    finally:
        ui.write = oldwrite
    return newchunks, newopts

def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is the generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare the working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by the non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply a subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                        newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        action="diff",
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(pycompat.bytestr(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open into thinking that we haven't
                    # modified them.
                    #
                    # Also note that this is racy, as an editor could notice
                    # the file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                pass

    def recordinwlock(ui, repo, message, match, opts):
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)

class dirnode(object):
    """
    Represent a directory in the user's working copy with information required
    for the purpose of tersing its status.

    path is the path to the directory

    statuses is a set of statuses of all files in this directory (this includes
    all the files in all the subdirectories too)

    files is a list of files which are direct children of this directory

    subdirs is a dictionary with the sub-directory name as the key and its own
    dirnode object as the value
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set([])
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Add a file in this directory as a direct child."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """
        Add a file to this directory or to its direct parent directory.

        If the file is not a direct child of this directory, we traverse to
        the directory of which this file is a direct child and add the file
        there.
        """

        # if the filename contains a path separator, it means it's not a
        # direct child of this directory
        if '/' in filename:
            subdir, filep = filename.split('/', 1)

            # does the dirnode object for subdir exist?
            if subdir not in self.subdirs:
                subdirpath = os.path.join(self.path, subdir)
                self.subdirs[subdir] = dirnode(subdirpath)

            # try adding the file in subdir
            self.subdirs[subdir].addfile(filep, status)

        else:
            self._addfileindir(filename, status)

        if status not in self.statuses:
            self.statuses.add(status)
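        # For example, addfile('foo/bar/baz.py', 'm') creates dirnode objects
        # for 'foo' and 'foo/bar' as needed, records ('baz.py', 'm') on the
        # innermost node, and adds 'm' to the statuses of every node visited.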

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for f, st in self.files:
            yield st, os.path.join(self.path, f)

    def tersewalk(self, terseargs):
        """
        Yield (status, path) obtained by processing the status of this
        dirnode.

        terseargs is the string of arguments passed by the user with the
        `--terse` flag.

        Following are the cases which can happen:

        1) All the files in the directory (including all the files in its
        subdirectories) share the same status and the user has asked us to
        terse that status. -> yield (status, dirpath)

        2) Otherwise, we do the following:

        a) Yield (status, filepath) for all the files which are in this
        directory (only the ones in this directory, not the subdirs)

        b) Recurse the function on all the subdirectories of this
        directory
        """

        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # Making sure we terse only when the status abbreviation is
            # passed as terse argument
            if onlyst in terseargs:
                yield onlyst, self.path + pycompat.ossep
                return

        # add the files to status list
        for st, fpath in self.iterfilepaths():
            yield st, fpath

        # recurse on the subdirs
        for dirobj in self.subdirs.values():
            for st, fpath in dirobj.tersewalk(terseargs):
                yield st, fpath

def tersedir(statuslist, terseargs):
    """
    Terse the status if all the files in a directory share the same status.

    statuslist is a scmutil.status() object which contains a list of files for
    each status.
    terseargs is the string which is passed by the user as the argument to
    the `--terse` flag.

    The function makes a tree of objects of dirnode class, and at each node it
    stores the information required to know whether we can terse a certain
    directory or not.
    """
    # the order matters here as that is used to produce final list
    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # checking the argument validity
    for s in pycompat.bytestr(terseargs):
        if s not in allst:
            raise error.Abort(_("'%s' not recognized") % s)

    # creating a dirnode object for the root of the repo
    rootobj = dirnode('')
    pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
               'ignored', 'removed')

    tersedict = {}
    for attrname in pstatus:
        statuschar = attrname[0:1]
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)
        tersedict[statuschar] = []

    # we won't be tersing the root dir, so add files in it
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # process each sub-directory and build tersedict
    for subdir in rootobj.subdirs.values():
        for st, f in subdir.tersewalk(terseargs):
            tersedict[st].append(f)

    tersedlist = []
    for st in allst:
        tersedict[st].sort()
        tersedlist.append(tersedict[st])

    return tersedlist
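    # For example, with terseargs 'u' a directory whose files (including those
    # in subdirectories) are all unknown collapses to a single "dir/" entry in
    # the unknown list, while directories with mixed statuses are left alone.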

def _commentlines(raw):
    '''Surround lines with a comment char and a new line'''
    lines = raw.splitlines()
    commentedlines = ['# %s' % line for line in lines]
    return '\n'.join(commentedlines) + '\n'
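    # e.g. _commentlines("foo\nbar") == "# foo\n# bar\n"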

def _conflictsmsg(repo):
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    m = scmutil.match(repo[None])
    unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
    if unresolvedlist:
        mergeliststr = '\n'.join(
            [' %s' % util.pathto(repo.root, pycompat.getcwd(), path)
             for path in unresolvedlist])
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
    else:
        msg = _('No unresolved merge conflicts.')

    return _commentlines(msg)

def _helpmessage(continuecmd, abortcmd):
    msg = _('To continue: %s\n'
            'To abort: %s') % (continuecmd, abortcmd)
    return _commentlines(msg)

def _rebasemsg():
    return _helpmessage('hg rebase --continue', 'hg rebase --abort')

def _histeditmsg():
    return _helpmessage('hg histedit --continue', 'hg histedit --abort')

def _unshelvemsg():
    return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')

def _updatecleanmsg(dest=None):
    warning = _('warning: this will discard uncommitted changes')
    return 'hg update --clean %s (%s)' % (dest or '.', warning)

def _graftmsg():
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg graft --continue', _updatecleanmsg())

def _mergemsg():
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg commit', _updatecleanmsg())

def _bisectmsg():
    msg = _('To mark the changeset good: hg bisect --good\n'
            'To mark the changeset bad: hg bisect --bad\n'
            'To abort: hg bisect --reset\n')
    return _commentlines(msg)

def fileexistspredicate(filename):
    return lambda repo: repo.vfs.exists(filename)

def _mergepredicate(repo):
    return len(repo[None].parents()) > 1

STATES = (
    # (state, predicate to detect states, helpful message function)
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # It needs to be last because some of the other unfinished states may also
    # be in a merge or update state (e.g. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)

def _getrepostate(repo):
    # experimental config: commands.status.skipstates
    skip = set(repo.ui.configlist('commands', 'status.skipstates'))
    for state, statedetectionpredicate, msgfn in STATES:
        if state in skip:
            continue
        if statedetectionpredicate(repo):
            return (state, statedetectionpredicate, msgfn)

def morestatus(repo, fm):
    statetuple = _getrepostate(repo)
    label = 'status.morestatus'
    if statetuple:
        fm.startitem()
        state, statedetectionpredicate, helpfulmsg = statetuple
        statemsg = _('The repository is in an unfinished *%s* state.') % state
        fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
        conmsg = _conflictsmsg(repo)
        if conmsg:
            fm.write('conflictsmsg', '%s\n', conmsg, label=label)
        if helpfulmsg:
            helpmsg = helpfulmsg()
            fm.write('helpmsg', '%s\n', helpmsg, label=label)

def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for e in keys:
        aliases = parsealiases(e)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds

def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = sorted(choice)
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
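    # Note: with strict=False, findpossible() also matches unambiguous command
    # prefixes (e.g. 'stat' for 'status'); if several commands match a prefix,
    # findcmd() raises AmbiguousCommand above.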

def changebranch(ui, repo, revs, label):
    """ Change the branch name of given revs to label """

    with repo.wlock(), repo.lock(), repo.transaction('branches'):
        # abort in case of uncommitted merge or dirty wdir
        bailifchanged(repo)
        revs = scmutil.revrange(repo, revs)
        if not revs:
            raise error.Abort("empty revision set")
        roots = repo.revs('roots(%ld)', revs)
        if len(roots) > 1:
            raise error.Abort(_("cannot change branch of non-linear revisions"))
        rewriteutil.precheck(repo, revs, 'change branch of')

        root = repo[roots.first()]
        if not root.p1().branch() == label and label in repo.branchmap():
            raise error.Abort(_("a branch of the same name already exists"))

        if repo.revs('merge() and %ld', revs):
            raise error.Abort(_("cannot change branch of a merge commit"))
        if repo.revs('obsolete() and %ld', revs):
            raise error.Abort(_("cannot change branch of an obsolete changeset"))

        # make sure only topological heads
        if repo.revs('heads(%ld) - head()', revs):
            raise error.Abort(_("cannot change branch in middle of a stack"))

        replacements = {}
        # avoid import cycle mercurial.cmdutil -> mercurial.context ->
        # mercurial.subrepo -> mercurial.cmdutil
        from . import context
        for rev in revs:
            ctx = repo[rev]
            oldbranch = ctx.branch()
            # check if ctx has same branch
            if oldbranch == label:
                continue

            def filectxfn(repo, newctx, path):
                try:
                    return ctx[path]
                except error.ManifestLookupError:
                    return None

            ui.debug("changing branch of '%s' from '%s' to '%s'\n"
                     % (hex(ctx.node()), oldbranch, label))
            extra = ctx.extra()
            extra['branch_change'] = hex(ctx.node())
            # While changing the branch of a set of linear commits, make sure
            # that we base our commits on the new parent rather than the old
            # parent, which was obsoleted while changing the branch
            p1 = ctx.p1().node()
            p2 = ctx.p2().node()
            if p1 in replacements:
                p1 = replacements[p1][0]
            if p2 in replacements:
                p2 = replacements[p2][0]

            mc = context.memctx(repo, (p1, p2),
                                ctx.description(),
                                ctx.files(),
                                filectxfn,
                                user=ctx.user(),
                                date=ctx.date(),
                                extra=extra,
                                branch=label)

            commitphase = ctx.phase()
            overrides = {('phases', 'new-commit'): commitphase}
            with repo.ui.configoverride(overrides, 'branch-change'):
                newnode = repo.commitctx(mc)

            replacements[ctx.node()] = (newnode,)
            ui.debug('new node id is %s\n' % hex(newnode))

        # create obsmarkers and move bookmarks
        scmutil.cleanupnodes(repo, replacements, 'branch-change')

        # move the working copy too
        wctx = repo[None]
        # in-progress merge is a bit too complex for now.
        if len(wctx.parents()) == 1:
            newid = replacements.get(wctx.p1().node())
            if newid is not None:
                # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                # mercurial.cmdutil
                from . import hg
                hg.update(repo, newid[0], quietempty=True)

        ui.status(_("changed branch on %d changesets\n") % len(replacements))
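    # Note: changebranch() backs 'hg branch --rev'; each rewritten changeset
    # records its predecessor in the 'branch_change' extra, and the originals
    # are obsoleted (or stripped) by scmutil.cleanupnodes() above.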

def findrepo(p):
    while not os.path.isdir(os.path.join(p, ".hg")):
        oldp, p = p, os.path.dirname(p)
        if p == oldp:
            return None

    return p

def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that the working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise error.Abort(_('uncommitted changes'), hint=hint)
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)

def logmessage(ui, opts):
    """ get the log message according to the -m and -l options """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if not message and logfile:
        try:
            if isstdiofilename(logfile):
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, encoding.strtolocal(inst.strerror)))
    return message

def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        if ctxorbool:
            return baseformname + ".merge"
    elif 1 < len(ctxorbool.parents()):
        return baseformname + ".merge"

    return baseformname + ".normal"
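    # For instance, mergeeditform(False, 'import') returns 'import.normal' and
    # mergeeditform(True, 'import') returns 'import.merge'; the result selects
    # the matching [committemplate] configuration entry.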
875
875
876 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
876 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
877 editform='', **opts):
877 editform='', **opts):
878 """get appropriate commit message editor according to '--edit' option
878 """get appropriate commit message editor according to '--edit' option
879
879
880 'finishdesc' is a function to be called with edited commit message
880 'finishdesc' is a function to be called with edited commit message
881 (= 'description' of the new changeset) just after editing, but
881 (= 'description' of the new changeset) just after editing, but
882 before checking empty-ness. It should return actual text to be
882 before checking empty-ness. It should return actual text to be
883 stored into history. This allows to change description before
883 stored into history. This allows to change description before
884 storing.
884 storing.
885
885
886 'extramsg' is a extra message to be shown in the editor instead of
886 'extramsg' is a extra message to be shown in the editor instead of
887 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
887 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
888 is automatically added.
888 is automatically added.
889
889
890 'editform' is a dot-separated list of names, to distinguish
890 'editform' is a dot-separated list of names, to distinguish
891 the purpose of commit text editing.
891 the purpose of commit text editing.
892
892
893 'getcommiteditor' returns 'commitforceeditor' regardless of
893 'getcommiteditor' returns 'commitforceeditor' regardless of
894 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
894 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
895 they are specific for usage in MQ.
895 they are specific for usage in MQ.
896 """
896 """
897 if edit or finishdesc or extramsg:
897 if edit or finishdesc or extramsg:
898 return lambda r, c, s: commitforceeditor(r, c, s,
898 return lambda r, c, s: commitforceeditor(r, c, s,
899 finishdesc=finishdesc,
899 finishdesc=finishdesc,
900 extramsg=extramsg,
900 extramsg=extramsg,
901 editform=editform)
901 editform=editform)
902 elif editform:
902 elif editform:
903 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
903 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
904 else:
904 else:
905 return commiteditor
905 return commiteditor
906
906
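# Illustrative sketch (not part of cmdutil.py): the callable returned above is
# normally handed to localrepository.commit() via its 'editor' argument.  The
# editform and message text below are placeholders.
def _examplecommitwitheditor(repo):
    editor = getcommiteditor(edit=True, editform='example.normal')
    # repo.commit() invokes the editor once the changeset is staged, and the
    # forced editor aborts if the resulting message is empty
    return repo.commit('placeholder message', editor=editor)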
907 def rendertemplate(ctx, tmpl, props=None):
907 def rendertemplate(ctx, tmpl, props=None):
908 """Expand a literal template 'tmpl' byte-string against one changeset
908 """Expand a literal template 'tmpl' byte-string against one changeset
909
909
910 Each props item must be a stringify-able value or a callable returning
910 Each props item must be a stringify-able value or a callable returning
911 such a value, i.e. no bare list or dict should be passed.
911 such a value, i.e. no bare list or dict should be passed.
912 """
912 """
913 repo = ctx.repo()
913 repo = ctx.repo()
914 tres = formatter.templateresources(repo.ui, repo)
914 tres = formatter.templateresources(repo.ui, repo)
915 t = formatter.maketemplater(repo.ui, tmpl, defaults=templatekw.keywords,
915 t = formatter.maketemplater(repo.ui, tmpl, defaults=templatekw.keywords,
916 resources=tres)
916 resources=tres)
917 mapping = {'ctx': ctx}
917 mapping = {'ctx': ctx}
918 if props:
918 if props:
919 mapping.update(props)
919 mapping.update(props)
920 return t.renderdefault(mapping)
920 return t.renderdefault(mapping)
921
921
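# Illustrative sketch (not part of cmdutil.py): expanding a small literal
# template against a single changeset.
def _exampleonelinedesc(ctx):
    # e.g. '42:1234abcd5678 fix the frobnicator\n'
    return rendertemplate(ctx, '{rev}:{node|short} {desc|firstline}\n')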
922 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
922 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
923 r"""Convert old-style filename format string to template string
923 r"""Convert old-style filename format string to template string
924
924
925 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
925 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
926 'foo-{reporoot|basename}-{seqno}.patch'
926 'foo-{reporoot|basename}-{seqno}.patch'
927 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
927 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
928 '{rev}{tags % "{tag}"}{node}'
928 '{rev}{tags % "{tag}"}{node}'
929
929
930 '\' in outermost strings has to be escaped because it is a directory
930 '\' in outermost strings has to be escaped because it is a directory
931 separator on Windows:
931 separator on Windows:
932
932
933 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
933 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
934 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
934 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
935 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
935 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
936 '\\\\\\\\foo\\\\bar.patch'
936 '\\\\\\\\foo\\\\bar.patch'
937 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
937 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
938 '\\\\{tags % "{tag}"}'
938 '\\\\{tags % "{tag}"}'
939
939
940 but inner strings follow the template rules (i.e. '\' is taken as an
940 but inner strings follow the template rules (i.e. '\' is taken as an
941 escape character):
941 escape character):
942
942
943 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
943 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
944 '{"c:\\tmp"}'
944 '{"c:\\tmp"}'
945 """
945 """
946 expander = {
946 expander = {
947 b'H': b'{node}',
947 b'H': b'{node}',
948 b'R': b'{rev}',
948 b'R': b'{rev}',
949 b'h': b'{node|short}',
949 b'h': b'{node|short}',
950 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
950 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
951 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
951 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
952 b'%': b'%',
952 b'%': b'%',
953 b'b': b'{reporoot|basename}',
953 b'b': b'{reporoot|basename}',
954 }
954 }
955 if total is not None:
955 if total is not None:
956 expander[b'N'] = b'{total}'
956 expander[b'N'] = b'{total}'
957 if seqno is not None:
957 if seqno is not None:
958 expander[b'n'] = b'{seqno}'
958 expander[b'n'] = b'{seqno}'
959 if total is not None and seqno is not None:
959 if total is not None and seqno is not None:
960 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
960 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
961 if pathname is not None:
961 if pathname is not None:
962 expander[b's'] = b'{pathname|basename}'
962 expander[b's'] = b'{pathname|basename}'
963 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
963 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
964 expander[b'p'] = b'{pathname}'
964 expander[b'p'] = b'{pathname}'
965
965
966 newname = []
966 newname = []
967 for typ, start, end in templater.scantemplate(pat, raw=True):
967 for typ, start, end in templater.scantemplate(pat, raw=True):
968 if typ != b'string':
968 if typ != b'string':
969 newname.append(pat[start:end])
969 newname.append(pat[start:end])
970 continue
970 continue
971 i = start
971 i = start
972 while i < end:
972 while i < end:
973 n = pat.find(b'%', i, end)
973 n = pat.find(b'%', i, end)
974 if n < 0:
974 if n < 0:
975 newname.append(stringutil.escapestr(pat[i:end]))
975 newname.append(stringutil.escapestr(pat[i:end]))
976 break
976 break
977 newname.append(stringutil.escapestr(pat[i:n]))
977 newname.append(stringutil.escapestr(pat[i:n]))
978 if n + 2 > end:
978 if n + 2 > end:
979 raise error.Abort(_("incomplete format spec in output "
979 raise error.Abort(_("incomplete format spec in output "
980 "filename"))
980 "filename"))
981 c = pat[n + 1:n + 2]
981 c = pat[n + 1:n + 2]
982 i = n + 2
982 i = n + 2
983 try:
983 try:
984 newname.append(expander[c])
984 newname.append(expander[c])
985 except KeyError:
985 except KeyError:
986 raise error.Abort(_("invalid format spec '%%%s' in output "
986 raise error.Abort(_("invalid format spec '%%%s' in output "
987 "filename") % c)
987 "filename") % c)
988 return ''.join(newname)
988 return ''.join(newname)
989
989
990 def makefilename(ctx, pat, **props):
990 def makefilename(ctx, pat, **props):
991 if not pat:
991 if not pat:
992 return pat
992 return pat
993 tmpl = _buildfntemplate(pat, **props)
993 tmpl = _buildfntemplate(pat, **props)
994 # BUG: alias expansion shouldn't be made against template fragments
994 # BUG: alias expansion shouldn't be made against template fragments
995 # rewritten from %-format strings, but we have no easy way to partially
995 # rewritten from %-format strings, but we have no easy way to partially
996 # disable the expansion.
996 # disable the expansion.
997 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
997 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
998
998
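# Illustrative sketch (not part of cmdutil.py): the %-codes in 'pat' are first
# rewritten into a template by _buildfntemplate() and then rendered for 'ctx'.
# The pattern below is only an example.
def _examplepatchname(ctx):
    # e.g. 'hg-1234abcd5678-02-of-10.patch' for seqno=2, total=10
    return makefilename(ctx, 'hg-%h-%n-of-%N.patch', seqno=2, total=10)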
999 def isstdiofilename(pat):
999 def isstdiofilename(pat):
1000 """True if the given pat looks like a filename denoting stdin/stdout"""
1000 """True if the given pat looks like a filename denoting stdin/stdout"""
1001 return not pat or pat == '-'
1001 return not pat or pat == '-'
1002
1002
1003 class _unclosablefile(object):
1003 class _unclosablefile(object):
1004 def __init__(self, fp):
1004 def __init__(self, fp):
1005 self._fp = fp
1005 self._fp = fp
1006
1006
1007 def close(self):
1007 def close(self):
1008 pass
1008 pass
1009
1009
1010 def __iter__(self):
1010 def __iter__(self):
1011 return iter(self._fp)
1011 return iter(self._fp)
1012
1012
1013 def __getattr__(self, attr):
1013 def __getattr__(self, attr):
1014 return getattr(self._fp, attr)
1014 return getattr(self._fp, attr)
1015
1015
1016 def __enter__(self):
1016 def __enter__(self):
1017 return self
1017 return self
1018
1018
1019 def __exit__(self, exc_type, exc_value, exc_tb):
1019 def __exit__(self, exc_type, exc_value, exc_tb):
1020 pass
1020 pass
1021
1021
1022 def makefileobj(ctx, pat, mode='wb', **props):
1022 def makefileobj(ctx, pat, mode='wb', **props):
1023 writable = mode not in ('r', 'rb')
1023 writable = mode not in ('r', 'rb')
1024
1024
1025 if isstdiofilename(pat):
1025 if isstdiofilename(pat):
1026 repo = ctx.repo()
1026 repo = ctx.repo()
1027 if writable:
1027 if writable:
1028 fp = repo.ui.fout
1028 fp = repo.ui.fout
1029 else:
1029 else:
1030 fp = repo.ui.fin
1030 fp = repo.ui.fin
1031 return _unclosablefile(fp)
1031 return _unclosablefile(fp)
1032 fn = makefilename(ctx, pat, **props)
1032 fn = makefilename(ctx, pat, **props)
1033 return open(fn, mode)
1033 return open(fn, mode)
1034
1034
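# Illustrative sketch (not part of cmdutil.py): '-' (or an empty pattern)
# selects the ui's stdout/stdin wrapper, anything else is expanded through
# makefilename() and opened as a regular file.  The default pattern here is
# just an example.
def _examplewritepatch(ctx, data, pat='hg-%h.patch'):
    with makefileobj(ctx, pat, mode='wb') as fp:
        fp.write(data)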
1035 def openrevlog(repo, cmd, file_, opts):
1035 def openrevlog(repo, cmd, file_, opts):
1036 """opens the changelog, manifest, a filelog or a given revlog"""
1036 """opens the changelog, manifest, a filelog or a given revlog"""
1037 cl = opts['changelog']
1037 cl = opts['changelog']
1038 mf = opts['manifest']
1038 mf = opts['manifest']
1039 dir = opts['dir']
1039 dir = opts['dir']
1040 msg = None
1040 msg = None
1041 if cl and mf:
1041 if cl and mf:
1042 msg = _('cannot specify --changelog and --manifest at the same time')
1042 msg = _('cannot specify --changelog and --manifest at the same time')
1043 elif cl and dir:
1043 elif cl and dir:
1044 msg = _('cannot specify --changelog and --dir at the same time')
1044 msg = _('cannot specify --changelog and --dir at the same time')
1045 elif cl or mf or dir:
1045 elif cl or mf or dir:
1046 if file_:
1046 if file_:
1047 msg = _('cannot specify filename with --changelog or --manifest')
1047 msg = _('cannot specify filename with --changelog or --manifest')
1048 elif not repo:
1048 elif not repo:
1049 msg = _('cannot specify --changelog or --manifest or --dir '
1049 msg = _('cannot specify --changelog or --manifest or --dir '
1050 'without a repository')
1050 'without a repository')
1051 if msg:
1051 if msg:
1052 raise error.Abort(msg)
1052 raise error.Abort(msg)
1053
1053
1054 r = None
1054 r = None
1055 if repo:
1055 if repo:
1056 if cl:
1056 if cl:
1057 r = repo.unfiltered().changelog
1057 r = repo.unfiltered().changelog
1058 elif dir:
1058 elif dir:
1059 if 'treemanifest' not in repo.requirements:
1059 if 'treemanifest' not in repo.requirements:
1060 raise error.Abort(_("--dir can only be used on repos with "
1060 raise error.Abort(_("--dir can only be used on repos with "
1061 "treemanifest enabled"))
1061 "treemanifest enabled"))
1062 if not dir.endswith('/'):
1062 if not dir.endswith('/'):
1063 dir = dir + '/'
1063 dir = dir + '/'
1064 dirlog = repo.manifestlog._revlog.dirlog(dir)
1064 dirlog = repo.manifestlog._revlog.dirlog(dir)
1065 if len(dirlog):
1065 if len(dirlog):
1066 r = dirlog
1066 r = dirlog
1067 elif mf:
1067 elif mf:
1068 r = repo.manifestlog._revlog
1068 r = repo.manifestlog._revlog
1069 elif file_:
1069 elif file_:
1070 filelog = repo.file(file_)
1070 filelog = repo.file(file_)
1071 if len(filelog):
1071 if len(filelog):
1072 r = filelog
1072 r = filelog
1073 if not r:
1073 if not r:
1074 if not file_:
1074 if not file_:
1075 raise error.CommandError(cmd, _('invalid arguments'))
1075 raise error.CommandError(cmd, _('invalid arguments'))
1076 if not os.path.isfile(file_):
1076 if not os.path.isfile(file_):
1077 raise error.Abort(_("revlog '%s' not found") % file_)
1077 raise error.Abort(_("revlog '%s' not found") % file_)
1078 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
1078 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
1079 file_[:-2] + ".i")
1079 file_[:-2] + ".i")
1080 return r
1080 return r
1081
1081
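# Illustrative sketch (not part of cmdutil.py), loosely modelled on how the
# debug commands drive openrevlog(): the opts dict must carry all three keys
# even when they are unused.
def _exampledumprevlog(ui, repo, file_):
    opts = {'changelog': False, 'manifest': False, 'dir': ''}
    r = openrevlog(repo, 'debugdata', file_, opts)
    for i in r:
        ui.write('%d:%s\n' % (i, hex(r.node(i))))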
1082 def copy(ui, repo, pats, opts, rename=False):
1082 def copy(ui, repo, pats, opts, rename=False):
1083 # called with the repo lock held
1083 # called with the repo lock held
1084 #
1084 #
1085 # hgsep => pathname that uses "/" to separate directories
1085 # hgsep => pathname that uses "/" to separate directories
1086 # ossep => pathname that uses os.sep to separate directories
1086 # ossep => pathname that uses os.sep to separate directories
1087 cwd = repo.getcwd()
1087 cwd = repo.getcwd()
1088 targets = {}
1088 targets = {}
1089 after = opts.get("after")
1089 after = opts.get("after")
1090 dryrun = opts.get("dry_run")
1090 dryrun = opts.get("dry_run")
1091 wctx = repo[None]
1091 wctx = repo[None]
1092
1092
1093 def walkpat(pat):
1093 def walkpat(pat):
1094 srcs = []
1094 srcs = []
1095 if after:
1095 if after:
1096 badstates = '?'
1096 badstates = '?'
1097 else:
1097 else:
1098 badstates = '?r'
1098 badstates = '?r'
1099 m = scmutil.match(wctx, [pat], opts, globbed=True)
1099 m = scmutil.match(wctx, [pat], opts, globbed=True)
1100 for abs in wctx.walk(m):
1100 for abs in wctx.walk(m):
1101 state = repo.dirstate[abs]
1101 state = repo.dirstate[abs]
1102 rel = m.rel(abs)
1102 rel = m.rel(abs)
1103 exact = m.exact(abs)
1103 exact = m.exact(abs)
1104 if state in badstates:
1104 if state in badstates:
1105 if exact and state == '?':
1105 if exact and state == '?':
1106 ui.warn(_('%s: not copying - file is not managed\n') % rel)
1106 ui.warn(_('%s: not copying - file is not managed\n') % rel)
1107 if exact and state == 'r':
1107 if exact and state == 'r':
1108 ui.warn(_('%s: not copying - file has been marked for'
1108 ui.warn(_('%s: not copying - file has been marked for'
1109 ' remove\n') % rel)
1109 ' remove\n') % rel)
1110 continue
1110 continue
1111 # abs: hgsep
1111 # abs: hgsep
1112 # rel: ossep
1112 # rel: ossep
1113 srcs.append((abs, rel, exact))
1113 srcs.append((abs, rel, exact))
1114 return srcs
1114 return srcs
1115
1115
1116 # abssrc: hgsep
1116 # abssrc: hgsep
1117 # relsrc: ossep
1117 # relsrc: ossep
1118 # otarget: ossep
1118 # otarget: ossep
1119 def copyfile(abssrc, relsrc, otarget, exact):
1119 def copyfile(abssrc, relsrc, otarget, exact):
1120 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1120 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1121 if '/' in abstarget:
1121 if '/' in abstarget:
1122 # We cannot normalize abstarget itself; this would prevent
1122 # We cannot normalize abstarget itself; this would prevent
1123 # case-only renames, like a => A.
1123 # case-only renames, like a => A.
1124 abspath, absname = abstarget.rsplit('/', 1)
1124 abspath, absname = abstarget.rsplit('/', 1)
1125 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
1125 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
1126 reltarget = repo.pathto(abstarget, cwd)
1126 reltarget = repo.pathto(abstarget, cwd)
1127 target = repo.wjoin(abstarget)
1127 target = repo.wjoin(abstarget)
1128 src = repo.wjoin(abssrc)
1128 src = repo.wjoin(abssrc)
1129 state = repo.dirstate[abstarget]
1129 state = repo.dirstate[abstarget]
1130
1130
1131 scmutil.checkportable(ui, abstarget)
1131 scmutil.checkportable(ui, abstarget)
1132
1132
1133 # check for collisions
1133 # check for collisions
1134 prevsrc = targets.get(abstarget)
1134 prevsrc = targets.get(abstarget)
1135 if prevsrc is not None:
1135 if prevsrc is not None:
1136 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1136 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1137 (reltarget, repo.pathto(abssrc, cwd),
1137 (reltarget, repo.pathto(abssrc, cwd),
1138 repo.pathto(prevsrc, cwd)))
1138 repo.pathto(prevsrc, cwd)))
1139 return
1139 return
1140
1140
1141 # check for overwrites
1141 # check for overwrites
1142 exists = os.path.lexists(target)
1142 exists = os.path.lexists(target)
1143 samefile = False
1143 samefile = False
1144 if exists and abssrc != abstarget:
1144 if exists and abssrc != abstarget:
1145 if (repo.dirstate.normalize(abssrc) ==
1145 if (repo.dirstate.normalize(abssrc) ==
1146 repo.dirstate.normalize(abstarget)):
1146 repo.dirstate.normalize(abstarget)):
1147 if not rename:
1147 if not rename:
1148 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1148 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1149 return
1149 return
1150 exists = False
1150 exists = False
1151 samefile = True
1151 samefile = True
1152
1152
1153 if not after and exists or after and state in 'mn':
1153 if not after and exists or after and state in 'mn':
1154 if not opts['force']:
1154 if not opts['force']:
1155 if state in 'mn':
1155 if state in 'mn':
1156 msg = _('%s: not overwriting - file already committed\n')
1156 msg = _('%s: not overwriting - file already committed\n')
1157 if after:
1157 if after:
1158 flags = '--after --force'
1158 flags = '--after --force'
1159 else:
1159 else:
1160 flags = '--force'
1160 flags = '--force'
1161 if rename:
1161 if rename:
1162 hint = _('(hg rename %s to replace the file by '
1162 hint = _('(hg rename %s to replace the file by '
1163 'recording a rename)\n') % flags
1163 'recording a rename)\n') % flags
1164 else:
1164 else:
1165 hint = _('(hg copy %s to replace the file by '
1165 hint = _('(hg copy %s to replace the file by '
1166 'recording a copy)\n') % flags
1166 'recording a copy)\n') % flags
1167 else:
1167 else:
1168 msg = _('%s: not overwriting - file exists\n')
1168 msg = _('%s: not overwriting - file exists\n')
1169 if rename:
1169 if rename:
1170 hint = _('(hg rename --after to record the rename)\n')
1170 hint = _('(hg rename --after to record the rename)\n')
1171 else:
1171 else:
1172 hint = _('(hg copy --after to record the copy)\n')
1172 hint = _('(hg copy --after to record the copy)\n')
1173 ui.warn(msg % reltarget)
1173 ui.warn(msg % reltarget)
1174 ui.warn(hint)
1174 ui.warn(hint)
1175 return
1175 return
1176
1176
1177 if after:
1177 if after:
1178 if not exists:
1178 if not exists:
1179 if rename:
1179 if rename:
1180 ui.warn(_('%s: not recording move - %s does not exist\n') %
1180 ui.warn(_('%s: not recording move - %s does not exist\n') %
1181 (relsrc, reltarget))
1181 (relsrc, reltarget))
1182 else:
1182 else:
1183 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1183 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1184 (relsrc, reltarget))
1184 (relsrc, reltarget))
1185 return
1185 return
1186 elif not dryrun:
1186 elif not dryrun:
1187 try:
1187 try:
1188 if exists:
1188 if exists:
1189 os.unlink(target)
1189 os.unlink(target)
1190 targetdir = os.path.dirname(target) or '.'
1190 targetdir = os.path.dirname(target) or '.'
1191 if not os.path.isdir(targetdir):
1191 if not os.path.isdir(targetdir):
1192 os.makedirs(targetdir)
1192 os.makedirs(targetdir)
1193 if samefile:
1193 if samefile:
1194 tmp = target + "~hgrename"
1194 tmp = target + "~hgrename"
1195 os.rename(src, tmp)
1195 os.rename(src, tmp)
1196 os.rename(tmp, target)
1196 os.rename(tmp, target)
1197 else:
1197 else:
1198 # Preserve stat info on renames, not on copies; this matches
1198 # Preserve stat info on renames, not on copies; this matches
1199 # Linux CLI behavior.
1199 # Linux CLI behavior.
1200 util.copyfile(src, target, copystat=rename)
1200 util.copyfile(src, target, copystat=rename)
1201 srcexists = True
1201 srcexists = True
1202 except IOError as inst:
1202 except IOError as inst:
1203 if inst.errno == errno.ENOENT:
1203 if inst.errno == errno.ENOENT:
1204 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1204 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1205 srcexists = False
1205 srcexists = False
1206 else:
1206 else:
1207 ui.warn(_('%s: cannot copy - %s\n') %
1207 ui.warn(_('%s: cannot copy - %s\n') %
1208 (relsrc, encoding.strtolocal(inst.strerror)))
1208 (relsrc, encoding.strtolocal(inst.strerror)))
1209 return True # report a failure
1209 return True # report a failure
1210
1210
1211 if ui.verbose or not exact:
1211 if ui.verbose or not exact:
1212 if rename:
1212 if rename:
1213 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1213 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1214 else:
1214 else:
1215 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1215 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1216
1216
1217 targets[abstarget] = abssrc
1217 targets[abstarget] = abssrc
1218
1218
1219 # fix up dirstate
1219 # fix up dirstate
1220 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1220 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1221 dryrun=dryrun, cwd=cwd)
1221 dryrun=dryrun, cwd=cwd)
1222 if rename and not dryrun:
1222 if rename and not dryrun:
1223 if not after and srcexists and not samefile:
1223 if not after and srcexists and not samefile:
1224 repo.wvfs.unlinkpath(abssrc)
1224 repo.wvfs.unlinkpath(abssrc)
1225 wctx.forget([abssrc])
1225 wctx.forget([abssrc])
1226
1226
1227 # pat: ossep
1227 # pat: ossep
1228 # dest: ossep
1228 # dest: ossep
1229 # srcs: list of (hgsep, hgsep, ossep, bool)
1229 # srcs: list of (hgsep, hgsep, ossep, bool)
1230 # return: function that takes hgsep and returns ossep
1230 # return: function that takes hgsep and returns ossep
1231 def targetpathfn(pat, dest, srcs):
1231 def targetpathfn(pat, dest, srcs):
1232 if os.path.isdir(pat):
1232 if os.path.isdir(pat):
1233 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1233 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1234 abspfx = util.localpath(abspfx)
1234 abspfx = util.localpath(abspfx)
1235 if destdirexists:
1235 if destdirexists:
1236 striplen = len(os.path.split(abspfx)[0])
1236 striplen = len(os.path.split(abspfx)[0])
1237 else:
1237 else:
1238 striplen = len(abspfx)
1238 striplen = len(abspfx)
1239 if striplen:
1239 if striplen:
1240 striplen += len(pycompat.ossep)
1240 striplen += len(pycompat.ossep)
1241 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1241 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1242 elif destdirexists:
1242 elif destdirexists:
1243 res = lambda p: os.path.join(dest,
1243 res = lambda p: os.path.join(dest,
1244 os.path.basename(util.localpath(p)))
1244 os.path.basename(util.localpath(p)))
1245 else:
1245 else:
1246 res = lambda p: dest
1246 res = lambda p: dest
1247 return res
1247 return res
1248
1248
1249 # pat: ossep
1249 # pat: ossep
1250 # dest: ossep
1250 # dest: ossep
1251 # srcs: list of (hgsep, hgsep, ossep, bool)
1251 # srcs: list of (hgsep, hgsep, ossep, bool)
1252 # return: function that takes hgsep and returns ossep
1252 # return: function that takes hgsep and returns ossep
1253 def targetpathafterfn(pat, dest, srcs):
1253 def targetpathafterfn(pat, dest, srcs):
1254 if matchmod.patkind(pat):
1254 if matchmod.patkind(pat):
1255 # a mercurial pattern
1255 # a mercurial pattern
1256 res = lambda p: os.path.join(dest,
1256 res = lambda p: os.path.join(dest,
1257 os.path.basename(util.localpath(p)))
1257 os.path.basename(util.localpath(p)))
1258 else:
1258 else:
1259 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1259 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1260 if len(abspfx) < len(srcs[0][0]):
1260 if len(abspfx) < len(srcs[0][0]):
1261 # A directory. Either the target path contains the last
1261 # A directory. Either the target path contains the last
1262 # component of the source path or it does not.
1262 # component of the source path or it does not.
1263 def evalpath(striplen):
1263 def evalpath(striplen):
1264 score = 0
1264 score = 0
1265 for s in srcs:
1265 for s in srcs:
1266 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1266 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1267 if os.path.lexists(t):
1267 if os.path.lexists(t):
1268 score += 1
1268 score += 1
1269 return score
1269 return score
1270
1270
1271 abspfx = util.localpath(abspfx)
1271 abspfx = util.localpath(abspfx)
1272 striplen = len(abspfx)
1272 striplen = len(abspfx)
1273 if striplen:
1273 if striplen:
1274 striplen += len(pycompat.ossep)
1274 striplen += len(pycompat.ossep)
1275 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1275 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1276 score = evalpath(striplen)
1276 score = evalpath(striplen)
1277 striplen1 = len(os.path.split(abspfx)[0])
1277 striplen1 = len(os.path.split(abspfx)[0])
1278 if striplen1:
1278 if striplen1:
1279 striplen1 += len(pycompat.ossep)
1279 striplen1 += len(pycompat.ossep)
1280 if evalpath(striplen1) > score:
1280 if evalpath(striplen1) > score:
1281 striplen = striplen1
1281 striplen = striplen1
1282 res = lambda p: os.path.join(dest,
1282 res = lambda p: os.path.join(dest,
1283 util.localpath(p)[striplen:])
1283 util.localpath(p)[striplen:])
1284 else:
1284 else:
1285 # a file
1285 # a file
1286 if destdirexists:
1286 if destdirexists:
1287 res = lambda p: os.path.join(dest,
1287 res = lambda p: os.path.join(dest,
1288 os.path.basename(util.localpath(p)))
1288 os.path.basename(util.localpath(p)))
1289 else:
1289 else:
1290 res = lambda p: dest
1290 res = lambda p: dest
1291 return res
1291 return res
1292
1292
1293 pats = scmutil.expandpats(pats)
1293 pats = scmutil.expandpats(pats)
1294 if not pats:
1294 if not pats:
1295 raise error.Abort(_('no source or destination specified'))
1295 raise error.Abort(_('no source or destination specified'))
1296 if len(pats) == 1:
1296 if len(pats) == 1:
1297 raise error.Abort(_('no destination specified'))
1297 raise error.Abort(_('no destination specified'))
1298 dest = pats.pop()
1298 dest = pats.pop()
1299 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1299 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1300 if not destdirexists:
1300 if not destdirexists:
1301 if len(pats) > 1 or matchmod.patkind(pats[0]):
1301 if len(pats) > 1 or matchmod.patkind(pats[0]):
1302 raise error.Abort(_('with multiple sources, destination must be an '
1302 raise error.Abort(_('with multiple sources, destination must be an '
1303 'existing directory'))
1303 'existing directory'))
1304 if util.endswithsep(dest):
1304 if util.endswithsep(dest):
1305 raise error.Abort(_('destination %s is not a directory') % dest)
1305 raise error.Abort(_('destination %s is not a directory') % dest)
1306
1306
1307 tfn = targetpathfn
1307 tfn = targetpathfn
1308 if after:
1308 if after:
1309 tfn = targetpathafterfn
1309 tfn = targetpathafterfn
1310 copylist = []
1310 copylist = []
1311 for pat in pats:
1311 for pat in pats:
1312 srcs = walkpat(pat)
1312 srcs = walkpat(pat)
1313 if not srcs:
1313 if not srcs:
1314 continue
1314 continue
1315 copylist.append((tfn(pat, dest, srcs), srcs))
1315 copylist.append((tfn(pat, dest, srcs), srcs))
1316 if not copylist:
1316 if not copylist:
1317 raise error.Abort(_('no files to copy'))
1317 raise error.Abort(_('no files to copy'))
1318
1318
1319 errors = 0
1319 errors = 0
1320 for targetpath, srcs in copylist:
1320 for targetpath, srcs in copylist:
1321 for abssrc, relsrc, exact in srcs:
1321 for abssrc, relsrc, exact in srcs:
1322 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1322 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1323 errors += 1
1323 errors += 1
1324
1324
1325 if errors:
1325 if errors:
1326 ui.warn(_('(consider using --after)\n'))
1326 ui.warn(_('(consider using --after)\n'))
1327
1327
1328 return errors != 0
1328 return errors != 0
1329
1329
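# Illustrative sketch (not part of cmdutil.py): copy() expects its caller to
# hold the appropriate repository lock (see the comment at the top of the
# function) and returns a truthy value when any file failed.  Taking wlock()
# here is an assumption about typical usage, not something copy() enforces.
def _examplerename(ui, repo, pats, opts):
    with repo.wlock():
        return copy(ui, repo, pats, opts, rename=True)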
1330 ## facility to let extensions process additional data into an import patch
1330 ## facility to let extensions process additional data into an import patch
1331 # list of identifiers to be executed in order
1331 # list of identifiers to be executed in order
1332 extrapreimport = [] # run before commit
1332 extrapreimport = [] # run before commit
1333 extrapostimport = [] # run after commit
1333 extrapostimport = [] # run after commit
1334 # mapping from identifier to actual import function
1334 # mapping from identifier to actual import function
1335 #
1335 #
1336 # 'preimport' functions are run before the commit is made and are provided
1336 # 'preimport' functions are run before the commit is made and are provided
1337 # the following arguments:
1337 # the following arguments:
1338 # - repo: the localrepository instance,
1338 # - repo: the localrepository instance,
1339 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1339 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1340 # - extra: the future extra dictionary of the changeset, please mutate it,
1340 # - extra: the future extra dictionary of the changeset, please mutate it,
1341 # - opts: the import options.
1341 # - opts: the import options.
1342 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
1342 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
1343 # mutation of the in-memory commit and more. Feel free to rework the code to get
1343 # mutation of the in-memory commit and more. Feel free to rework the code to get
1344 # there.
1344 # there.
1345 extrapreimportmap = {}
1345 extrapreimportmap = {}
1346 # 'postimport' functions are run after the commit is made and are provided
1346 # 'postimport' functions are run after the commit is made and are provided
1347 # the following argument:
1347 # the following argument:
1348 # - ctx: the changectx created by import.
1348 # - ctx: the changectx created by import.
1349 extrapostimportmap = {}
1349 extrapostimportmap = {}
1350
1350
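# A hypothetical extension sketch (not part of cmdutil.py) showing how the
# lists and maps above are meant to be populated.  The 'note-origin'
# identifier and the 'import-origin' extra key are invented for illustration.
def _exampleorigin(repo, patchdata, extra, opts):
    # copy the node id recorded in the patch header (if any) into the
    # changeset's extra dict before the commit is created
    nodeid = patchdata.get('nodeid')
    if nodeid:
        extra['import-origin'] = nodeid

extrapreimport.append('note-origin')
extrapreimportmap['note-origin'] = _exampleorigin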
1351 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1351 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
1352 """Utility function used by commands.import to import a single patch
1352 """Utility function used by commands.import to import a single patch
1353
1353
1354 This function is explicitly defined here to help the evolve extension to
1354 This function is explicitly defined here to help the evolve extension to
1355 wrap this part of the import logic.
1355 wrap this part of the import logic.
1356
1356
1357 The API is currently a bit ugly because it is a simple code translation from
1357 The API is currently a bit ugly because it is a simple code translation from
1358 the import command. Feel free to make it better.
1358 the import command. Feel free to make it better.
1359
1359
1360 :patchdata: a dictionary containing parsed patch data (such as from
1360 :patchdata: a dictionary containing parsed patch data (such as from
1361 ``patch.extract()``)
1361 ``patch.extract()``)
1362 :parents: nodes that will be the parents of the created commit
1362 :parents: nodes that will be the parents of the created commit
1363 :opts: the full dict of options passed to the import command
1363 :opts: the full dict of options passed to the import command
1364 :msgs: list to save commit message to.
1364 :msgs: list to save commit message to.
1365 (used in case we need to save it when failing)
1365 (used in case we need to save it when failing)
1366 :updatefunc: a function that updates a repo to a given node
1366 :updatefunc: a function that updates a repo to a given node
1367 updatefunc(<repo>, <node>)
1367 updatefunc(<repo>, <node>)
1368 """
1368 """
1369 # avoid cycle context -> subrepo -> cmdutil
1369 # avoid cycle context -> subrepo -> cmdutil
1370 from . import context
1370 from . import context
1371
1371
1372 tmpname = patchdata.get('filename')
1372 tmpname = patchdata.get('filename')
1373 message = patchdata.get('message')
1373 message = patchdata.get('message')
1374 user = opts.get('user') or patchdata.get('user')
1374 user = opts.get('user') or patchdata.get('user')
1375 date = opts.get('date') or patchdata.get('date')
1375 date = opts.get('date') or patchdata.get('date')
1376 branch = patchdata.get('branch')
1376 branch = patchdata.get('branch')
1377 nodeid = patchdata.get('nodeid')
1377 nodeid = patchdata.get('nodeid')
1378 p1 = patchdata.get('p1')
1378 p1 = patchdata.get('p1')
1379 p2 = patchdata.get('p2')
1379 p2 = patchdata.get('p2')
1380
1380
1381 nocommit = opts.get('no_commit')
1381 nocommit = opts.get('no_commit')
1382 importbranch = opts.get('import_branch')
1382 importbranch = opts.get('import_branch')
1383 update = not opts.get('bypass')
1383 update = not opts.get('bypass')
1384 strip = opts["strip"]
1384 strip = opts["strip"]
1385 prefix = opts["prefix"]
1385 prefix = opts["prefix"]
1386 sim = float(opts.get('similarity') or 0)
1386 sim = float(opts.get('similarity') or 0)
1387
1387
1388 if not tmpname:
1388 if not tmpname:
1389 return None, None, False
1389 return None, None, False
1390
1390
1391 rejects = False
1391 rejects = False
1392
1392
1393 cmdline_message = logmessage(ui, opts)
1393 cmdline_message = logmessage(ui, opts)
1394 if cmdline_message:
1394 if cmdline_message:
1395 # pickup the cmdline msg
1395 # pickup the cmdline msg
1396 message = cmdline_message
1396 message = cmdline_message
1397 elif message:
1397 elif message:
1398 # pickup the patch msg
1398 # pickup the patch msg
1399 message = message.strip()
1399 message = message.strip()
1400 else:
1400 else:
1401 # launch the editor
1401 # launch the editor
1402 message = None
1402 message = None
1403 ui.debug('message:\n%s\n' % (message or ''))
1403 ui.debug('message:\n%s\n' % (message or ''))
1404
1404
1405 if len(parents) == 1:
1405 if len(parents) == 1:
1406 parents.append(repo[nullid])
1406 parents.append(repo[nullid])
1407 if opts.get('exact'):
1407 if opts.get('exact'):
1408 if not nodeid or not p1:
1408 if not nodeid or not p1:
1409 raise error.Abort(_('not a Mercurial patch'))
1409 raise error.Abort(_('not a Mercurial patch'))
1410 p1 = repo[p1]
1410 p1 = repo[p1]
1411 p2 = repo[p2 or nullid]
1411 p2 = repo[p2 or nullid]
1412 elif p2:
1412 elif p2:
1413 try:
1413 try:
1414 p1 = repo[p1]
1414 p1 = repo[p1]
1415 p2 = repo[p2]
1415 p2 = repo[p2]
1416 # Without any options, consider p2 only if the
1416 # Without any options, consider p2 only if the
1417 # patch is being applied on top of the recorded
1417 # patch is being applied on top of the recorded
1418 # first parent.
1418 # first parent.
1419 if p1 != parents[0]:
1419 if p1 != parents[0]:
1420 p1 = parents[0]
1420 p1 = parents[0]
1421 p2 = repo[nullid]
1421 p2 = repo[nullid]
1422 except error.RepoError:
1422 except error.RepoError:
1423 p1, p2 = parents
1423 p1, p2 = parents
1424 if p2.node() == nullid:
1424 if p2.node() == nullid:
1425 ui.warn(_("warning: import the patch as a normal revision\n"
1425 ui.warn(_("warning: import the patch as a normal revision\n"
1426 "(use --exact to import the patch as a merge)\n"))
1426 "(use --exact to import the patch as a merge)\n"))
1427 else:
1427 else:
1428 p1, p2 = parents
1428 p1, p2 = parents
1429
1429
1430 n = None
1430 n = None
1431 if update:
1431 if update:
1432 if p1 != parents[0]:
1432 if p1 != parents[0]:
1433 updatefunc(repo, p1.node())
1433 updatefunc(repo, p1.node())
1434 if p2 != parents[1]:
1434 if p2 != parents[1]:
1435 repo.setparents(p1.node(), p2.node())
1435 repo.setparents(p1.node(), p2.node())
1436
1436
1437 if opts.get('exact') or importbranch:
1437 if opts.get('exact') or importbranch:
1438 repo.dirstate.setbranch(branch or 'default')
1438 repo.dirstate.setbranch(branch or 'default')
1439
1439
1440 partial = opts.get('partial', False)
1440 partial = opts.get('partial', False)
1441 files = set()
1441 files = set()
1442 try:
1442 try:
1443 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1443 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1444 files=files, eolmode=None, similarity=sim / 100.0)
1444 files=files, eolmode=None, similarity=sim / 100.0)
1445 except error.PatchError as e:
1445 except error.PatchError as e:
1446 if not partial:
1446 if not partial:
1447 raise error.Abort(pycompat.bytestr(e))
1447 raise error.Abort(pycompat.bytestr(e))
1448 if partial:
1448 if partial:
1449 rejects = True
1449 rejects = True
1450
1450
1451 files = list(files)
1451 files = list(files)
1452 if nocommit:
1452 if nocommit:
1453 if message:
1453 if message:
1454 msgs.append(message)
1454 msgs.append(message)
1455 else:
1455 else:
1456 if opts.get('exact') or p2:
1456 if opts.get('exact') or p2:
1457 # If you got here, you either use --force and know what
1457 # If you got here, you either use --force and know what
1458 # you are doing or used --exact or a merge patch while
1458 # you are doing or used --exact or a merge patch while
1459 # being updated to its first parent.
1459 # being updated to its first parent.
1460 m = None
1460 m = None
1461 else:
1461 else:
1462 m = scmutil.matchfiles(repo, files or [])
1462 m = scmutil.matchfiles(repo, files or [])
1463 editform = mergeeditform(repo[None], 'import.normal')
1463 editform = mergeeditform(repo[None], 'import.normal')
1464 if opts.get('exact'):
1464 if opts.get('exact'):
1465 editor = None
1465 editor = None
1466 else:
1466 else:
1467 editor = getcommiteditor(editform=editform,
1467 editor = getcommiteditor(editform=editform,
1468 **pycompat.strkwargs(opts))
1468 **pycompat.strkwargs(opts))
1469 extra = {}
1469 extra = {}
1470 for idfunc in extrapreimport:
1470 for idfunc in extrapreimport:
1471 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1471 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1472 overrides = {}
1472 overrides = {}
1473 if partial:
1473 if partial:
1474 overrides[('ui', 'allowemptycommit')] = True
1474 overrides[('ui', 'allowemptycommit')] = True
1475 with repo.ui.configoverride(overrides, 'import'):
1475 with repo.ui.configoverride(overrides, 'import'):
1476 n = repo.commit(message, user,
1476 n = repo.commit(message, user,
1477 date, match=m,
1477 date, match=m,
1478 editor=editor, extra=extra)
1478 editor=editor, extra=extra)
1479 for idfunc in extrapostimport:
1479 for idfunc in extrapostimport:
1480 extrapostimportmap[idfunc](repo[n])
1480 extrapostimportmap[idfunc](repo[n])
1481 else:
1481 else:
1482 if opts.get('exact') or importbranch:
1482 if opts.get('exact') or importbranch:
1483 branch = branch or 'default'
1483 branch = branch or 'default'
1484 else:
1484 else:
1485 branch = p1.branch()
1485 branch = p1.branch()
1486 store = patch.filestore()
1486 store = patch.filestore()
1487 try:
1487 try:
1488 files = set()
1488 files = set()
1489 try:
1489 try:
1490 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1490 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1491 files, eolmode=None)
1491 files, eolmode=None)
1492 except error.PatchError as e:
1492 except error.PatchError as e:
1493 raise error.Abort(stringutil.forcebytestr(e))
1493 raise error.Abort(stringutil.forcebytestr(e))
1494 if opts.get('exact'):
1494 if opts.get('exact'):
1495 editor = None
1495 editor = None
1496 else:
1496 else:
1497 editor = getcommiteditor(editform='import.bypass')
1497 editor = getcommiteditor(editform='import.bypass')
1498 memctx = context.memctx(repo, (p1.node(), p2.node()),
1498 memctx = context.memctx(repo, (p1.node(), p2.node()),
1499 message,
1499 message,
1500 files=files,
1500 files=files,
1501 filectxfn=store,
1501 filectxfn=store,
1502 user=user,
1502 user=user,
1503 date=date,
1503 date=date,
1504 branch=branch,
1504 branch=branch,
1505 editor=editor)
1505 editor=editor)
1506 n = memctx.commit()
1506 n = memctx.commit()
1507 finally:
1507 finally:
1508 store.close()
1508 store.close()
1509 if opts.get('exact') and nocommit:
1509 if opts.get('exact') and nocommit:
1510 # --exact with --no-commit is still useful in that it does merge
1510 # --exact with --no-commit is still useful in that it does merge
1511 # and branch bits
1511 # and branch bits
1512 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1512 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1513 elif opts.get('exact') and hex(n) != nodeid:
1513 elif opts.get('exact') and hex(n) != nodeid:
1514 raise error.Abort(_('patch is damaged or loses information'))
1514 raise error.Abort(_('patch is damaged or loses information'))
1515 msg = _('applied to working directory')
1515 msg = _('applied to working directory')
1516 if n:
1516 if n:
1517 # i18n: refers to a short changeset id
1517 # i18n: refers to a short changeset id
1518 msg = _('created %s') % short(n)
1518 msg = _('created %s') % short(n)
1519 return msg, n, rejects
1519 return msg, n, rejects
1520
1520
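# A heavily simplified, hypothetical sketch (not part of cmdutil.py, and not
# how commands.import is actually wired up).  It only illustrates the call
# shape documented above: patch.extract() supplies 'patchdata', 'opts' must
# carry at least the import command's 'strip' and 'prefix' options, and
# 'updatefunc' is whatever callable should update the working directory.
def _exampleimportone(ui, repo, patchpath, opts):
    from . import hg  # runtime-only import, mirroring the cycle avoidance above
    msgs = []
    with open(patchpath, 'rb') as fp:
        patchdata = patch.extract(ui, fp)
    try:
        parents = repo[None].parents()
        msg, node, rejects = tryimportone(ui, repo, patchdata, parents, opts,
                                          msgs, lambda r, n: hg.clean(r, n))
    finally:
        # extract() spools the patch to a temporary file; clean it up
        tmpname = patchdata.get('filename')
        if tmpname:
            os.unlink(tmpname)
    return msg, node, rejects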
1521 # facility to let extensions include additional data in an exported patch
1521 # facility to let extensions include additional data in an exported patch
1522 # list of identifiers to be executed in order
1522 # list of identifiers to be executed in order
1523 extraexport = []
1523 extraexport = []
1524 # mapping from identifier to actual export function
1524 # mapping from identifier to actual export function
1525 # each function has to return a string to be added to the header, or None
1525 # each function has to return a string to be added to the header, or None
1526 # it is given two arguments: (sequencenumber, changectx)
1526 # it is given two arguments: (sequencenumber, changectx)
1527 extraexportmap = {}
1527 extraexportmap = {}
1528
1528
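# A hypothetical extension sketch (not part of cmdutil.py) showing how an
# extra '# ...' header line can be contributed to exported patches.  The
# 'topic' identifier and extra key are invented for illustration only.
def _exampletopicheader(seqno, ctx):
    topic = ctx.extra().get('topic')
    if topic:
        return 'Topic %s' % topic
    return None

extraexport.append('topic')
extraexportmap['topic'] = _exampletopicheader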
1529 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
1529 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
1530 node = scmutil.binnode(ctx)
1530 node = scmutil.binnode(ctx)
1531 parents = [p.node() for p in ctx.parents() if p]
1531 parents = [p.node() for p in ctx.parents() if p]
1532 branch = ctx.branch()
1532 branch = ctx.branch()
1533 if switch_parent:
1533 if switch_parent:
1534 parents.reverse()
1534 parents.reverse()
1535
1535
1536 if parents:
1536 if parents:
1537 prev = parents[0]
1537 prev = parents[0]
1538 else:
1538 else:
1539 prev = nullid
1539 prev = nullid
1540
1540
1541 fm.context(ctx=ctx)
1541 fm.context(ctx=ctx)
1542 fm.plain('# HG changeset patch\n')
1542 fm.plain('# HG changeset patch\n')
1543 fm.write('user', '# User %s\n', ctx.user())
1543 fm.write('user', '# User %s\n', ctx.user())
1544 fm.plain('# Date %d %d\n' % ctx.date())
1544 fm.plain('# Date %d %d\n' % ctx.date())
1545 fm.write('date', '# %s\n', fm.formatdate(ctx.date()))
1545 fm.write('date', '# %s\n', fm.formatdate(ctx.date()))
1546 fm.condwrite(branch and branch != 'default',
1546 fm.condwrite(branch and branch != 'default',
1547 'branch', '# Branch %s\n', branch)
1547 'branch', '# Branch %s\n', branch)
1548 fm.write('node', '# Node ID %s\n', hex(node))
1548 fm.write('node', '# Node ID %s\n', hex(node))
1549 fm.plain('# Parent %s\n' % hex(prev))
1549 fm.plain('# Parent %s\n' % hex(prev))
1550 if len(parents) > 1:
1550 if len(parents) > 1:
1551 fm.plain('# Parent %s\n' % hex(parents[1]))
1551 fm.plain('# Parent %s\n' % hex(parents[1]))
1552 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name='node'))
1552 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name='node'))
1553
1553
1554 # TODO: redesign extraexportmap function to support formatter
1554 # TODO: redesign extraexportmap function to support formatter
1555 for headerid in extraexport:
1555 for headerid in extraexport:
1556 header = extraexportmap[headerid](seqno, ctx)
1556 header = extraexportmap[headerid](seqno, ctx)
1557 if header is not None:
1557 if header is not None:
1558 fm.plain('# %s\n' % header)
1558 fm.plain('# %s\n' % header)
1559
1559
1560 fm.write('desc', '%s\n', ctx.description().rstrip())
1560 fm.write('desc', '%s\n', ctx.description().rstrip())
1561 fm.plain('\n')
1561 fm.plain('\n')
1562
1562
1563 if fm.isplain():
1563 if fm.isplain():
1564 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
1564 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
1565 for chunk, label in chunkiter:
1565 for chunk, label in chunkiter:
1566 fm.plain(chunk, label=label)
1566 fm.plain(chunk, label=label)
1567 else:
1567 else:
1568 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
1568 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
1569 # TODO: make it structured?
1569 # TODO: make it structured?
1570 fm.data(diff=b''.join(chunkiter))
1570 fm.data(diff=b''.join(chunkiter))
1571
1571
1572 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
1572 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
1573 """Export changesets to stdout or a single file"""
1573 """Export changesets to stdout or a single file"""
1574 for seqno, rev in enumerate(revs, 1):
1574 for seqno, rev in enumerate(revs, 1):
1575 ctx = repo[rev]
1575 ctx = repo[rev]
1576 if not dest.startswith('<'):
1576 if not dest.startswith('<'):
1577 repo.ui.note("%s\n" % dest)
1577 repo.ui.note("%s\n" % dest)
1578 fm.startitem()
1578 fm.startitem()
1579 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
1579 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
1580
1580
1581 def _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, diffopts,
1581 def _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, diffopts,
1582 match):
1582 match):
1583 """Export changesets to possibly multiple files"""
1583 """Export changesets to possibly multiple files"""
1584 total = len(revs)
1584 total = len(revs)
1585 revwidth = max(len(str(rev)) for rev in revs)
1585 revwidth = max(len(str(rev)) for rev in revs)
1586 filemap = util.sortdict() # filename: [(seqno, rev), ...]
1586 filemap = util.sortdict() # filename: [(seqno, rev), ...]
1587
1587
1588 for seqno, rev in enumerate(revs, 1):
1588 for seqno, rev in enumerate(revs, 1):
1589 ctx = repo[rev]
1589 ctx = repo[rev]
1590 dest = makefilename(ctx, fntemplate,
1590 dest = makefilename(ctx, fntemplate,
1591 total=total, seqno=seqno, revwidth=revwidth)
1591 total=total, seqno=seqno, revwidth=revwidth)
1592 filemap.setdefault(dest, []).append((seqno, rev))
1592 filemap.setdefault(dest, []).append((seqno, rev))
1593
1593
1594 for dest in filemap:
1594 for dest in filemap:
1595 with formatter.maybereopen(basefm, dest) as fm:
1595 with formatter.maybereopen(basefm, dest) as fm:
1596 repo.ui.note("%s\n" % dest)
1596 repo.ui.note("%s\n" % dest)
1597 for seqno, rev in filemap[dest]:
1597 for seqno, rev in filemap[dest]:
1598 fm.startitem()
1598 fm.startitem()
1599 ctx = repo[rev]
1599 ctx = repo[rev]
1600 _exportsingle(repo, ctx, fm, match, switch_parent, seqno,
1600 _exportsingle(repo, ctx, fm, match, switch_parent, seqno,
1601 diffopts)
1601 diffopts)
1602
1602
1603 def export(repo, revs, basefm, fntemplate='hg-%h.patch', switch_parent=False,
1603 def export(repo, revs, basefm, fntemplate='hg-%h.patch', switch_parent=False,
1604 opts=None, match=None):
1604 opts=None, match=None):
1605 '''export changesets as hg patches
1605 '''export changesets as hg patches
1606
1606
1607 Args:
1607 Args:
1608 repo: The repository from which we're exporting revisions.
1608 repo: The repository from which we're exporting revisions.
1609 revs: A list of revisions to export as revision numbers.
1609 revs: A list of revisions to export as revision numbers.
1610 basefm: A formatter to which patches should be written.
1610 basefm: A formatter to which patches should be written.
1611 fntemplate: An optional string to use for generating patch file names.
1611 fntemplate: An optional string to use for generating patch file names.
1612 switch_parent: If True, show diffs against the second parent when it is
1612 switch_parent: If True, show diffs against the second parent when it is
1613 not nullid. Default is False, which always shows the diff against p1.
1613 not nullid. Default is False, which always shows the diff against p1.
1614 opts: diff options to use for generating the patch.
1614 opts: diff options to use for generating the patch.
1615 match: If specified, only export changes to files matching this matcher.
1615 match: If specified, only export changes to files matching this matcher.
1616
1616
1617 Returns:
1617 Returns:
1618 Nothing.
1618 Nothing.
1619
1619
1620 Side Effect:
1620 Side Effect:
1621 "HG Changeset Patch" data is emitted to one of the following
1621 "HG Changeset Patch" data is emitted to one of the following
1622 destinations:
1622 destinations:
1623 fntemplate specified: Each rev is written to a unique file named using
1623 fntemplate specified: Each rev is written to a unique file named using
1624 the given template.
1624 the given template.
1625 Otherwise: All revs will be written to basefm.
1625 Otherwise: All revs will be written to basefm.
1626 '''
1626 '''
1627 scmutil.prefetchfiles(repo, revs, match)
1628
1627 if not fntemplate:
1629 if not fntemplate:
1628 _exportfile(repo, revs, basefm, '<unnamed>', switch_parent, opts, match)
1630 _exportfile(repo, revs, basefm, '<unnamed>', switch_parent, opts, match)
1629 else:
1631 else:
1630 _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, opts,
1632 _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, opts,
1631 match)
1633 match)
1632
1634
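# Illustrative sketch (not part of cmdutil.py).  export() above and
# exportfile() below both start by calling scmutil.prefetchfiles(), giving
# extensions that fetch file contents lazily a chance to batch their requests
# before any diffs are generated.  The formatter topic, filename template and
# diff options below are examples only.
def _exampleexport(ui, repo, revs):
    m = scmutil.matchall(repo)
    with ui.formatter('export', {}) as fm:
        export(repo, revs, fm, fntemplate='hg-%h.patch',
               opts=patch.diffallopts(ui), match=m)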
1633 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
1635 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
1634 """Export changesets to the given file stream"""
1636 """Export changesets to the given file stream"""
1637 scmutil.prefetchfiles(repo, revs, match)
1638
1635 dest = getattr(fp, 'name', '<unnamed>')
1639 dest = getattr(fp, 'name', '<unnamed>')
1636 with formatter.formatter(repo.ui, fp, 'export', {}) as fm:
1640 with formatter.formatter(repo.ui, fp, 'export', {}) as fm:
1637 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
1641 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
1638
1642
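# Illustrative sketch (not part of cmdutil.py): exportfile() suits callers
# that already hold an open binary stream (a temporary file, a mail body,
# and so on).  The output path and diff options here are examples.
def _exampleexportcurrent(ui, repo, path='current.patch'):
    # export only the working directory's parent changeset
    with open(path, 'wb') as fp:
        exportfile(repo, [repo['.'].rev()], fp, opts=patch.diffallopts(ui))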
1639 def showmarker(fm, marker, index=None):
1643 def showmarker(fm, marker, index=None):
1640 """utility function to display obsolescence marker in a readable way
1644 """utility function to display obsolescence marker in a readable way
1641
1645
1642 To be used by debug function."""
1646 To be used by debug function."""
1643 if index is not None:
1647 if index is not None:
1644 fm.write('index', '%i ', index)
1648 fm.write('index', '%i ', index)
1645 fm.write('prednode', '%s ', hex(marker.prednode()))
1649 fm.write('prednode', '%s ', hex(marker.prednode()))
1646 succs = marker.succnodes()
1650 succs = marker.succnodes()
1647 fm.condwrite(succs, 'succnodes', '%s ',
1651 fm.condwrite(succs, 'succnodes', '%s ',
1648 fm.formatlist(map(hex, succs), name='node'))
1652 fm.formatlist(map(hex, succs), name='node'))
1649 fm.write('flag', '%X ', marker.flags())
1653 fm.write('flag', '%X ', marker.flags())
1650 parents = marker.parentnodes()
1654 parents = marker.parentnodes()
1651 if parents is not None:
1655 if parents is not None:
1652 fm.write('parentnodes', '{%s} ',
1656 fm.write('parentnodes', '{%s} ',
1653 fm.formatlist(map(hex, parents), name='node', sep=', '))
1657 fm.formatlist(map(hex, parents), name='node', sep=', '))
1654 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1658 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1655 meta = marker.metadata().copy()
1659 meta = marker.metadata().copy()
1656 meta.pop('date', None)
1660 meta.pop('date', None)
1657 smeta = util.rapply(pycompat.maybebytestr, meta)
1661 smeta = util.rapply(pycompat.maybebytestr, meta)
1658 fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
1662 fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
1659 fm.plain('\n')
1663 fm.plain('\n')
1660
1664
1661 def finddate(ui, repo, date):
1665 def finddate(ui, repo, date):
1662 """Find the tipmost changeset that matches the given date spec"""
1666 """Find the tipmost changeset that matches the given date spec"""
1663
1667
1664 df = dateutil.matchdate(date)
1668 df = dateutil.matchdate(date)
1665 m = scmutil.matchall(repo)
1669 m = scmutil.matchall(repo)
1666 results = {}
1670 results = {}
1667
1671
1668 def prep(ctx, fns):
1672 def prep(ctx, fns):
1669 d = ctx.date()
1673 d = ctx.date()
1670 if df(d[0]):
1674 if df(d[0]):
1671 results[ctx.rev()] = d
1675 results[ctx.rev()] = d
1672
1676
1673 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1677 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1674 rev = ctx.rev()
1678 rev = ctx.rev()
1675 if rev in results:
1679 if rev in results:
1676 ui.status(_("found revision %s from %s\n") %
1680 ui.status(_("found revision %s from %s\n") %
1677 (rev, dateutil.datestr(results[rev])))
1681 (rev, dateutil.datestr(results[rev])))
1678 return '%d' % rev
1682 return '%d' % rev
1679
1683
1680 raise error.Abort(_("revision matching date not found"))
1684 raise error.Abort(_("revision matching date not found"))
1681
1685
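# Illustrative sketch (not part of cmdutil.py): finddate() returns the
# matching revision number as a string, which can be resolved straight back
# into a changectx.  The date spec is an example.
def _examplectxfordate(ui, repo, datespec='2018-05-01'):
    rev = finddate(ui, repo, datespec)   # e.g. '12345'
    return repo[rev]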
1682 def increasingwindows(windowsize=8, sizelimit=512):
1686 def increasingwindows(windowsize=8, sizelimit=512):
1683 while True:
1687 while True:
1684 yield windowsize
1688 yield windowsize
1685 if windowsize < sizelimit:
1689 if windowsize < sizelimit:
1686 windowsize *= 2
1690 windowsize *= 2
1687
1691
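# Illustrative sketch (not part of cmdutil.py): the window sizes double until
# they reach the size limit and then stay there.
def _examplewindowsizes(n=9):
    import itertools
    # [8, 16, 32, 64, 128, 256, 512, 512, 512] with the default arguments
    return list(itertools.islice(increasingwindows(), n))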
1688 def _walkrevs(repo, opts):
1692 def _walkrevs(repo, opts):
1689 # Default --rev value depends on --follow but --follow behavior
1693 # Default --rev value depends on --follow but --follow behavior
1690 # depends on revisions resolved from --rev...
1694 # depends on revisions resolved from --rev...
1691 follow = opts.get('follow') or opts.get('follow_first')
1695 follow = opts.get('follow') or opts.get('follow_first')
1692 if opts.get('rev'):
1696 if opts.get('rev'):
1693 revs = scmutil.revrange(repo, opts['rev'])
1697 revs = scmutil.revrange(repo, opts['rev'])
1694 elif follow and repo.dirstate.p1() == nullid:
1698 elif follow and repo.dirstate.p1() == nullid:
1695 revs = smartset.baseset()
1699 revs = smartset.baseset()
1696 elif follow:
1700 elif follow:
1697 revs = repo.revs('reverse(:.)')
1701 revs = repo.revs('reverse(:.)')
1698 else:
1702 else:
1699 revs = smartset.spanset(repo)
1703 revs = smartset.spanset(repo)
1700 revs.reverse()
1704 revs.reverse()
1701 return revs
1705 return revs
1702
1706
1703 class FileWalkError(Exception):
1707 class FileWalkError(Exception):
1704 pass
1708 pass
1705
1709
1706 def walkfilerevs(repo, match, follow, revs, fncache):
1710 def walkfilerevs(repo, match, follow, revs, fncache):
1707 '''Walks the file history for the matched files.
1711 '''Walks the file history for the matched files.
1708
1712
1709 Returns the changeset revs that are involved in the file history.
1713 Returns the changeset revs that are involved in the file history.
1710
1714
1711 Throws FileWalkError if the file history can't be walked using
1715 Throws FileWalkError if the file history can't be walked using
1712 filelogs alone.
1716 filelogs alone.
1713 '''
1717 '''
1714 wanted = set()
1718 wanted = set()
1715 copies = []
1719 copies = []
1716 minrev, maxrev = min(revs), max(revs)
1720 minrev, maxrev = min(revs), max(revs)
1717 def filerevgen(filelog, last):
1721 def filerevgen(filelog, last):
1718 """
1722 """
1719 Only files, no patterns. Check the history of each file.
1723 Only files, no patterns. Check the history of each file.
1720
1724
1721 Examines filelog entries within the minrev/maxrev linkrev range.
1721 Examines filelog entries within the minrev/maxrev linkrev range.
1722 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1722 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1723 tuples in backwards order.
1723 tuples in backwards order.
1724 """
1728 """
1725 cl_count = len(repo)
1729 cl_count = len(repo)
1726 revs = []
1730 revs = []
1727 for j in xrange(0, last + 1):
1731 for j in xrange(0, last + 1):
1728 linkrev = filelog.linkrev(j)
1732 linkrev = filelog.linkrev(j)
1729 if linkrev < minrev:
1733 if linkrev < minrev:
1730 continue
1734 continue
1731 # only yield revs for which we have the changelog; it can
1731 # only yield revs for which we have the changelog; it can
1732 # happen while doing "hg log" during a pull or commit
1736 # happen while doing "hg log" during a pull or commit
1733 if linkrev >= cl_count:
1737 if linkrev >= cl_count:
1734 break
1738 break
1735
1739
1736 parentlinkrevs = []
1740 parentlinkrevs = []
1737 for p in filelog.parentrevs(j):
1741 for p in filelog.parentrevs(j):
1738 if p != nullrev:
1742 if p != nullrev:
1739 parentlinkrevs.append(filelog.linkrev(p))
1743 parentlinkrevs.append(filelog.linkrev(p))
1740 n = filelog.node(j)
1744 n = filelog.node(j)
1741 revs.append((linkrev, parentlinkrevs,
1745 revs.append((linkrev, parentlinkrevs,
1742 follow and filelog.renamed(n)))
1746 follow and filelog.renamed(n)))
1743
1747
1744 return reversed(revs)
1748 return reversed(revs)
1745 def iterfiles():
1749 def iterfiles():
1746 pctx = repo['.']
1750 pctx = repo['.']
1747 for filename in match.files():
1751 for filename in match.files():
1748 if follow:
1752 if follow:
1749 if filename not in pctx:
1753 if filename not in pctx:
1750 raise error.Abort(_('cannot follow file not in parent '
1754 raise error.Abort(_('cannot follow file not in parent '
1751 'revision: "%s"') % filename)
1755 'revision: "%s"') % filename)
1752 yield filename, pctx[filename].filenode()
1756 yield filename, pctx[filename].filenode()
1753 else:
1757 else:
1754 yield filename, None
1758 yield filename, None
1755 for filename_node in copies:
1759 for filename_node in copies:
1756 yield filename_node
1760 yield filename_node
1757
1761
1758 for file_, node in iterfiles():
1762 for file_, node in iterfiles():
1759 filelog = repo.file(file_)
1763 filelog = repo.file(file_)
1760 if not len(filelog):
1764 if not len(filelog):
1761 if node is None:
1765 if node is None:
1762 # A zero count may be a directory or deleted file, so
1766 # A zero count may be a directory or deleted file, so
1763 # try to find matching entries on the slow path.
1767 # try to find matching entries on the slow path.
1764 if follow:
1768 if follow:
1765 raise error.Abort(
1769 raise error.Abort(
1766 _('cannot follow nonexistent file: "%s"') % file_)
1770 _('cannot follow nonexistent file: "%s"') % file_)
1767 raise FileWalkError("Cannot walk via filelog")
1771 raise FileWalkError("Cannot walk via filelog")
1768 else:
1772 else:
1769 continue
1773 continue
1770
1774
1771 if node is None:
1775 if node is None:
1772 last = len(filelog) - 1
1776 last = len(filelog) - 1
1773 else:
1777 else:
1774 last = filelog.rev(node)
1778 last = filelog.rev(node)
1775
1779
1776 # keep track of all ancestors of the file
1780 # keep track of all ancestors of the file
1777 ancestors = {filelog.linkrev(last)}
1781 ancestors = {filelog.linkrev(last)}
1778
1782
1779 # iterate from latest to oldest revision
1783 # iterate from latest to oldest revision
1780 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1784 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1781 if not follow:
1785 if not follow:
1782 if rev > maxrev:
1786 if rev > maxrev:
1783 continue
1787 continue
1784 else:
1788 else:
1785 # Note that last might not be the first interesting
1789 # Note that last might not be the first interesting
1786 # rev to us:
1790 # rev to us:
1787 # if the file has been changed after maxrev, we'll
1791 # if the file has been changed after maxrev, we'll
1788 # have linkrev(last) > maxrev, and we still need
1792 # have linkrev(last) > maxrev, and we still need
1789 # to explore the file graph
1793 # to explore the file graph
1790 if rev not in ancestors:
1794 if rev not in ancestors:
1791 continue
1795 continue
1792 # XXX insert 1327 fix here
1796 # XXX insert 1327 fix here
1793 if flparentlinkrevs:
1797 if flparentlinkrevs:
1794 ancestors.update(flparentlinkrevs)
1798 ancestors.update(flparentlinkrevs)
1795
1799
1796 fncache.setdefault(rev, []).append(file_)
1800 fncache.setdefault(rev, []).append(file_)
1797 wanted.add(rev)
1801 wanted.add(rev)
1798 if copied:
1802 if copied:
1799 copies.append(copied)
1803 copies.append(copied)
1800
1804
1801 return wanted
1805 return wanted
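
# Usage note: walkchangerevs() below uses this fast path as
#
#     wanted = walkfilerevs(repo, match, follow, revs, fncache)
#
# and falls back to scanning the changelog when FileWalkError is raised.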

class _followfilter(object):
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
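
# Note: _followfilter is stateful. The first revision passed to match()
# becomes the starting point; later calls report membership in its descendant
# set (when walking forward) or its ancestor set (when walking backward),
# as implemented above, so revisions must be fed in a consistent direction.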

def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _walkrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
    fncache = {}
    change = repo.__getitem__

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = [f for f in ctx.files() if match(f)]
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
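
# Minimal usage sketch (illustrative only; 'prepare' and the loop body are
# placeholders):
#
#     def prepare(ctx, fns):
#         pass        # called in forward order for each ctx in the window
#     for ctx in walkchangerevs(repo, match, opts, prepare):
#         pass        # display ctx here, usually newest first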

def add(ui, repo, match, prefix, explicitonly, **opts):
    join = lambda f: os.path.join(prefix, f)
    bad = []

    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
                                  unknown=True, ignored=False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
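
# Usage sketch (illustrative; the argument values are hypothetical): add()
# returns the explicitly requested files that could not be added, so a caller
# can treat a non-empty result as failure, e.g.
#
#     bad = add(ui, repo, matcher, "", False, dry_run=False)
#     rc = 1 if bad else 0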

def addwebdirpath(repo, serverpath, webconf):
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    for r in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[r]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)

def forget(ui, repo, match, prefix, explicitonly, dryrun, confirm):
    if dryrun and confirm:
        raise error.Abort(_("cannot specify both --dry-run and --confirm"))
    join = lambda f: os.path.join(prefix, f)
    bad = []
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix,
                                           dryrun=dryrun, confirm=confirm)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    if confirm:
        responses = _('[Ynsa?]'
                      '$$ &Yes, forget this file'
                      '$$ &No, skip this file'
                      '$$ &Skip remaining files'
                      '$$ Include &all remaining files'
                      '$$ &? (display help)')
        for filename in forget[:]:
            r = ui.promptchoice(_('forget %s %s') % (filename, responses))
            if r == 4: # ?
                while r == 4:
                    for c, t in ui.extractchoices(responses)[1]:
                        ui.write('%s - %s\n' % (c, encoding.lower(t)))
                    r = ui.promptchoice(_('forget %s %s') % (filename,
                                                             responses))
            if r == 0: # yes
                continue
            elif r == 1: # no
                forget.remove(filename)
            elif r == 2: # Skip
                fnindex = forget.index(filename)
                del forget[fnindex:]
                break
            elif r == 3: # All
                break

    for f in forget:
        if ui.verbose or not match.exact(f) or confirm:
            ui.status(_('removing %s\n') % match.rel(f))

    if not dryrun:
        rejected = wctx.forget(forget, prefix)
        bad.extend(f for f in rejected if f in match.files())
        forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
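
# Note: forget() returns a (bad, forgot) pair -- the files it could not forget
# and the files it did forget (subrepo entries are prefixed with their subrepo
# path). With confirm=True each candidate is prompted for individually using
# the Yes/No/Skip/all choices defined above.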

def files(ui, ctx, m, fm, fmt, subrepos):
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret

def remove(ui, repo, m, prefix, after, force, subrepos, dryrun, warnings=None):
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   dryrun, warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                               % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    ui.progress(_('deleting'), None)

    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            if ui.verbose or (f in files):
                warnings.append(_('not removing %s: file still exists\n')
                                % m.rel(f))
                ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    if not dryrun:
        with repo.wlock():
            if not after:
                for f in list:
                    if f in added:
                        continue # we never unlink added files on remove
                    repo.wvfs.unlinkpath(f, ignoremissing=True)
            repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
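
# Note: remove() accumulates warnings in a list rather than printing them
# immediately so that recursive calls from subrepositories can share the same
# 'warnings' list; only the outermost call (warnings=None, hence warn=True)
# flushes the collected messages to ui.warn() after the progress output.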

def _updatecatformatter(fm, ctx, matcher, path, decode):
    """Hook for adding data to the formatter used by ``hg cat``.

    Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
    this method first."""
    data = ctx[path].data()
    if decode:
        data = ctx.repo().wwritedata(path, data)
    fm.startitem()
    fm.write('data', '%s', data)
    fm.data(abspath=path, path=matcher.rel(path))

def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    err = 1
    opts = pycompat.byteskwargs(opts)

    def write(path):
        filename = None
        if fntemplate:
            filename = makefilename(ctx, fntemplate,
                                    pathname=os.path.join(prefix, path))
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with formatter.maybereopen(basefm, filename) as fm:
            _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
                write(file)
                return 0
        except KeyError:
            pass

    scmutil.prefetchfiles(repo, [ctx.rev()], matcher)

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path),
                           **pycompat.strkwargs(opts)):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
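
# Note: both the single-file fast path and the general walk above invoke
# scmutil.prefetchfiles() before any file content is written, giving
# extensions that store file contents elsewhere (e.g. lfs, as mentioned in
# _updatecatformatter's docstring) a chance to fetch what they need up front.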

def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = dateutil.parsedate(date)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    dsguard = None
    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        dsguard = dirstateguard.dirstateguard(repo, 'commit')
    with dsguard or util.nullcontextmanager():
        if dsguard:
            if scmutil.addremove(repo, matcher, "", opts) != 0:
                raise error.Abort(
                    _("failed to mark all new/missing files as added/removed"))

        return commitfunc(ui, repo, message, matcher, opts)

def samefile(f, ctx1, ctx2):
    if f in ctx1.manifest():
        a = ctx1.filectx(f)
        if f in ctx2.manifest():
            b = ctx2.filectx(f)
            return (not a.cmp(b)
                    and a.flags() == b.flags())
        else:
            return False
    else:
        return f not in ctx2.manifest()
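
# Note: samefile() treats "absent from both manifests" as equal, "present in
# only one manifest" as different, and otherwise compares content (a.cmp(b))
# as well as flags.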

def amend(ui, repo, old, extra, pats, opts):
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    with repo.wlock(), repo.lock(), repo.transaction('amend'):
        # Participating changesets:
        #
        # wctx     o - workingctx that contains changes from working copy
        #          |   to go into amending commit
        #          |
        # old      o - changeset to amend
        #          |
        # base     o - first parent of the changeset to amend
        wctx = repo[None]

        # Copy to avoid mutating input
        extra = extra.copy()
        # Update extra dict from amended commit (e.g. to preserve graft
        # source)
        extra.update(old.extra())

        # Also update it from the wctx
        extra.update(wctx.extra())

        user = opts.get('user') or old.user()
        date = opts.get('date') or old.date()

        # Parse the date to allow comparison between date and old.date()
        date = dateutil.parsedate(date)

        if len(old.parents()) > 1:
            # ctx.files() isn't reliable for merges, so fall back to the
            # slower repo.status() method
            files = set([fn for st in repo.status(base, old)[:3]
                         for fn in st])
        else:
            files = set(old.files())

        # add/remove the files to the working copy if the "addremove" option
        # was specified.
        matcher = scmutil.match(wctx, pats, opts)
        if (opts.get('addremove')
            and scmutil.addremove(repo, matcher, "", opts)):
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

        # Check subrepos. This depends on in-place wctx._status update in
        # subrepo.precommit(). To minimize the risk of this hack, we do
        # nothing if .hgsub does not exist.
        if '.hgsub' in wctx or '.hgsub' in old:
            subs, commitsubs, newsubstate = subrepoutil.precommit(
                ui, wctx, wctx._status, matcher)
            # amend should abort if commitsubrepos is enabled
            assert not commitsubs
            if subs:
                subrepoutil.writestate(repo, newsubstate)

        ms = mergemod.mergestate.read(repo)
        mergeutil.checkunresolved(ms)

        filestoamend = set(f for f in wctx.files() if matcher(f))

        changes = (len(filestoamend) > 0)
        if changes:
            # Recompute copies (avoid recording a -> b -> a)
            copied = copies.pathcopies(base, wctx, matcher)
            if old.p2:
                copied.update(copies.pathcopies(old.p2(), wctx, matcher))

            # Prune files which were reverted by the updates: if old
            # introduced file X and the file was renamed in the working
            # copy, then those two files are the same and
            # we can discard X from our list of files. Likewise if X
            # was removed, it's no longer relevant. If X is missing (aka
            # deleted), old X must be preserved.
            files.update(filestoamend)
            files = [f for f in files if (not samefile(f, wctx, base)
                                          or f in wctx.deleted())]

            def filectxfn(repo, ctx_, path):
                try:
                    # If the file being considered is not amongst the files
                    # to be amended, we should return the file context from the
                    # old changeset. This avoids issues when only some files in
                    # the working copy are being amended but there are also
                    # changes to other files from the old changeset.
                    if path not in filestoamend:
                        return old.filectx(path)

                    # Return None for removed files.
                    if path in wctx.removed():
                        return None

                    fctx = wctx[path]
                    flags = fctx.flags()
                    mctx = context.memfilectx(repo, ctx_,
                                              fctx.path(), fctx.data(),
                                              islink='l' in flags,
                                              isexec='x' in flags,
                                              copied=copied.get(path))
                    return mctx
                except KeyError:
                    return None
        else:
            ui.note(_('copying changeset %s to %s\n') % (old, base))

            # Use version of files as in the old cset
            def filectxfn(repo, ctx_, path):
                try:
                    return old.filectx(path)
                except KeyError:
                    return None

        # See if we got a message from -m or -l, if not, open the editor with
        # the message of the changeset to amend.
        message = logmessage(ui, opts)

        editform = mergeeditform(old, 'commit.amend')
        editor = getcommiteditor(editform=editform,
                                 **pycompat.strkwargs(opts))

        if not message:
            editor = getcommiteditor(edit=True, editform=editform)
            message = old.description()

        pureextra = extra.copy()
        extra['amend_source'] = old.hex()

        new = context.memctx(repo,
                             parents=[base.node(), old.p2().node()],
                             text=message,
                             files=files,
                             filectxfn=filectxfn,
                             user=user,
                             date=date,
                             extra=extra,
                             editor=editor)

        newdesc = changelog.stripdesc(new.description())
        if ((not changes)
            and newdesc == old.description()
            and user == old.user()
            and date == old.date()
            and pureextra == old.extra()):
            # nothing changed. continuing here would create a new node
            # anyway because of the amend_source noise.
            #
            # This is not what we expect from amend.
            return old.node()

        if opts.get('secret'):
            commitphase = 'secret'
        else:
            commitphase = old.phase()
        overrides = {('phases', 'new-commit'): commitphase}
        with ui.configoverride(overrides, 'amend'):
            newid = repo.commitctx(new)

        # Reroute the working copy parent to the new changeset
        repo.setparents(newid, nullid)
        mapping = {old.node(): (newid,)}
        obsmetadata = None
        if opts.get('note'):
            obsmetadata = {'note': opts['note']}
        scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)

        # Fixing the dirstate because localrepo.commitctx does not update
        # it. This is rather convenient because we did not need to update
        # the dirstate for all the files in the new commit which commitctx
        # could have done if it updated the dirstate. Now, we can
        # selectively update the dirstate only for the amended files.
        dirstate = repo.dirstate

        # Update the state of the files which were added and
        # modified in the amend to "normal" in the dirstate.
        normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
        for f in normalfiles:
            dirstate.normal(f)

        # Update the state of files which were removed in the amend
        # to "removed" in the dirstate.
        removedfiles = set(wctx.removed()) & filestoamend
        for f in removedfiles:
            dirstate.drop(f)

        return newid
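
# Note: amend() returns the node of the replacement changeset, or old.node()
# unchanged when nothing differs at all (no amended files and identical
# description, user, date and extra), in which case no new commit is created.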

def commiteditor(repo, ctx, subs, editform=''):
    if ctx.description():
        return ctx.description()
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)

def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        ref = '.'.join(forms)
        if repo.ui.config('committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path, action='commit')
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2605
2609
2606 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
2610 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
2607 ui = repo.ui
2611 ui = repo.ui
2608 spec = formatter.templatespec(ref, None, None)
2612 spec = formatter.templatespec(ref, None, None)
2609 t = logcmdutil.changesettemplater(ui, repo, spec)
2613 t = logcmdutil.changesettemplater(ui, repo, spec)
2610 t.t.cache.update((k, templater.unquotestring(v))
2614 t.t.cache.update((k, templater.unquotestring(v))
2611 for k, v in repo.ui.configitems('committemplate'))
2615 for k, v in repo.ui.configitems('committemplate'))
2612
2616
2613 if not extramsg:
2617 if not extramsg:
2614 extramsg = '' # ensure that extramsg is string
2618 extramsg = '' # ensure that extramsg is string
2615
2619
2616 ui.pushbuffer()
2620 ui.pushbuffer()
2617 t.show(ctx, extramsg=extramsg)
2621 t.show(ctx, extramsg=extramsg)
2618 return ui.popbuffer()
2622 return ui.popbuffer()
2619
2623
2620 def hgprefix(msg):
2624 def hgprefix(msg):
2621 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2625 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2622
2626
2623 def buildcommittext(repo, ctx, subs, extramsg):
2627 def buildcommittext(repo, ctx, subs, extramsg):
2624 edittext = []
2628 edittext = []
2625 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2629 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2626 if ctx.description():
2630 if ctx.description():
2627 edittext.append(ctx.description())
2631 edittext.append(ctx.description())
2628 edittext.append("")
2632 edittext.append("")
2629 edittext.append("") # Empty line between message and comments.
2633 edittext.append("") # Empty line between message and comments.
2630 edittext.append(hgprefix(_("Enter commit message."
2634 edittext.append(hgprefix(_("Enter commit message."
2631 " Lines beginning with 'HG:' are removed.")))
2635 " Lines beginning with 'HG:' are removed.")))
2632 edittext.append(hgprefix(extramsg))
2636 edittext.append(hgprefix(extramsg))
2633 edittext.append("HG: --")
2637 edittext.append("HG: --")
2634 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2638 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2635 if ctx.p2():
2639 if ctx.p2():
2636 edittext.append(hgprefix(_("branch merge")))
2640 edittext.append(hgprefix(_("branch merge")))
2637 if ctx.branch():
2641 if ctx.branch():
2638 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2642 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2639 if bookmarks.isactivewdirparent(repo):
2643 if bookmarks.isactivewdirparent(repo):
2640 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2644 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2641 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2645 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2642 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2646 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2643 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2647 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2644 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2648 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2645 if not added and not modified and not removed:
2649 if not added and not modified and not removed:
2646 edittext.append(hgprefix(_("no files changed")))
2650 edittext.append(hgprefix(_("no files changed")))
2647 edittext.append("")
2651 edittext.append("")
2648
2652
2649 return "\n".join(edittext)
2653 return "\n".join(edittext)
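
For illustration, a minimal sketch of the editor text that buildcommittext() assembles, assuming a hypothetical user, one modified file, and the active 'default' branch; the extramsg line is simply whatever the caller passed in:

    <existing description, if any>

    HG: Enter commit message. Lines beginning with 'HG:' are removed.
    HG: <extramsg passed by the caller>
    HG: --
    HG: user: Alice <alice@example.org>
    HG: branch 'default'
    HG: changed foo.py
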
2650
2654
2651 def commitstatus(repo, node, branch, bheads=None, opts=None):
2655 def commitstatus(repo, node, branch, bheads=None, opts=None):
2652 if opts is None:
2656 if opts is None:
2653 opts = {}
2657 opts = {}
2654 ctx = repo[node]
2658 ctx = repo[node]
2655 parents = ctx.parents()
2659 parents = ctx.parents()
2656
2660
2657 if (not opts.get('amend') and bheads and node not in bheads and not
2661 if (not opts.get('amend') and bheads and node not in bheads and not
2658 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2662 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2659 repo.ui.status(_('created new head\n'))
2663 repo.ui.status(_('created new head\n'))
2660 # The message is not printed for initial roots. For the other
2664 # The message is not printed for initial roots. For the other
2661 # changesets, it is printed in the following situations:
2665 # changesets, it is printed in the following situations:
2662 #
2666 #
2663 # Par column: for the 2 parents with ...
2667 # Par column: for the 2 parents with ...
2664 # N: null or no parent
2668 # N: null or no parent
2665 # B: parent is on another named branch
2669 # B: parent is on another named branch
2666 # C: parent is a regular non head changeset
2670 # C: parent is a regular non head changeset
2667 # H: parent was a branch head of the current branch
2671 # H: parent was a branch head of the current branch
2668 # Msg column: whether we print "created new head" message
2672 # Msg column: whether we print "created new head" message
2669 # In the following, it is assumed that there already exists some
2673 # In the following, it is assumed that there already exists some
2670 # initial branch heads of the current branch, otherwise nothing is
2674 # initial branch heads of the current branch, otherwise nothing is
2671 # printed anyway.
2675 # printed anyway.
2672 #
2676 #
2673 # Par Msg Comment
2677 # Par Msg Comment
2674 # N N y additional topo root
2678 # N N y additional topo root
2675 #
2679 #
2676 # B N y additional branch root
2680 # B N y additional branch root
2677 # C N y additional topo head
2681 # C N y additional topo head
2678 # H N n usual case
2682 # H N n usual case
2679 #
2683 #
2680 # B B y weird additional branch root
2684 # B B y weird additional branch root
2681 # C B y branch merge
2685 # C B y branch merge
2682 # H B n merge with named branch
2686 # H B n merge with named branch
2683 #
2687 #
2684 # C C y additional head from merge
2688 # C C y additional head from merge
2685 # C H n merge with a head
2689 # C H n merge with a head
2686 #
2690 #
2687 # H H n head merge: head count decreases
2691 # H H n head merge: head count decreases
2688
2692
2689 if not opts.get('close_branch'):
2693 if not opts.get('close_branch'):
2690 for r in parents:
2694 for r in parents:
2691 if r.closesbranch() and r.branch() == branch:
2695 if r.closesbranch() and r.branch() == branch:
2692 repo.ui.status(_('reopening closed branch head %d\n') % r.rev())
2696 repo.ui.status(_('reopening closed branch head %d\n') % r.rev())
2693
2697
2694 if repo.ui.debugflag:
2698 if repo.ui.debugflag:
2695 repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx.hex()))
2699 repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx.hex()))
2696 elif repo.ui.verbose:
2700 elif repo.ui.verbose:
2697 repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx))
2701 repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx))
2698
2702
2699 def postcommitstatus(repo, pats, opts):
2703 def postcommitstatus(repo, pats, opts):
2700 return repo.status(match=scmutil.match(repo[None], pats, opts))
2704 return repo.status(match=scmutil.match(repo[None], pats, opts))
2701
2705
2702 def revert(ui, repo, ctx, parents, *pats, **opts):
2706 def revert(ui, repo, ctx, parents, *pats, **opts):
2703 opts = pycompat.byteskwargs(opts)
2707 opts = pycompat.byteskwargs(opts)
2704 parent, p2 = parents
2708 parent, p2 = parents
2705 node = ctx.node()
2709 node = ctx.node()
2706
2710
2707 mf = ctx.manifest()
2711 mf = ctx.manifest()
2708 if node == p2:
2712 if node == p2:
2709 parent = p2
2713 parent = p2
2710
2714
2711 # need all matching names in dirstate and manifest of target rev,
2715 # need all matching names in dirstate and manifest of target rev,
2712 # so have to walk both. do not print errors if files exist in one
2716 # so have to walk both. do not print errors if files exist in one
2713 # but not the other. in both cases, filesets should be evaluated against
2717 # but not the other. in both cases, filesets should be evaluated against
2714 # workingctx to get consistent result (issue4497). this means 'set:**'
2718 # workingctx to get consistent result (issue4497). this means 'set:**'
2715 # cannot be used to select missing files from target rev.
2719 # cannot be used to select missing files from target rev.
2716
2720
2717 # `names` is a mapping for all elements in working copy and target revision
2721 # `names` is a mapping for all elements in working copy and target revision
2718 # The mapping is in the form:
2722 # The mapping is in the form:
2719 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2723 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2720 names = {}
2724 names = {}
2721
2725
2722 with repo.wlock():
2726 with repo.wlock():
2723 ## filling of the `names` mapping
2727 ## filling of the `names` mapping
2724 # walk dirstate to fill `names`
2728 # walk dirstate to fill `names`
2725
2729
2726 interactive = opts.get('interactive', False)
2730 interactive = opts.get('interactive', False)
2727 wctx = repo[None]
2731 wctx = repo[None]
2728 m = scmutil.match(wctx, pats, opts)
2732 m = scmutil.match(wctx, pats, opts)
2729
2733
2730 # we'll need this later
2734 # we'll need this later
2731 targetsubs = sorted(s for s in wctx.substate if m(s))
2735 targetsubs = sorted(s for s in wctx.substate if m(s))
2732
2736
2733 if not m.always():
2737 if not m.always():
2734 matcher = matchmod.badmatch(m, lambda x, y: False)
2738 matcher = matchmod.badmatch(m, lambda x, y: False)
2735 for abs in wctx.walk(matcher):
2739 for abs in wctx.walk(matcher):
2736 names[abs] = m.rel(abs), m.exact(abs)
2740 names[abs] = m.rel(abs), m.exact(abs)
2737
2741
2738 # walk target manifest to fill `names`
2742 # walk target manifest to fill `names`
2739
2743
2740 def badfn(path, msg):
2744 def badfn(path, msg):
2741 if path in names:
2745 if path in names:
2742 return
2746 return
2743 if path in ctx.substate:
2747 if path in ctx.substate:
2744 return
2748 return
2745 path_ = path + '/'
2749 path_ = path + '/'
2746 for f in names:
2750 for f in names:
2747 if f.startswith(path_):
2751 if f.startswith(path_):
2748 return
2752 return
2749 ui.warn("%s: %s\n" % (m.rel(path), msg))
2753 ui.warn("%s: %s\n" % (m.rel(path), msg))
2750
2754
2751 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2755 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2752 if abs not in names:
2756 if abs not in names:
2753 names[abs] = m.rel(abs), m.exact(abs)
2757 names[abs] = m.rel(abs), m.exact(abs)
2754
2758
2755 # Find the status of all files in `names`.
2759 # Find the status of all files in `names`.
2756 m = scmutil.matchfiles(repo, names)
2760 m = scmutil.matchfiles(repo, names)
2757
2761
2758 changes = repo.status(node1=node, match=m,
2762 changes = repo.status(node1=node, match=m,
2759 unknown=True, ignored=True, clean=True)
2763 unknown=True, ignored=True, clean=True)
2760 else:
2764 else:
2761 changes = repo.status(node1=node, match=m)
2765 changes = repo.status(node1=node, match=m)
2762 for kind in changes:
2766 for kind in changes:
2763 for abs in kind:
2767 for abs in kind:
2764 names[abs] = m.rel(abs), m.exact(abs)
2768 names[abs] = m.rel(abs), m.exact(abs)
2765
2769
2766 m = scmutil.matchfiles(repo, names)
2770 m = scmutil.matchfiles(repo, names)
2767
2771
2768 modified = set(changes.modified)
2772 modified = set(changes.modified)
2769 added = set(changes.added)
2773 added = set(changes.added)
2770 removed = set(changes.removed)
2774 removed = set(changes.removed)
2771 _deleted = set(changes.deleted)
2775 _deleted = set(changes.deleted)
2772 unknown = set(changes.unknown)
2776 unknown = set(changes.unknown)
2773 unknown.update(changes.ignored)
2777 unknown.update(changes.ignored)
2774 clean = set(changes.clean)
2778 clean = set(changes.clean)
2775 modadded = set()
2779 modadded = set()
2776
2780
2777 # We need to account for the state of the file in the dirstate,
2781 # We need to account for the state of the file in the dirstate,
2778 # even when we revert against something other than the parent. This will
2782 # even when we revert against something other than the parent. This will
2779 # slightly alter the behavior of revert (doing a backup or not, delete
2783 # slightly alter the behavior of revert (doing a backup or not, delete
2780 # or just forget etc).
2784 # or just forget etc).
2781 if parent == node:
2785 if parent == node:
2782 dsmodified = modified
2786 dsmodified = modified
2783 dsadded = added
2787 dsadded = added
2784 dsremoved = removed
2788 dsremoved = removed
2785 # store all local modifications, useful later for rename detection
2789 # store all local modifications, useful later for rename detection
2786 localchanges = dsmodified | dsadded
2790 localchanges = dsmodified | dsadded
2787 modified, added, removed = set(), set(), set()
2791 modified, added, removed = set(), set(), set()
2788 else:
2792 else:
2789 changes = repo.status(node1=parent, match=m)
2793 changes = repo.status(node1=parent, match=m)
2790 dsmodified = set(changes.modified)
2794 dsmodified = set(changes.modified)
2791 dsadded = set(changes.added)
2795 dsadded = set(changes.added)
2792 dsremoved = set(changes.removed)
2796 dsremoved = set(changes.removed)
2793 # store all local modifications, useful later for rename detection
2797 # store all local modifications, useful later for rename detection
2794 localchanges = dsmodified | dsadded
2798 localchanges = dsmodified | dsadded
2795
2799
2796 # only take into account removes between wc and target
2800 # only take into account removes between wc and target
2797 clean |= dsremoved - removed
2801 clean |= dsremoved - removed
2798 dsremoved &= removed
2802 dsremoved &= removed
2799 # distinguish between dirstate removes and others
2803 # distinguish between dirstate removes and others
2800 removed -= dsremoved
2804 removed -= dsremoved
2801
2805
2802 modadded = added & dsmodified
2806 modadded = added & dsmodified
2803 added -= modadded
2807 added -= modadded
2804
2808
2805 # tell newly modified files apart.
2809 # tell newly modified files apart.
2806 dsmodified &= modified
2810 dsmodified &= modified
2807 dsmodified |= modified & dsadded # dirstate added may need backup
2811 dsmodified |= modified & dsadded # dirstate added may need backup
2808 modified -= dsmodified
2812 modified -= dsmodified
2809
2813
2810 # We need to wait for some post-processing to update this set
2814 # We need to wait for some post-processing to update this set
2811 # before making the distinction. The dirstate will be used for
2815 # before making the distinction. The dirstate will be used for
2812 # that purpose.
2816 # that purpose.
2813 dsadded = added
2817 dsadded = added
2814
2818
2815 # in case of merge, files that are actually added can be reported as
2819 # in case of merge, files that are actually added can be reported as
2816 # modified; we need to post-process the result
2820 # modified; we need to post-process the result
2817 if p2 != nullid:
2821 if p2 != nullid:
2818 mergeadd = set(dsmodified)
2822 mergeadd = set(dsmodified)
2819 for path in dsmodified:
2823 for path in dsmodified:
2820 if path in mf:
2824 if path in mf:
2821 mergeadd.remove(path)
2825 mergeadd.remove(path)
2822 dsadded |= mergeadd
2826 dsadded |= mergeadd
2823 dsmodified -= mergeadd
2827 dsmodified -= mergeadd
2824
2828
2825 # if f is a rename, update `names` to also revert the source
2829 # if f is a rename, update `names` to also revert the source
2826 cwd = repo.getcwd()
2830 cwd = repo.getcwd()
2827 for f in localchanges:
2831 for f in localchanges:
2828 src = repo.dirstate.copied(f)
2832 src = repo.dirstate.copied(f)
2829 # XXX should we check for rename down to target node?
2833 # XXX should we check for rename down to target node?
2830 if src and src not in names and repo.dirstate[src] == 'r':
2834 if src and src not in names and repo.dirstate[src] == 'r':
2831 dsremoved.add(src)
2835 dsremoved.add(src)
2832 names[src] = (repo.pathto(src, cwd), True)
2836 names[src] = (repo.pathto(src, cwd), True)
2833
2837
2834 # determine the exact nature of the deleted files
2838 # determine the exact nature of the deleted files
2835 deladded = set(_deleted)
2839 deladded = set(_deleted)
2836 for path in _deleted:
2840 for path in _deleted:
2837 if path in mf:
2841 if path in mf:
2838 deladded.remove(path)
2842 deladded.remove(path)
2839 deleted = _deleted - deladded
2843 deleted = _deleted - deladded
2840
2844
2841 # distinguish between files to forget and the others
2845 # distinguish between files to forget and the others
2842 added = set()
2846 added = set()
2843 for abs in dsadded:
2847 for abs in dsadded:
2844 if repo.dirstate[abs] != 'a':
2848 if repo.dirstate[abs] != 'a':
2845 added.add(abs)
2849 added.add(abs)
2846 dsadded -= added
2850 dsadded -= added
2847
2851
2848 for abs in deladded:
2852 for abs in deladded:
2849 if repo.dirstate[abs] == 'a':
2853 if repo.dirstate[abs] == 'a':
2850 dsadded.add(abs)
2854 dsadded.add(abs)
2851 deladded -= dsadded
2855 deladded -= dsadded
2852
2856
2853 # For files marked as removed, we check if an unknown file is present at
2857 # For files marked as removed, we check if an unknown file is present at
2854 # the same path. If such a file exists, it may need to be backed up.
2858 # the same path. If such a file exists, it may need to be backed up.
2855 # Making the distinction at this stage helps have simpler backup
2859 # Making the distinction at this stage helps have simpler backup
2856 # logic.
2860 # logic.
2857 removunk = set()
2861 removunk = set()
2858 for abs in removed:
2862 for abs in removed:
2859 target = repo.wjoin(abs)
2863 target = repo.wjoin(abs)
2860 if os.path.lexists(target):
2864 if os.path.lexists(target):
2861 removunk.add(abs)
2865 removunk.add(abs)
2862 removed -= removunk
2866 removed -= removunk
2863
2867
2864 dsremovunk = set()
2868 dsremovunk = set()
2865 for abs in dsremoved:
2869 for abs in dsremoved:
2866 target = repo.wjoin(abs)
2870 target = repo.wjoin(abs)
2867 if os.path.lexists(target):
2871 if os.path.lexists(target):
2868 dsremovunk.add(abs)
2872 dsremovunk.add(abs)
2869 dsremoved -= dsremovunk
2873 dsremoved -= dsremovunk
2870
2874
2871 # actions to be actually performed by revert
2875 # actions to be actually performed by revert
2872 # (<list of files>, <message>) tuple
2876 # (<list of files>, <message>) tuple
2873 actions = {'revert': ([], _('reverting %s\n')),
2877 actions = {'revert': ([], _('reverting %s\n')),
2874 'add': ([], _('adding %s\n')),
2878 'add': ([], _('adding %s\n')),
2875 'remove': ([], _('removing %s\n')),
2879 'remove': ([], _('removing %s\n')),
2876 'drop': ([], _('removing %s\n')),
2880 'drop': ([], _('removing %s\n')),
2877 'forget': ([], _('forgetting %s\n')),
2881 'forget': ([], _('forgetting %s\n')),
2878 'undelete': ([], _('undeleting %s\n')),
2882 'undelete': ([], _('undeleting %s\n')),
2879 'noop': (None, _('no changes needed to %s\n')),
2883 'noop': (None, _('no changes needed to %s\n')),
2880 'unknown': (None, _('file not managed: %s\n')),
2884 'unknown': (None, _('file not managed: %s\n')),
2881 }
2885 }
2882
2886
2883 # "constant" that convey the backup strategy.
2887 # "constant" that convey the backup strategy.
2884 # All set to `discard` if `no-backup` is set do avoid checking
2888 # All set to `discard` if `no-backup` is set do avoid checking
2885 # no_backup lower in the code.
2889 # no_backup lower in the code.
2886 # These values are ordered for comparison purposes
2890 # These values are ordered for comparison purposes
2887 backupinteractive = 3 # do backup if interactively modified
2891 backupinteractive = 3 # do backup if interactively modified
2888 backup = 2 # unconditionally do backup
2892 backup = 2 # unconditionally do backup
2889 check = 1 # check if the existing file differs from target
2893 check = 1 # check if the existing file differs from target
2890 discard = 0 # never do backup
2894 discard = 0 # never do backup
2891 if opts.get('no_backup'):
2895 if opts.get('no_backup'):
2892 backupinteractive = backup = check = discard
2896 backupinteractive = backup = check = discard
2893 if interactive:
2897 if interactive:
2894 dsmodifiedbackup = backupinteractive
2898 dsmodifiedbackup = backupinteractive
2895 else:
2899 else:
2896 dsmodifiedbackup = backup
2900 dsmodifiedbackup = backup
2897 tobackup = set()
2901 tobackup = set()
2898
2902
2899 backupanddel = actions['remove']
2903 backupanddel = actions['remove']
2900 if not opts.get('no_backup'):
2904 if not opts.get('no_backup'):
2901 backupanddel = actions['drop']
2905 backupanddel = actions['drop']
2902
2906
2903 disptable = (
2907 disptable = (
2904 # dispatch table:
2908 # dispatch table:
2905 # file state
2909 # file state
2906 # action
2910 # action
2907 # make backup
2911 # make backup
2908
2912
2909 ## Sets that result in changes to files on disk
2913 ## Sets that result in changes to files on disk
2910 # Modified compared to target, no local change
2914 # Modified compared to target, no local change
2911 (modified, actions['revert'], discard),
2915 (modified, actions['revert'], discard),
2912 # Modified compared to target, but local file is deleted
2916 # Modified compared to target, but local file is deleted
2913 (deleted, actions['revert'], discard),
2917 (deleted, actions['revert'], discard),
2914 # Modified compared to target, local change
2918 # Modified compared to target, local change
2915 (dsmodified, actions['revert'], dsmodifiedbackup),
2919 (dsmodified, actions['revert'], dsmodifiedbackup),
2916 # Added since target
2920 # Added since target
2917 (added, actions['remove'], discard),
2921 (added, actions['remove'], discard),
2918 # Added in working directory
2922 # Added in working directory
2919 (dsadded, actions['forget'], discard),
2923 (dsadded, actions['forget'], discard),
2920 # Added since target, have local modification
2924 # Added since target, have local modification
2921 (modadded, backupanddel, backup),
2925 (modadded, backupanddel, backup),
2922 # Added since target but file is missing in working directory
2926 # Added since target but file is missing in working directory
2923 (deladded, actions['drop'], discard),
2927 (deladded, actions['drop'], discard),
2924 # Removed since target, before working copy parent
2928 # Removed since target, before working copy parent
2925 (removed, actions['add'], discard),
2929 (removed, actions['add'], discard),
2926 # Same as `removed` but an unknown file exists at the same path
2930 # Same as `removed` but an unknown file exists at the same path
2927 (removunk, actions['add'], check),
2931 (removunk, actions['add'], check),
2928 # Removed since target, marked as such in working copy parent
2932 # Removed since target, marked as such in working copy parent
2929 (dsremoved, actions['undelete'], discard),
2933 (dsremoved, actions['undelete'], discard),
2930 # Same as `dsremoved` but an unknown file exists at the same path
2934 # Same as `dsremoved` but an unknown file exists at the same path
2931 (dsremovunk, actions['undelete'], check),
2935 (dsremovunk, actions['undelete'], check),
2932 ## the following sets do not result in any file changes
2936 ## the following sets do not result in any file changes
2933 # File with no modification
2937 # File with no modification
2934 (clean, actions['noop'], discard),
2938 (clean, actions['noop'], discard),
2935 # Existing file, not tracked anywhere
2939 # Existing file, not tracked anywhere
2936 (unknown, actions['unknown'], discard),
2940 (unknown, actions['unknown'], discard),
2937 )
2941 )
2938
2942
2939 for abs, (rel, exact) in sorted(names.items()):
2943 for abs, (rel, exact) in sorted(names.items()):
2940 # target file to be touched on disk (relative to cwd)
2944 # target file to be touched on disk (relative to cwd)
2941 target = repo.wjoin(abs)
2945 target = repo.wjoin(abs)
2942 # search the entry in the dispatch table.
2946 # search the entry in the dispatch table.
2943 # if the file is in any of these sets, it was touched in the working
2947 # if the file is in any of these sets, it was touched in the working
2944 # directory parent and we are sure it needs to be reverted.
2948 # directory parent and we are sure it needs to be reverted.
2945 for table, (xlist, msg), dobackup in disptable:
2949 for table, (xlist, msg), dobackup in disptable:
2946 if abs not in table:
2950 if abs not in table:
2947 continue
2951 continue
2948 if xlist is not None:
2952 if xlist is not None:
2949 xlist.append(abs)
2953 xlist.append(abs)
2950 if dobackup:
2954 if dobackup:
2951 # If in interactive mode, don't automatically create
2955 # If in interactive mode, don't automatically create
2952 # .orig files (issue4793)
2956 # .orig files (issue4793)
2953 if dobackup == backupinteractive:
2957 if dobackup == backupinteractive:
2954 tobackup.add(abs)
2958 tobackup.add(abs)
2955 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
2959 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
2956 bakname = scmutil.origpath(ui, repo, rel)
2960 bakname = scmutil.origpath(ui, repo, rel)
2957 ui.note(_('saving current version of %s as %s\n') %
2961 ui.note(_('saving current version of %s as %s\n') %
2958 (rel, bakname))
2962 (rel, bakname))
2959 if not opts.get('dry_run'):
2963 if not opts.get('dry_run'):
2960 if interactive:
2964 if interactive:
2961 util.copyfile(target, bakname)
2965 util.copyfile(target, bakname)
2962 else:
2966 else:
2963 util.rename(target, bakname)
2967 util.rename(target, bakname)
2964 if ui.verbose or not exact:
2968 if ui.verbose or not exact:
2965 if not isinstance(msg, bytes):
2969 if not isinstance(msg, bytes):
2966 msg = msg(abs)
2970 msg = msg(abs)
2967 ui.status(msg % rel)
2971 ui.status(msg % rel)
2968 elif exact:
2972 elif exact:
2969 ui.warn(msg % rel)
2973 ui.warn(msg % rel)
2970 break
2974 break
2971
2975
2972 if not opts.get('dry_run'):
2976 if not opts.get('dry_run'):
2973 needdata = ('revert', 'add', 'undelete')
2977 needdata = ('revert', 'add', 'undelete')
2974 if _revertprefetch is not _revertprefetchstub:
2978 if _revertprefetch is not _revertprefetchstub:
2975 ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, "
2979 ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, "
2976 "add a callback to 'scmutil.fileprefetchhooks'",
2980 "add a callback to 'scmutil.fileprefetchhooks'",
2977 '4.6', stacklevel=1)
2981 '4.6', stacklevel=1)
2978 _revertprefetch(repo, ctx,
2982 _revertprefetch(repo, ctx,
2979 *[actions[name][0] for name in needdata])
2983 *[actions[name][0] for name in needdata])
2980 oplist = [actions[name][0] for name in needdata]
2984 oplist = [actions[name][0] for name in needdata]
2981 prefetch = scmutil.prefetchfiles
2985 prefetch = scmutil.prefetchfiles
2982 matchfiles = scmutil.matchfiles
2986 matchfiles = scmutil.matchfiles
2983 prefetch(repo, [ctx.rev()],
2987 prefetch(repo, [ctx.rev()],
2984 matchfiles(repo,
2988 matchfiles(repo,
2985 [f for sublist in oplist for f in sublist]))
2989 [f for sublist in oplist for f in sublist]))
2986 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
2990 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
2987
2991
2988 if targetsubs:
2992 if targetsubs:
2989 # Revert the subrepos on the revert list
2993 # Revert the subrepos on the revert list
2990 for sub in targetsubs:
2994 for sub in targetsubs:
2991 try:
2995 try:
2992 wctx.sub(sub).revert(ctx.substate[sub], *pats,
2996 wctx.sub(sub).revert(ctx.substate[sub], *pats,
2993 **pycompat.strkwargs(opts))
2997 **pycompat.strkwargs(opts))
2994 except KeyError:
2998 except KeyError:
2995 raise error.Abort("subrepository '%s' does not exist in %s!"
2999 raise error.Abort("subrepository '%s' does not exist in %s!"
2996 % (sub, short(ctx.node())))
3000 % (sub, short(ctx.node())))
2997
3001
2998 def _revertprefetchstub(repo, ctx, *files):
3002 def _revertprefetchstub(repo, ctx, *files):
2999 """Stub method for detecting extension wrapping of _revertprefetch(), to
3003 """Stub method for detecting extension wrapping of _revertprefetch(), to
3000 issue a deprecation warning."""
3004 issue a deprecation warning."""
3001
3005
3002 _revertprefetch = _revertprefetchstub
3006 _revertprefetch = _revertprefetchstub
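
A minimal sketch of the replacement path that the deprecation warning above points at, assuming a hypothetical extension named 'myext'; the callback signature (repo, revs, match) mirrors the scmutil.prefetchfiles() call made in revert():

    from mercurial import scmutil

    def _prefetchfiles(repo, revs, match):
        # hypothetical callback: report the files revert is about to read
        for rev in revs:
            ctx = repo[rev]
            for f in ctx.walk(match):
                repo.ui.debug('myext: would prefetch %s@%s\n' % (f, ctx))

    def extsetup(ui):
        # register under the extension's name so the hook stays identifiable
        scmutil.fileprefetchhooks.add('myext', _prefetchfiles)
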
3003
3007
3004 def _performrevert(repo, parents, ctx, actions, interactive=False,
3008 def _performrevert(repo, parents, ctx, actions, interactive=False,
3005 tobackup=None):
3009 tobackup=None):
3006 """function that actually perform all the actions computed for revert
3010 """function that actually perform all the actions computed for revert
3007
3011
3008 This is an independent function so that extensions can plug in and react to
3012 This is an independent function so that extensions can plug in and react to
3009 the imminent revert.
3013 the imminent revert.
3010
3014
3011 Make sure you have the working directory locked when calling this function.
3015 Make sure you have the working directory locked when calling this function.
3012 """
3016 """
3013 parent, p2 = parents
3017 parent, p2 = parents
3014 node = ctx.node()
3018 node = ctx.node()
3015 excluded_files = []
3019 excluded_files = []
3016
3020
3017 def checkout(f):
3021 def checkout(f):
3018 fc = ctx[f]
3022 fc = ctx[f]
3019 repo.wwrite(f, fc.data(), fc.flags())
3023 repo.wwrite(f, fc.data(), fc.flags())
3020
3024
3021 def doremove(f):
3025 def doremove(f):
3022 try:
3026 try:
3023 repo.wvfs.unlinkpath(f)
3027 repo.wvfs.unlinkpath(f)
3024 except OSError:
3028 except OSError:
3025 pass
3029 pass
3026 repo.dirstate.remove(f)
3030 repo.dirstate.remove(f)
3027
3031
3028 audit_path = pathutil.pathauditor(repo.root, cached=True)
3032 audit_path = pathutil.pathauditor(repo.root, cached=True)
3029 for f in actions['forget'][0]:
3033 for f in actions['forget'][0]:
3030 if interactive:
3034 if interactive:
3031 choice = repo.ui.promptchoice(
3035 choice = repo.ui.promptchoice(
3032 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3036 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3033 if choice == 0:
3037 if choice == 0:
3034 repo.dirstate.drop(f)
3038 repo.dirstate.drop(f)
3035 else:
3039 else:
3036 excluded_files.append(f)
3040 excluded_files.append(f)
3037 else:
3041 else:
3038 repo.dirstate.drop(f)
3042 repo.dirstate.drop(f)
3039 for f in actions['remove'][0]:
3043 for f in actions['remove'][0]:
3040 audit_path(f)
3044 audit_path(f)
3041 if interactive:
3045 if interactive:
3042 choice = repo.ui.promptchoice(
3046 choice = repo.ui.promptchoice(
3043 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3047 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3044 if choice == 0:
3048 if choice == 0:
3045 doremove(f)
3049 doremove(f)
3046 else:
3050 else:
3047 excluded_files.append(f)
3051 excluded_files.append(f)
3048 else:
3052 else:
3049 doremove(f)
3053 doremove(f)
3050 for f in actions['drop'][0]:
3054 for f in actions['drop'][0]:
3051 audit_path(f)
3055 audit_path(f)
3052 repo.dirstate.remove(f)
3056 repo.dirstate.remove(f)
3053
3057
3054 normal = None
3058 normal = None
3055 if node == parent:
3059 if node == parent:
3056 # We're reverting to our parent. If possible, we'd like status
3060 # We're reverting to our parent. If possible, we'd like status
3057 # to report the file as clean. We have to use normallookup for
3061 # to report the file as clean. We have to use normallookup for
3058 # merges to avoid losing information about merged/dirty files.
3062 # merges to avoid losing information about merged/dirty files.
3059 if p2 != nullid:
3063 if p2 != nullid:
3060 normal = repo.dirstate.normallookup
3064 normal = repo.dirstate.normallookup
3061 else:
3065 else:
3062 normal = repo.dirstate.normal
3066 normal = repo.dirstate.normal
3063
3067
3064 newlyaddedandmodifiedfiles = set()
3068 newlyaddedandmodifiedfiles = set()
3065 if interactive:
3069 if interactive:
3066 # Prompt the user for changes to revert
3070 # Prompt the user for changes to revert
3067 torevert = [f for f in actions['revert'][0] if f not in excluded_files]
3071 torevert = [f for f in actions['revert'][0] if f not in excluded_files]
3068 m = scmutil.matchfiles(repo, torevert)
3072 m = scmutil.matchfiles(repo, torevert)
3069 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3073 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3070 diffopts.nodates = True
3074 diffopts.nodates = True
3071 diffopts.git = True
3075 diffopts.git = True
3072 operation = 'discard'
3076 operation = 'discard'
3073 reversehunks = True
3077 reversehunks = True
3074 if node != parent:
3078 if node != parent:
3075 operation = 'apply'
3079 operation = 'apply'
3076 reversehunks = False
3080 reversehunks = False
3077 if reversehunks:
3081 if reversehunks:
3078 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3082 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3079 else:
3083 else:
3080 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3084 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3081 originalchunks = patch.parsepatch(diff)
3085 originalchunks = patch.parsepatch(diff)
3082
3086
3083 try:
3087 try:
3084
3088
3085 chunks, opts = recordfilter(repo.ui, originalchunks,
3089 chunks, opts = recordfilter(repo.ui, originalchunks,
3086 operation=operation)
3090 operation=operation)
3087 if reversehunks:
3091 if reversehunks:
3088 chunks = patch.reversehunks(chunks)
3092 chunks = patch.reversehunks(chunks)
3089
3093
3090 except error.PatchError as err:
3094 except error.PatchError as err:
3091 raise error.Abort(_('error parsing patch: %s') % err)
3095 raise error.Abort(_('error parsing patch: %s') % err)
3092
3096
3093 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3097 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3094 if tobackup is None:
3098 if tobackup is None:
3095 tobackup = set()
3099 tobackup = set()
3096 # Apply changes
3100 # Apply changes
3097 fp = stringio()
3101 fp = stringio()
3098 for c in chunks:
3102 for c in chunks:
3099 # Create a backup file only if this hunk should be backed up
3103 # Create a backup file only if this hunk should be backed up
3100 if ishunk(c) and c.header.filename() in tobackup:
3104 if ishunk(c) and c.header.filename() in tobackup:
3101 abs = c.header.filename()
3105 abs = c.header.filename()
3102 target = repo.wjoin(abs)
3106 target = repo.wjoin(abs)
3103 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3107 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3104 util.copyfile(target, bakname)
3108 util.copyfile(target, bakname)
3105 tobackup.remove(abs)
3109 tobackup.remove(abs)
3106 c.write(fp)
3110 c.write(fp)
3107 dopatch = fp.tell()
3111 dopatch = fp.tell()
3108 fp.seek(0)
3112 fp.seek(0)
3109 if dopatch:
3113 if dopatch:
3110 try:
3114 try:
3111 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3115 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3112 except error.PatchError as err:
3116 except error.PatchError as err:
3113 raise error.Abort(pycompat.bytestr(err))
3117 raise error.Abort(pycompat.bytestr(err))
3114 del fp
3118 del fp
3115 else:
3119 else:
3116 for f in actions['revert'][0]:
3120 for f in actions['revert'][0]:
3117 checkout(f)
3121 checkout(f)
3118 if normal:
3122 if normal:
3119 normal(f)
3123 normal(f)
3120
3124
3121 for f in actions['add'][0]:
3125 for f in actions['add'][0]:
3122 # Don't check out modified files; they are already created by the diff
3126 # Don't check out modified files; they are already created by the diff
3123 if f not in newlyaddedandmodifiedfiles:
3127 if f not in newlyaddedandmodifiedfiles:
3124 checkout(f)
3128 checkout(f)
3125 repo.dirstate.add(f)
3129 repo.dirstate.add(f)
3126
3130
3127 normal = repo.dirstate.normallookup
3131 normal = repo.dirstate.normallookup
3128 if node == parent and p2 == nullid:
3132 if node == parent and p2 == nullid:
3129 normal = repo.dirstate.normal
3133 normal = repo.dirstate.normal
3130 for f in actions['undelete'][0]:
3134 for f in actions['undelete'][0]:
3131 checkout(f)
3135 checkout(f)
3132 normal(f)
3136 normal(f)
3133
3137
3134 copied = copies.pathcopies(repo[parent], ctx)
3138 copied = copies.pathcopies(repo[parent], ctx)
3135
3139
3136 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3140 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3137 if f in copied:
3141 if f in copied:
3138 repo.dirstate.copy(copied[f], f)
3142 repo.dirstate.copy(copied[f], f)
3139
3143
3140 class command(registrar.command):
3144 class command(registrar.command):
3141 """deprecated: used registrar.command instead"""
3145 """deprecated: used registrar.command instead"""
3142 def _doregister(self, func, name, *args, **kwargs):
3146 def _doregister(self, func, name, *args, **kwargs):
3143 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3147 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3144 return super(command, self)._doregister(func, name, *args, **kwargs)
3148 return super(command, self)._doregister(func, name, *args, **kwargs)
3145
3149
3146 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3150 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3147 # commands.outgoing. "missing" is the "missing" attribute of the result of
3151 # commands.outgoing. "missing" is the "missing" attribute of the result of
3148 # "findcommonoutgoing()"
3152 # "findcommonoutgoing()"
3149 outgoinghooks = util.hooks()
3153 outgoinghooks = util.hooks()
3150
3154
3151 # a list of (ui, repo) functions called by commands.summary
3155 # a list of (ui, repo) functions called by commands.summary
3152 summaryhooks = util.hooks()
3156 summaryhooks = util.hooks()
3153
3157
3154 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3158 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3155 #
3159 #
3156 # functions should return a tuple of the booleans below, if 'changes' is None:
3160 # functions should return a tuple of the booleans below, if 'changes' is None:
3157 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3161 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3158 #
3162 #
3159 # otherwise, 'changes' is a tuple of tuples below:
3163 # otherwise, 'changes' is a tuple of tuples below:
3160 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3164 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3161 # - (desturl, destbranch, destpeer, outgoing)
3165 # - (desturl, destbranch, destpeer, outgoing)
3162 summaryremotehooks = util.hooks()
3166 summaryremotehooks = util.hooks()
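
A minimal sketch of registering one of these hooks from an extension, assuming a hypothetical extension named 'myext'; the (ui, repo) signature follows the summaryhooks comment above:

    from mercurial import cmdutil

    def _summaryhook(ui, repo):
        # called by 'hg summary' with (ui, repo)
        ui.status('myext: %d heads\n' % len(repo.heads()))

    def extsetup(ui):
        cmdutil.summaryhooks.add('myext', _summaryhook)
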
3163
3167
3164 # A list of state files kept by multistep operations like graft.
3168 # A list of state files kept by multistep operations like graft.
3165 # Since graft cannot be aborted, it is considered 'clearable' by update.
3169 # Since graft cannot be aborted, it is considered 'clearable' by update.
3166 # note: bisect is intentionally excluded
3170 # note: bisect is intentionally excluded
3167 # (state file, clearable, allowcommit, error, hint)
3171 # (state file, clearable, allowcommit, error, hint)
3168 unfinishedstates = [
3172 unfinishedstates = [
3169 ('graftstate', True, False, _('graft in progress'),
3173 ('graftstate', True, False, _('graft in progress'),
3170 _("use 'hg graft --continue' or 'hg update' to abort")),
3174 _("use 'hg graft --continue' or 'hg update' to abort")),
3171 ('updatestate', True, False, _('last update was interrupted'),
3175 ('updatestate', True, False, _('last update was interrupted'),
3172 _("use 'hg update' to get a consistent checkout"))
3176 _("use 'hg update' to get a consistent checkout"))
3173 ]
3177 ]
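
A minimal sketch of how an extension with its own multistep command might append to this table so that other commands refuse to run while its state file exists; every name below ('myextstate', 'hg myext') is hypothetical:

    from mercurial import cmdutil
    from mercurial.i18n import _

    def extsetup(ui):
        # (state file, clearable, allowcommit, error, hint)
        cmdutil.unfinishedstates.append(
            ('myextstate', False, False, _('myext operation in progress'),
             _("use 'hg myext --continue' or 'hg myext --abort'")))
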
3174
3178
3175 def checkunfinished(repo, commit=False):
3179 def checkunfinished(repo, commit=False):
3176 '''Look for an unfinished multistep operation, like graft, and abort
3180 '''Look for an unfinished multistep operation, like graft, and abort
3177 if found. It's probably good to check this right before
3181 if found. It's probably good to check this right before
3178 bailifchanged().
3182 bailifchanged().
3179 '''
3183 '''
3180 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3184 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3181 if commit and allowcommit:
3185 if commit and allowcommit:
3182 continue
3186 continue
3183 if repo.vfs.exists(f):
3187 if repo.vfs.exists(f):
3184 raise error.Abort(msg, hint=hint)
3188 raise error.Abort(msg, hint=hint)
3185
3189
3186 def clearunfinished(repo):
3190 def clearunfinished(repo):
3187 '''Check for unfinished operations (as above), and clear the ones
3191 '''Check for unfinished operations (as above), and clear the ones
3188 that are clearable.
3192 that are clearable.
3189 '''
3193 '''
3190 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3194 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3191 if not clearable and repo.vfs.exists(f):
3195 if not clearable and repo.vfs.exists(f):
3192 raise error.Abort(msg, hint=hint)
3196 raise error.Abort(msg, hint=hint)
3193 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3197 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3194 if clearable and repo.vfs.exists(f):
3198 if clearable and repo.vfs.exists(f):
3195 util.unlink(repo.vfs.join(f))
3199 util.unlink(repo.vfs.join(f))
3196
3200
3197 afterresolvedstates = [
3201 afterresolvedstates = [
3198 ('graftstate',
3202 ('graftstate',
3199 _('hg graft --continue')),
3203 _('hg graft --continue')),
3200 ]
3204 ]
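
Similarly, a sketch of registering with afterresolvedstates so that 'hg resolve' can suggest how to resume the (hypothetical) operation once conflicts are resolved:

    from mercurial import cmdutil

    def extsetup(ui):
        # (state file, command shown after 'continue: ...')
        cmdutil.afterresolvedstates.append(
            ('myextstate', 'hg myext --continue'))
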
3201
3205
3202 def howtocontinue(repo):
3206 def howtocontinue(repo):
3203 '''Check for an unfinished operation and return the command to finish
3207 '''Check for an unfinished operation and return the command to finish
3204 it.
3208 it.
3205
3209
3206 afterresolvedstates tuples define a .hg/{file} and the corresponding
3210 afterresolvedstates tuples define a .hg/{file} and the corresponding
3207 command needed to finish it.
3211 command needed to finish it.
3208
3212
3209 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3213 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3210 a boolean.
3214 a boolean.
3211 '''
3215 '''
3212 contmsg = _("continue: %s")
3216 contmsg = _("continue: %s")
3213 for f, msg in afterresolvedstates:
3217 for f, msg in afterresolvedstates:
3214 if repo.vfs.exists(f):
3218 if repo.vfs.exists(f):
3215 return contmsg % msg, True
3219 return contmsg % msg, True
3216 if repo[None].dirty(missing=True, merge=False, branch=False):
3220 if repo[None].dirty(missing=True, merge=False, branch=False):
3217 return contmsg % _("hg commit"), False
3221 return contmsg % _("hg commit"), False
3218 return None, None
3222 return None, None
3219
3223
3220 def checkafterresolved(repo):
3224 def checkafterresolved(repo):
3221 '''Inform the user about the next action after completing hg resolve
3225 '''Inform the user about the next action after completing hg resolve
3222
3226
3223 If there's a matching afterresolvedstates entry, howtocontinue will yield
3227 If there's a matching afterresolvedstates entry, howtocontinue will yield
3224 repo.ui.warn as the reporter.
3228 repo.ui.warn as the reporter.
3225
3229
3226 Otherwise, it will yield repo.ui.note.
3230 Otherwise, it will yield repo.ui.note.
3227 '''
3231 '''
3228 msg, warning = howtocontinue(repo)
3232 msg, warning = howtocontinue(repo)
3229 if msg is not None:
3233 if msg is not None:
3230 if warning:
3234 if warning:
3231 repo.ui.warn("%s\n" % msg)
3235 repo.ui.warn("%s\n" % msg)
3232 else:
3236 else:
3233 repo.ui.note("%s\n" % msg)
3237 repo.ui.note("%s\n" % msg)
3234
3238
3235 def wrongtooltocontinue(repo, task):
3239 def wrongtooltocontinue(repo, task):
3236 '''Raise an abort suggesting how to properly continue if there is an
3240 '''Raise an abort suggesting how to properly continue if there is an
3237 active task.
3241 active task.
3238
3242
3239 Uses howtocontinue() to find the active task.
3243 Uses howtocontinue() to find the active task.
3240
3244
3241 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3245 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3242 a hint.
3246 a hint.
3243 '''
3247 '''
3244 after = howtocontinue(repo)
3248 after = howtocontinue(repo)
3245 hint = None
3249 hint = None
3246 if after[1]:
3250 if after[1]:
3247 hint = after[0]
3251 hint = after[0]
3248 raise error.Abort(_('no %s in progress') % task, hint=hint)
3252 raise error.Abort(_('no %s in progress') % task, hint=hint)
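
A minimal sketch of how a command with a '--continue' flag would typically combine checkunfinished() and wrongtooltocontinue(), assuming the hypothetical command 'myext' and state file 'myextstate':

    from mercurial import cmdutil, registrar
    from mercurial.i18n import _

    cmdtable = {}
    command = registrar.command(cmdtable)

    @command('myext', [('', 'continue', False, _('resume interrupted operation'))])
    def myext(ui, repo, **opts):
        if opts.get('continue'):
            if not repo.vfs.exists('myextstate'):
                # no state file: abort with a hint derived from howtocontinue()
                cmdutil.wrongtooltocontinue(repo, _('myext'))
            # ... resume from .hg/myextstate ...
        else:
            # refuse to start while another multistep operation is unfinished
            cmdutil.checkunfinished(repo)
            # ... normal operation, writing .hg/myextstate as needed ...
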
3249
3253
3250 class changeset_printer(logcmdutil.changesetprinter):
3254 class changeset_printer(logcmdutil.changesetprinter):
3251
3255
3252 def __init__(self, ui, *args, **kwargs):
3256 def __init__(self, ui, *args, **kwargs):
3253 msg = ("'cmdutil.changeset_printer' is deprecated, "
3257 msg = ("'cmdutil.changeset_printer' is deprecated, "
3254 "use 'logcmdutil.logcmdutil'")
3258 "use 'logcmdutil.logcmdutil'")
3255 ui.deprecwarn(msg, "4.6")
3259 ui.deprecwarn(msg, "4.6")
3256 super(changeset_printer, self).__init__(ui, *args, **kwargs)
3260 super(changeset_printer, self).__init__(ui, *args, **kwargs)
3257
3261
3258 def displaygraph(ui, *args, **kwargs):
3262 def displaygraph(ui, *args, **kwargs):
3259 msg = ("'cmdutil.displaygraph' is deprecated, "
3263 msg = ("'cmdutil.displaygraph' is deprecated, "
3260 "use 'logcmdutil.displaygraph'")
3264 "use 'logcmdutil.displaygraph'")
3261 ui.deprecwarn(msg, "4.6")
3265 ui.deprecwarn(msg, "4.6")
3262 return logcmdutil.displaygraph(ui, *args, **kwargs)
3266 return logcmdutil.displaygraph(ui, *args, **kwargs)
3263
3267
3264 def show_changeset(ui, *args, **kwargs):
3268 def show_changeset(ui, *args, **kwargs):
3265 msg = ("'cmdutil.show_changeset' is deprecated, "
3269 msg = ("'cmdutil.show_changeset' is deprecated, "
3266 "use 'logcmdutil.changesetdisplayer'")
3270 "use 'logcmdutil.changesetdisplayer'")
3267 ui.deprecwarn(msg, "4.6")
3271 ui.deprecwarn(msg, "4.6")
3268 return logcmdutil.changesetdisplayer(ui, *args, **kwargs)
3272 return logcmdutil.changesetdisplayer(ui, *args, **kwargs)
@@ -1,319 +1,380 b''
1 #testcases lfsremote-on lfsremote-off
1 #testcases lfsremote-on lfsremote-off
2 #require serve no-reposimplestore
2 #require serve no-reposimplestore
3
3
4 This test splits `hg serve` with and without using the extension into separate
4 This test splits `hg serve` with and without using the extension into separate
5 test cases. The tests are broken down as follows, where "LFS"/"No-LFS"
5 test cases. The tests are broken down as follows, where "LFS"/"No-LFS"
6 indicates whether or not there are commits that use an LFS file, and "D"/"E"
6 indicates whether or not there are commits that use an LFS file, and "D"/"E"
7 indicates whether or not the extension is loaded. The "X" cases are not tested
7 indicates whether or not the extension is loaded. The "X" cases are not tested
8 individually, because the lfs requirement causes the process to bail early if
8 individually, because the lfs requirement causes the process to bail early if
9 the extension is disabled.
9 the extension is disabled.
10
10
11 . Server
11 . Server
12 .
12 .
13 . No-LFS LFS
13 . No-LFS LFS
14 . +----------------------------+
14 . +----------------------------+
15 . | || D | E | D | E |
15 . | || D | E | D | E |
16 . |---++=======================|
16 . |---++=======================|
17 . C | D || N/A | #1 | X | #4 |
17 . C | D || N/A | #1 | X | #4 |
18 . l No +---++-----------------------|
18 . l No +---++-----------------------|
19 . i LFS | E || #2 | #2 | X | #5 |
19 . i LFS | E || #2 | #2 | X | #5 |
20 . e +---++-----------------------|
20 . e +---++-----------------------|
21 . n | D || X | X | X | X |
21 . n | D || X | X | X | X |
22 . t LFS |---++-----------------------|
22 . t LFS |---++-----------------------|
23 . | E || #3 | #3 | X | #6 |
23 . | E || #3 | #3 | X | #6 |
24 . |---++-----------------------+
24 . |---++-----------------------+
25
25
26 $ hg init server
26 $ hg init server
27 $ SERVER_REQUIRES="$TESTTMP/server/.hg/requires"
27 $ SERVER_REQUIRES="$TESTTMP/server/.hg/requires"
28
28
29 Skip the experimental.changegroup3=True config. Failure to agree on this comes
29 Skip the experimental.changegroup3=True config. Failure to agree on this comes
30 first, and causes a "ValueError: no common changegroup version" or "abort:
30 first, and causes a "ValueError: no common changegroup version" or "abort:
31 HTTP Error 500: Internal Server Error", if the extension is only loaded on one
31 HTTP Error 500: Internal Server Error", if the extension is only loaded on one
32 side. If that *is* enabled, the subsequent failure is "abort: missing processor
32 side. If that *is* enabled, the subsequent failure is "abort: missing processor
33 for flag '0x2000'!" if the extension is only loaded on one side (possibly also
33 for flag '0x2000'!" if the extension is only loaded on one side (possibly also
34 masked by the Internal Server Error message).
34 masked by the Internal Server Error message).
35 $ cat >> $HGRCPATH <<EOF
35 $ cat >> $HGRCPATH <<EOF
36 > [experimental]
36 > [experimental]
37 > lfs.disableusercache = True
37 > lfs.disableusercache = True
38 > [lfs]
38 > [lfs]
39 > threshold=10
39 > threshold=10
40 > [web]
40 > [web]
41 > allow_push=*
41 > allow_push=*
42 > push_ssl=False
42 > push_ssl=False
43 > EOF
43 > EOF
44
44
45 #if lfsremote-on
45 #if lfsremote-on
46 $ hg --config extensions.lfs= -R server \
46 $ hg --config extensions.lfs= -R server \
47 > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
47 > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
48 #else
48 #else
49 $ hg --config extensions.lfs=! -R server \
49 $ hg --config extensions.lfs=! -R server \
50 > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
50 > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
51 #endif
51 #endif
52
52
53 $ cat hg.pid >> $DAEMON_PIDS
53 $ cat hg.pid >> $DAEMON_PIDS
54 $ hg clone -q http://localhost:$HGPORT client
54 $ hg clone -q http://localhost:$HGPORT client
55 $ grep 'lfs' client/.hg/requires $SERVER_REQUIRES
55 $ grep 'lfs' client/.hg/requires $SERVER_REQUIRES
56 [1]
56 [1]
57
57
58 --------------------------------------------------------------------------------
58 --------------------------------------------------------------------------------
59 Case #1: client with non-lfs content and the extension disabled; server with
59 Case #1: client with non-lfs content and the extension disabled; server with
60 non-lfs content, and the extension enabled.
60 non-lfs content, and the extension enabled.
61
61
62 $ cd client
62 $ cd client
63 $ echo 'non-lfs' > nonlfs.txt
63 $ echo 'non-lfs' > nonlfs.txt
64 $ hg ci -Aqm 'non-lfs'
64 $ hg ci -Aqm 'non-lfs'
65 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
65 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
66 [1]
66 [1]
67
67
68 #if lfsremote-on
68 #if lfsremote-on
69
69
70 $ hg push -q
70 $ hg push -q
71 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
71 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
72 [1]
72 [1]
73
73
74 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client1_clone
74 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client1_clone
75 $ grep 'lfs' $TESTTMP/client1_clone/.hg/requires $SERVER_REQUIRES
75 $ grep 'lfs' $TESTTMP/client1_clone/.hg/requires $SERVER_REQUIRES
76 [1]
76 [1]
77
77
78 $ hg init $TESTTMP/client1_pull
78 $ hg init $TESTTMP/client1_pull
79 $ hg -R $TESTTMP/client1_pull pull -q http://localhost:$HGPORT
79 $ hg -R $TESTTMP/client1_pull pull -q http://localhost:$HGPORT
80 $ grep 'lfs' $TESTTMP/client1_pull/.hg/requires $SERVER_REQUIRES
80 $ grep 'lfs' $TESTTMP/client1_pull/.hg/requires $SERVER_REQUIRES
81 [1]
81 [1]
82
82
83 $ hg identify http://localhost:$HGPORT
83 $ hg identify http://localhost:$HGPORT
84 d437e1d24fbd
84 d437e1d24fbd
85
85
86 #endif
86 #endif
87
87
88 --------------------------------------------------------------------------------
88 --------------------------------------------------------------------------------
89 Case #2: client with non-lfs content and the extension enabled; server with
89 Case #2: client with non-lfs content and the extension enabled; server with
90 non-lfs content, and the extension state controlled by #testcases.
90 non-lfs content, and the extension state controlled by #testcases.
91
91
92 $ cat >> $HGRCPATH <<EOF
92 $ cat >> $HGRCPATH <<EOF
93 > [extensions]
93 > [extensions]
94 > lfs =
94 > lfs =
95 > EOF
95 > EOF
96 $ echo 'non-lfs' > nonlfs2.txt
96 $ echo 'non-lfs' > nonlfs2.txt
97 $ hg ci -Aqm 'non-lfs file with lfs client'
97 $ hg ci -Aqm 'non-lfs file with lfs client'
98
98
99 Since no lfs content has been added yet, the push is allowed, even when the
99 Since no lfs content has been added yet, the push is allowed, even when the
100 extension is not enabled remotely.
100 extension is not enabled remotely.
101
101
102 $ hg push -q
102 $ hg push -q
103 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
103 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
104 [1]
104 [1]
105
105
106 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client2_clone
106 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client2_clone
107 $ grep 'lfs' $TESTTMP/client2_clone/.hg/requires $SERVER_REQUIRES
107 $ grep 'lfs' $TESTTMP/client2_clone/.hg/requires $SERVER_REQUIRES
108 [1]
108 [1]
109
109
110 $ hg init $TESTTMP/client2_pull
110 $ hg init $TESTTMP/client2_pull
111 $ hg -R $TESTTMP/client2_pull pull -q http://localhost:$HGPORT
111 $ hg -R $TESTTMP/client2_pull pull -q http://localhost:$HGPORT
112 $ grep 'lfs' $TESTTMP/client2_pull/.hg/requires $SERVER_REQUIRES
112 $ grep 'lfs' $TESTTMP/client2_pull/.hg/requires $SERVER_REQUIRES
113 [1]
113 [1]
114
114
115 $ hg identify http://localhost:$HGPORT
115 $ hg identify http://localhost:$HGPORT
116 1477875038c6
116 1477875038c6
117
117
118 --------------------------------------------------------------------------------
118 --------------------------------------------------------------------------------
119 Case #3: client with lfs content and the extension enabled; server with
119 Case #3: client with lfs content and the extension enabled; server with
120 non-lfs content, and the extension state controlled by #testcases. The server
120 non-lfs content, and the extension state controlled by #testcases. The server
121 should have an 'lfs' requirement after it picks up its first commit with a blob.
121 should have an 'lfs' requirement after it picks up its first commit with a blob.
122
122
123 $ echo 'this is a big lfs file' > lfs.bin
123 $ echo 'this is a big lfs file' > lfs.bin
124 $ hg ci -Aqm 'lfs'
124 $ hg ci -Aqm 'lfs'
125 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
125 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
126 .hg/requires:lfs
126 .hg/requires:lfs
127
127
128 #if lfsremote-off
128 #if lfsremote-off
129 $ hg push -q
129 $ hg push -q
130 abort: required features are not supported in the destination: lfs
130 abort: required features are not supported in the destination: lfs
131 (enable the lfs extension on the server)
131 (enable the lfs extension on the server)
132 [255]
132 [255]
133 #else
133 #else
134 $ hg push -q
134 $ hg push -q
135 #endif
135 #endif
136 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
136 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
137 .hg/requires:lfs
137 .hg/requires:lfs
138 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
138 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
139
139
140 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client3_clone
140 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client3_clone
141 $ grep 'lfs' $TESTTMP/client3_clone/.hg/requires $SERVER_REQUIRES || true
141 $ grep 'lfs' $TESTTMP/client3_clone/.hg/requires $SERVER_REQUIRES || true
142 $TESTTMP/client3_clone/.hg/requires:lfs (lfsremote-on !)
142 $TESTTMP/client3_clone/.hg/requires:lfs (lfsremote-on !)
143 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
143 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
144
144
145 $ hg init $TESTTMP/client3_pull
145 $ hg init $TESTTMP/client3_pull
146 $ hg -R $TESTTMP/client3_pull pull -q http://localhost:$HGPORT
146 $ hg -R $TESTTMP/client3_pull pull -q http://localhost:$HGPORT
147 $ grep 'lfs' $TESTTMP/client3_pull/.hg/requires $SERVER_REQUIRES || true
147 $ grep 'lfs' $TESTTMP/client3_pull/.hg/requires $SERVER_REQUIRES || true
148 $TESTTMP/client3_pull/.hg/requires:lfs (lfsremote-on !)
148 $TESTTMP/client3_pull/.hg/requires:lfs (lfsremote-on !)
149 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
149 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
150
150
151 The difference here is that the push failed above when the extension isn't
151 The difference here is that the push failed above when the extension isn't
152 enabled on the server.
152 enabled on the server.
153 $ hg identify http://localhost:$HGPORT
153 $ hg identify http://localhost:$HGPORT
154 8374dc4052cb (lfsremote-on !)
154 8374dc4052cb (lfsremote-on !)
155 1477875038c6 (lfsremote-off !)
155 1477875038c6 (lfsremote-off !)
156
156
157 Don't bother testing the lfsremote-off cases - the server won't be able
157 Don't bother testing the lfsremote-off cases - the server won't be able
158 to launch if there's lfs content and the extension is disabled.
158 to launch if there's lfs content and the extension is disabled.
159
159
#if lfsremote-on

--------------------------------------------------------------------------------
Case #4: client with non-lfs content and the extension disabled; server with
lfs content, and the extension enabled.

$ cat >> $HGRCPATH <<EOF
> [extensions]
> lfs = !
> EOF

$ hg init $TESTTMP/client4
$ cd $TESTTMP/client4
$ cat >> .hg/hgrc <<EOF
> [paths]
> default = http://localhost:$HGPORT
> EOF
$ echo 'non-lfs' > nonlfs2.txt
$ hg ci -Aqm 'non-lfs'
$ grep 'lfs' .hg/requires $SERVER_REQUIRES
$TESTTMP/server/.hg/requires:lfs

$ hg push -q --force
warning: repository is unrelated
$ grep 'lfs' .hg/requires $SERVER_REQUIRES
$TESTTMP/server/.hg/requires:lfs

TODO: fail more gracefully.

$ hg clone -q http://localhost:$HGPORT $TESTTMP/client4_clone
abort: HTTP Error 500: Internal Server Error
[255]
$ grep 'lfs' $TESTTMP/client4_clone/.hg/requires $SERVER_REQUIRES
grep: $TESTTMP/client4_clone/.hg/requires: $ENOENT$
$TESTTMP/server/.hg/requires:lfs
[2]

TODO: fail more gracefully.

$ hg init $TESTTMP/client4_pull
$ hg -R $TESTTMP/client4_pull pull -q http://localhost:$HGPORT
abort: HTTP Error 500: Internal Server Error
[255]
$ grep 'lfs' $TESTTMP/client4_pull/.hg/requires $SERVER_REQUIRES
$TESTTMP/server/.hg/requires:lfs

$ hg identify http://localhost:$HGPORT
03b080fa9d93

--------------------------------------------------------------------------------
Case #5: client with non-lfs content and the extension enabled; server with
lfs content, and the extension enabled.

$ cat >> $HGRCPATH <<EOF
> [extensions]
> lfs =
> EOF
$ echo 'non-lfs' > nonlfs3.txt
$ hg ci -Aqm 'non-lfs file with lfs client'

$ hg push -q
$ grep 'lfs' .hg/requires $SERVER_REQUIRES
$TESTTMP/server/.hg/requires:lfs

$ hg clone -q http://localhost:$HGPORT $TESTTMP/client5_clone
$ grep 'lfs' $TESTTMP/client5_clone/.hg/requires $SERVER_REQUIRES
$TESTTMP/client5_clone/.hg/requires:lfs
$TESTTMP/server/.hg/requires:lfs

$ hg init $TESTTMP/client5_pull
$ hg -R $TESTTMP/client5_pull pull -q http://localhost:$HGPORT
$ grep 'lfs' $TESTTMP/client5_pull/.hg/requires $SERVER_REQUIRES
$TESTTMP/client5_pull/.hg/requires:lfs
$TESTTMP/server/.hg/requires:lfs

$ hg identify http://localhost:$HGPORT
c729025cc5e3

--------------------------------------------------------------------------------
Case #6: client with lfs content and the extension enabled; server with
lfs content, and the extension enabled.

$ echo 'this is another lfs file' > lfs2.txt
$ hg ci -Aqm 'lfs file with lfs client'

$ hg --config paths.default= push -v http://localhost:$HGPORT
pushing to http://localhost:$HGPORT/
lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
searching for changes
remote has heads on branch 'default' that are not known locally: 8374dc4052cb
lfs: uploading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
lfs: uploaded 1 files (25 bytes)
1 changesets found
uncompressed size of bundle content:
206 (changelog)
172 (manifests)
275 lfs2.txt
remote: adding changesets
remote: adding manifests
remote: adding file changes
remote: added 1 changesets with 1 changes to 1 files
$ grep 'lfs' .hg/requires $SERVER_REQUIRES
.hg/requires:lfs
$TESTTMP/server/.hg/requires:lfs

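What travels in that bundle for lfs2.txt is only a small pointer; the 25-byte
file content itself was uploaded separately over the LFS API (the
"lfs: uploading ... (25 bytes)" line above). Based on the oid and size shown in
that output, the tracked pointer is roughly the standard Git-LFS pointer below
(the lfs extension may add extra hg-specific keys):

  version https://git-lfs.github.com/spec/v1
  oid sha256:a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
  size 25
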
$ hg clone -q http://localhost:$HGPORT $TESTTMP/client6_clone
$ grep 'lfs' $TESTTMP/client6_clone/.hg/requires $SERVER_REQUIRES
$TESTTMP/client6_clone/.hg/requires:lfs
$TESTTMP/server/.hg/requires:lfs

$ hg init $TESTTMP/client6_pull
$ hg -R $TESTTMP/client6_pull pull -u -v http://localhost:$HGPORT
pulling from http://localhost:$HGPORT/
requesting all changes
adding changesets
adding manifests
adding file changes
added 6 changesets with 5 changes to 5 files (+1 heads)
calling hook pretxnchangegroup.lfs: hgext.lfs.checkrequireslfs
new changesets d437e1d24fbd:d3b84d50eacb
resolving manifests
lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
getting lfs2.txt
lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
getting nonlfs2.txt
getting nonlfs3.txt
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
updated to "d3b84d50eacb: lfs file with lfs client"
1 other heads for branch "default"
$ grep 'lfs' $TESTTMP/client6_pull/.hg/requires $SERVER_REQUIRES
$TESTTMP/client6_pull/.hg/requires:lfs
$TESTTMP/server/.hg/requires:lfs

$ hg identify http://localhost:$HGPORT
d3b84d50eacb

--------------------------------------------------------------------------------
Misc: process dies early if a requirement exists and the extension is disabled

$ hg --config extensions.lfs=! summary
abort: repository requires features unknown to this Mercurial: lfs!
(see https://mercurial-scm.org/wiki/MissingRequirement for more information)
[255]

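That early abort is the ".hg/requires" gate: before touching any repository
data, Mercurial reads the requires file and refuses to proceed if it lists a
feature that this Mercurial (with its currently enabled extensions) does not
recognize. A simplified sketch of the shape of that check, with invented names
rather than Mercurial's actual internals:

  # Simplified illustration of a ".hg/requires" gate; names are invented,
  # this is not Mercurial's actual implementation.
  import os, tempfile

  SUPPORTED = {'revlogv1', 'store', 'fncache', 'dotencode', 'generaldelta'}
  # loading the lfs extension effectively adds 'lfs' to the supported set

  def checkrequirements(requirespath, supported=SUPPORTED):
      with open(requirespath) as fp:
          required = {line.strip() for line in fp if line.strip()}
      missing = required - supported
      if missing:
          raise SystemExit(
              'abort: repository requires features unknown to this Mercurial: %s!'
              % ' '.join(sorted(missing)))

  # A requires file listing lfs trips the gate as soon as lfs is disabled,
  # before any command (even "hg summary") reads repository data.
  demo = os.path.join(tempfile.mkdtemp(), 'requires')
  with open(demo, 'w') as fp:
      fp.write('revlogv1\nstore\nlfs\n')
  checkrequirements(demo)   # raises SystemExit with the abort message above
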
$ echo 'this is an lfs file' > $TESTTMP/client6_clone/lfspair1.bin
$ echo 'this is an lfs file too' > $TESTTMP/client6_clone/lfspair2.bin
$ hg -R $TESTTMP/client6_clone ci -Aqm 'add lfs pair'
$ hg -R $TESTTMP/client6_clone push -q

$ hg clone -qU http://localhost:$HGPORT $TESTTMP/bulkfetch

Export will prefetch all needed files across all needed revisions

$ hg -R $TESTTMP/bulkfetch -v export -r 0:tip -o all.export
lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
exporting patches:
lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
lfs: need to transfer 4 objects (92 bytes)
lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
all.export
lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store

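This is the behaviour the change under review adds: because the clone above
used -U, no blobs are present locally, so export announces every file it is
about to read before writing any patch, and the lfs store can fetch them in a
single batch ("need to transfer 4 objects") instead of faulting them in one at
a time. A toy sketch of that prefetch-hook pattern, with invented names
standing in for the real hook registry and store:

  # Toy sketch of a file-prefetch hook; invented names, not Mercurial's API.
  fileprefetchhooks = []                 # stores/extensions register callbacks

  class FakeBlobStore(object):
      def __init__(self, pointers):
          self.pointers = pointers       # (rev, filename) -> oid
          self.local = set()             # oids already downloaded

      def prefetch(self, revs, match):
          wanted = {oid for (rev, name), oid in self.pointers.items()
                    if rev in revs and match(name) and oid not in self.local}
          if wanted:
              print('need to transfer %d objects' % len(wanted))
          self.local.update(wanted)      # one batched transfer

  # arbitrary revision numbers; the filenames and oids mirror the log above
  store = FakeBlobStore({
      (2, 'lfs.bin'):      'bed80f00...',
      (5, 'lfs2.txt'):     'a82f1c5c...',
      (6, 'lfspair1.bin'): 'cf1b2787...',
      (6, 'lfspair2.bin'): 'd96eda2c...',
  })
  fileprefetchhooks.append(store.prefetch)

  def prefetchfiles(revs, match=lambda name: True):
      """What an export-style command calls once, before reading any file."""
      for hook in fileprefetchhooks:
          hook(revs, match)

  prefetchfiles(revs=set(range(8)))      # export -r 0:tip: one batch of 4 objects
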
Export with selected files is used with `extdiff --patch`

$ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
$ hg --config extensions.extdiff= \
> -R $TESTTMP/bulkfetch -v extdiff -r 2:tip --patch $TESTTMP/bulkfetch/lfs.bin
lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
*/hg-8374dc4052cb.patch (glob)
lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
*/hg-9640b57e77b1.patch (glob)
--- */hg-8374dc4052cb.patch * (glob)
+++ */hg-9640b57e77b1.patch * (glob)
@@ -2,12 +2,7 @@
# User test
# Date 0 0
# Thu Jan 01 00:00:00 1970 +0000
-# Node ID 8374dc4052cbd388e79d9dc4ddb29784097aa354
-# Parent 1477875038c60152e391238920a16381c627b487
-lfs
+# Node ID 9640b57e77b14c3a0144fb4478b6cc13e13ea0d1
+# Parent d3b84d50eacbd56638e11abce6b8616aaba54420
+add lfs pair

-diff -r 1477875038c6 -r 8374dc4052cb lfs.bin
---- /dev/null Thu Jan 01 00:00:00 1970 +0000
-+++ b/lfs.bin Thu Jan 01 00:00:00 1970 +0000
-@@ -0,0 +1,1 @@
-+this is a big lfs file
cleaning up temp directory
[1]

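The narrowing works the same way: because extdiff --patch exports only the
named file, the matcher handed to the prefetch step selects a single object
(only bed80f00... is downloaded above) rather than all four. In terms of the
toy sketch above, only the pointers accepted by the matcher end up in the
batch:

  # Only objects for files the matcher accepts are fetched (toy data).
  pointers = {'lfs.bin': 'bed80f00...', 'lfs2.txt': 'a82f1c5c...',
              'lfspair1.bin': 'cf1b2787...', 'lfspair2.bin': 'd96eda2c...'}

  def needed(match):
      return sorted(oid for name, oid in pointers.items() if match(name))

  print(len(needed(lambda name: name == 'lfs.bin')))   # 1 object, as above
  print(len(needed(lambda name: True)))                # 4 objects for a full export
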
#endif

$ $PYTHON $TESTDIR/killdaemons.py $DAEMON_PIDS

#if lfsremote-on
$ cat $TESTTMP/errors.log | grep '^[A-Z]'
Traceback (most recent call last):
ValueError: no common changegroup version
Traceback (most recent call last):
ValueError: no common changegroup version
#else
$ cat $TESTTMP/errors.log
#endif