##// END OF EJS Templates
cmdutil: convert the prefetchfiles() hook to a callback mechanism (API)...
Matt Harbison -
r36154:f52a9336 default
parent child Browse files
Show More
@@ -1,3162 +1,3164 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import tempfile
13 import tempfile
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 hex,
17 hex,
18 nullid,
18 nullid,
19 nullrev,
19 nullrev,
20 short,
20 short,
21 )
21 )
22
22
23 from . import (
23 from . import (
24 bookmarks,
24 bookmarks,
25 changelog,
25 changelog,
26 copies,
26 copies,
27 crecord as crecordmod,
27 crecord as crecordmod,
28 dirstateguard,
28 dirstateguard,
29 encoding,
29 encoding,
30 error,
30 error,
31 formatter,
31 formatter,
32 logcmdutil,
32 logcmdutil,
33 match as matchmod,
33 match as matchmod,
34 merge as mergemod,
34 merge as mergemod,
35 obsolete,
35 obsolete,
36 patch,
36 patch,
37 pathutil,
37 pathutil,
38 pycompat,
38 pycompat,
39 registrar,
39 registrar,
40 revlog,
40 revlog,
41 rewriteutil,
41 rewriteutil,
42 scmutil,
42 scmutil,
43 smartset,
43 smartset,
44 subrepoutil,
44 subrepoutil,
45 templater,
45 templater,
46 util,
46 util,
47 vfs as vfsmod,
47 vfs as vfsmod,
48 )
48 )
stringio = util.stringio

# Reusable tables of common command options.  Commands compose their own
# option list from these building blocks.

dryrunopts = [
    ('n', 'dry-run', None, _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '', _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '', _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '', _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '', _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '', _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '', _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('Z', 'ignore-space-at-eol', None,
     _('ignore changes in whitespace at EOL')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '', _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None, _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
163
163
def ishunk(x):
    """Report whether x is a record/crecord hunk object."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
167
167
def newandmodified(chunks, originalchunks):
    """Return the set of filenames whose hunks introduce a new file and
    were not present in the originally parsed chunk list."""
    touched = set()
    for c in chunks:
        if (ishunk(c)
                and c.header.isnewfile()
                and c not in originalchunks):
            touched.add(c.header.filename())
    return touched
175
175
def parsealiases(cmd):
    """Split a command spec like "^cmd|alias1|alias2" into its name list.

    Any leading '^' characters (the "show in short help" marker) are
    discarded before splitting on '|'.
    """
    stripped = cmd.lstrip("^")
    return stripped.split("|")
178
178
def setupwrapcolorwrite(ui):
    """Replace ui.write with a wrapper that labels/colorizes diff output.

    Returns the original ui.write so the caller can restore it afterwards.
    """
    oldwrite = ui.write

    def wrapped(*args, **kwargs):
        label = kwargs.pop(r'label', '')
        for chunk, chunklabel in patch.difflabel(lambda: args):
            oldwrite(chunk, label=label + chunklabel)

    setattr(ui, 'write', wrapped)
    return oldwrite
191
191
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Have the user filter originalhunks, via curses when enabled,
    otherwise via plain prompts; return the filtered (chunks, opts)."""
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        # test mode: replay a scripted chunk selection
        recordfn = crecordmod.testdecorator(testfile,
                                            crecordmod.testchunkselector)
    else:
        recordfn = crecordmod.chunkselector

    return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
204
204
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to filter originalhunks; return the selected
    (hunks, opts).

    *operation* is used to build ui messages telling the user what kind
    of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest')
    oldwrite = setupwrapcolorwrite(ui)
    try:
        # the return value travels through the finally clause, which
        # restores ui.write before the caller sees the result
        return filterchunks(ui, originalhunks, usecurses, testfile,
                            operation)
    finally:
        ui.write = oldwrite
221
221
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes and commit them through commitfunc.

    cmdsuggest is the command name suggested when the ui is not
    interactive; backupall forces backing up every changed file instead
    of only those touched by the filtered patch; filterfn selects the
    hunks to apply (e.g. recordfilter).
    """
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # force a git-style, dateless, function-annotated diff so the
        # hunks carry enough context for interactive selection
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # headers have no files(); only hunks contribute
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # assemble the patch containing only the selected hunks
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        action="diff",
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                        False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup; never mask the real outcome
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # take the working-directory lock around the whole record run
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
403
403
class dirnode(object):
    """A directory of the working copy, annotated for status tersing.

    path is this directory's path relative to the repository root.

    statuses is the set of status characters of every file at or below
    this directory (files in subdirectories included).

    files holds (filename, status) pairs for direct children only.

    subdirs maps a child directory name to its own dirnode object.
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set([])
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Record a file that lives directly in this directory."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """Add a file somewhere below this directory.

        A filename containing a path separator belongs to a
        subdirectory: the matching child dirnode is created on demand
        and the remainder of the path is added there recursively.
        """
        if '/' not in filename:
            # no separator: the file is a direct child
            self._addfileindir(filename, status)
        else:
            subdir, rest = filename.split('/', 1)

            # create the child dirnode lazily
            if subdir not in self.subdirs:
                childpath = os.path.join(self.path, subdir)
                self.subdirs[subdir] = dirnode(childpath)

            self.subdirs[subdir].addfile(rest, status)

        # every status seen below this directory is remembered here too
        if status not in self.statuses:
            self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for name, st in self.files:
            yield st, os.path.join(self.path, name)

    def tersewalk(self, terseargs):
        """Yield terse (status, path) pairs for this directory subtree.

        terseargs is the string of status abbreviations the user passed
        with `--terse`.

        If every file at or below this directory shares one status and
        that status was requested, the whole subtree collapses to a
        single (status, "dir/") entry.  Otherwise the direct files are
        emitted one by one and each subdirectory is recursed into.
        """
        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # collapse only when this status abbreviation was requested
            if onlyst in terseargs:
                yield onlyst, self.path + pycompat.ossep
                return

        # emit the files living directly here
        for st, fpath in self.iterfilepaths():
            yield st, fpath

        # and recurse into each subdirectory
        for child in self.subdirs.values():
            for st, fpath in child.tersewalk(terseargs):
                yield st, fpath
503
503
def tersedir(statuslist, terseargs):
    """Collapse directories whose files all share the same status.

    statuslist is a scmutil.status() object holding one file list per
    status; terseargs is the string the user passed with the `--terse`
    flag.

    A tree of dirnode objects is built from every file, and each node
    records what it needs to decide whether its directory can be tersed.
    Returns the per-status lists (in canonical status order) with
    same-status directories collapsed to a single "dir/" entry.
    """
    # the order matters here as that is used to produce final list
    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # reject unknown status abbreviations up front
    for s in pycompat.bytestr(terseargs):
        if s not in allst:
            raise error.Abort(_("'%s' not recognized") % s)

    # dirnode for the root of the repo
    rootobj = dirnode('')
    pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
               'ignored', 'removed')

    tersedict = {}
    for attrname in pstatus:
        statuschar = attrname[0:1]
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)
        tersedict[statuschar] = []

    # the root dir itself is never tersed, so emit its direct files
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # walk every subdirectory, collapsing where possible
    for subdir in rootobj.subdirs.values():
        for st, f in subdir.tersewalk(terseargs):
            tersedict[st].append(f)

    # assemble the final per-status lists in canonical order
    return [sorted(tersedict[st]) for st in allst]
552
552
553 def _commentlines(raw):
553 def _commentlines(raw):
554 '''Surround lineswith a comment char and a new line'''
554 '''Surround lineswith a comment char and a new line'''
555 lines = raw.splitlines()
555 lines = raw.splitlines()
556 commentedlines = ['# %s' % line for line in lines]
556 commentedlines = ['# %s' % line for line in lines]
557 return '\n'.join(commentedlines) + '\n'
557 return '\n'.join(commentedlines) + '\n'
558
558
def _conflictsmsg(repo):
    """Return a commented merge-conflict summary, or None if no merge is
    active."""
    ms = mergemod.mergestate.read(repo)
    if not ms.active():
        return

    matcher = scmutil.match(repo[None])
    unresolved = [path for path in ms.unresolved() if matcher(path)]
    if not unresolved:
        msg = _('No unresolved merge conflicts.')
    else:
        cwd = pycompat.getcwd()
        entries = [' %s' % util.pathto(repo.root, cwd, path)
                   for path in unresolved]
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE''') % '\n'.join(entries)

    return _commentlines(msg)
579
579
def _helpmessage(continuecmd, abortcmd):
    """Build the commented continue/abort guidance for an unfinished state."""
    template = _('To continue: %s\n'
                 'To abort: %s')
    return _commentlines(template % (continuecmd, abortcmd))
584
584
def _rebasemsg():
    """Guidance shown while a rebase is in progress."""
    cont = 'hg rebase --continue'
    abort = 'hg rebase --abort'
    return _helpmessage(cont, abort)
587
587
def _histeditmsg():
    """Guidance shown while a histedit is in progress."""
    cont = 'hg histedit --continue'
    abort = 'hg histedit --abort'
    return _helpmessage(cont, abort)
590
590
def _unshelvemsg():
    """Guidance shown while an unshelve is in progress."""
    cont = 'hg unshelve --continue'
    abort = 'hg unshelve --abort'
    return _helpmessage(cont, abort)
593
593
def _updatecleanmsg(dest=None):
    """Build an 'hg update --clean' suggestion targeting *dest* ('.' when
    None), with a data-loss warning appended."""
    target = dest if dest else '.'
    note = _('warning: this will discard uncommitted changes')
    return 'hg update --clean %s (%s)' % (target, note)
597
597
def _graftmsg():
    """Guidance shown while a graft is in progress."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    cont = 'hg graft --continue'
    return _helpmessage(cont, _updatecleanmsg())
601
601
def _mergemsg():
    """Guidance shown while an uncommitted merge is in progress."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    cont = 'hg commit'
    return _helpmessage(cont, _updatecleanmsg())
605
605
def _bisectmsg():
    """Guidance shown while a bisection is in progress."""
    return _commentlines(_('To mark the changeset good: hg bisect --good\n'
                           'To mark the changeset bad: hg bisect --bad\n'
                           'To abort: hg bisect --reset\n'))
611
611
def fileexistspredicate(filename):
    """Return a predicate(repo) that is true when *filename* exists in the
    repo's .hg vfs."""
    def predicate(repo):
        return repo.vfs.exists(filename)
    return predicate
614
614
615 def _mergepredicate(repo):
615 def _mergepredicate(repo):
616 return len(repo[None].parents()) > 1
616 return len(repo[None].parents()) > 1
617
617
# Every "unfinished operation" state that `hg status` knows how to report,
# as (state name, detection predicate, help-message builder) triples.
# _getrepostate() scans this tuple in order and returns the first match.
STATES = (
    # (state, predicate to detect states, helpful message function)
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # They need to be last because some of the other unfinished states may also
    # be in a merge or update state (eg. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)
631
631
def _getrepostate(repo):
    """Return the (state, predicate, msgfn) triple for the first unfinished
    state detected in *repo*, or None when no such state applies."""
    # experimental config: commands.status.skipstates
    skipped = set(repo.ui.configlist('commands', 'status.skipstates'))
    for statetuple in STATES:
        state, detect, msgfn = statetuple
        if state in skipped:
            continue
        if detect(repo):
            return (state, detect, msgfn)
640
640
def morestatus(repo, fm):
    """Write extra `hg status` output about any unfinished operation (state
    banner, conflict list, continue/abort help) through formatter *fm*."""
    statetuple = _getrepostate(repo)
    if not statetuple:
        return

    label = 'status.morestatus'
    fm.startitem()
    state, statedetectionpredicate, helpfulmsg = statetuple
    statemsg = _('The repository is in an unfinished *%s* state.') % state
    fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
    conmsg = _conflictsmsg(repo)
    if conmsg:
        fm.write('conflictsmsg', '%s\n', conmsg, label=label)
    if helpfulmsg:
        fm.write('helpmsg', '%s\n', helpfulmsg(), label=label)
655
655
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    normal = {}
    debug = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)
        match = None
        if cmd in aliases:
            match = cmd
        elif not strict:
            # first alias that cmd is a prefix of, in declaration order
            prefixed = [a for a in aliases if a.startswith(cmd)]
            if prefixed:
                match = prefixed[0]
        if match is not None:
            isdebug = (aliases[0].startswith("debug")
                       or match.startswith("debug"))
            target = debug if isdebug else normal
            target[match] = (aliases, table[entry])

    # debug commands are only offered when nothing else matched
    if not normal and debug:
        normal = debug

    return normal, allcmds
693
693
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # exact match wins over any prefix matches
    try:
        return choice[cmd]
    except KeyError:
        pass

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
709
709
def changebranch(ui, repo, revs, label):
    """ Change the branch name of given revs to label

    Rewrites each changeset in revs (which must form a linear, non-merge,
    non-obsolete stack of topological heads) as a new changeset on branch
    label, then obsoletes the originals, moves bookmarks and, when possible,
    the working copy.
    """

    # everything below rewrites history, so take both locks and a transaction
    with repo.wlock(), repo.lock(), repo.transaction('branches'):
        # abort in case of uncommitted merge or dirty wdir
        bailifchanged(repo)
        revs = scmutil.revrange(repo, revs)
        if not revs:
            raise error.Abort("empty revision set")
        roots = repo.revs('roots(%ld)', revs)
        if len(roots) > 1:
            raise error.Abort(_("cannot change branch of non-linear revisions"))
        rewriteutil.precheck(repo, revs, 'change branch of')

        root = repo[roots.first()]
        # reusing an existing branch name is only allowed when we stay on it
        if not root.p1().branch() == label and label in repo.branchmap():
            raise error.Abort(_("a branch of the same name already exists"))

        if repo.revs('merge() and %ld', revs):
            raise error.Abort(_("cannot change branch of a merge commit"))
        if repo.revs('obsolete() and %ld', revs):
            raise error.Abort(_("cannot change branch of a obsolete changeset"))

        # make sure only topological heads
        if repo.revs('heads(%ld) - head()', revs):
            raise error.Abort(_("cannot change branch in middle of a stack"))

        # maps old node -> (new node,), filled as we rewrite bottom-up
        replacements = {}
        # avoid import cycle mercurial.cmdutil -> mercurial.context ->
        # mercurial.subrepo -> mercurial.cmdutil
        from . import context
        for rev in revs:
            ctx = repo[rev]
            oldbranch = ctx.branch()
            # check if ctx has same branch
            if oldbranch == label:
                continue

            # serve file contents straight from the original changeset
            def filectxfn(repo, newctx, path):
                try:
                    return ctx[path]
                except error.ManifestLookupError:
                    return None

            ui.debug("changing branch of '%s' from '%s' to '%s'\n"
                     % (hex(ctx.node()), oldbranch, label))
            extra = ctx.extra()
            # record provenance of the rewrite in the new changeset
            extra['branch_change'] = hex(ctx.node())
            # While changing branch of set of linear commits, make sure that
            # we base our commits on new parent rather than old parent which
            # was obsoleted while changing the branch
            p1 = ctx.p1().node()
            p2 = ctx.p2().node()
            if p1 in replacements:
                p1 = replacements[p1][0]
            if p2 in replacements:
                p2 = replacements[p2][0]

            mc = context.memctx(repo, (p1, p2),
                                ctx.description(),
                                ctx.files(),
                                filectxfn,
                                user=ctx.user(),
                                date=ctx.date(),
                                extra=extra,
                                branch=label)

            # keep the rewritten changeset in the same phase as the original
            commitphase = ctx.phase()
            overrides = {('phases', 'new-commit'): commitphase}
            with repo.ui.configoverride(overrides, 'branch-change'):
                newnode = repo.commitctx(mc)

            replacements[ctx.node()] = (newnode,)
            ui.debug('new node id is %s\n' % hex(newnode))

        # create obsmarkers and move bookmarks
        scmutil.cleanupnodes(repo, replacements, 'branch-change')

        # move the working copy too
        wctx = repo[None]
        # in-progress merge is a bit too complex for now.
        if len(wctx.parents()) == 1:
            newid = replacements.get(wctx.p1().node())
            if newid is not None:
                # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                # mercurial.cmdutil
                from . import hg
                hg.update(repo, newid[0], quietempty=True)

        ui.status(_("changed branch on %d changesets\n") % len(replacements))
800
800
def findrepo(p):
    """Walk upward from directory *p* looking for a '.hg' directory.

    Returns the repository root, or None when the filesystem root is
    reached without finding one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # dirname() reached a fixed point: we are at the root
            return None
        p = parent

    return p
808
808
def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # modified, added, removed or deleted files mean a dirty working copy
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        wctx.sub(subpath).bailifchanged(hint=hint)
826
826
def logmessage(ui, opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if message or not logfile:
        return message
    # no -m given: read the message from the log file (or stdin for '-')
    try:
        if isstdiofilename(logfile):
            return ui.fin.read()
        return '\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(_("can't read commit message '%s': %s") %
                          (logfile, encoding.strtolocal(inst.strerror)))
845
845
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a changectx is a merge when it has two parents
        ismerge = len(ctxorbool.parents()) > 1
    suffix = ".merge" if ismerge else ".normal"
    return baseformname + suffix
862
862
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function called with the edited commit message
    (= 'description' of the new changeset) just after editing but before
    the empty-ness check; its return value is the text actually stored
    into history, which allows changing the description before storing.

    'extramsg' is an extra message shown in the editor instead of the
    'Leave message empty to abort commit' line ('HG: ' prefix and EOL
    are added automatically).

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'commitforceeditor' is returned regardless of 'edit' whenever one of
    'finishdesc' or 'extramsg' is given, because they are specific for
    usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forcededitor(r, c, s):
            return commitforceeditor(r, c, s,
                                     finishdesc=finishdesc,
                                     extramsg=extramsg,
                                     editform=editform)
        return forcededitor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
893
893
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand the %-escapes in the output filename pattern *pat*.

    Supported escapes: %% (literal %), %b (repo basename), and — when the
    corresponding argument is given — %H/%h/%R/%r/%m (node-derived),
    %N (total), %n (seqno, zero-padded when total is also given), and
    %s/%d/%p (pathname-derived).

    Raises error.Abort on an escape with no active expander.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: '%d' % repo.changelog.rev(node),
        'h': lambda: short(node),
        # fix: use a raw string — '[^\w]' is an invalid escape sequence
        # (DeprecationWarning on Python 3)
        'm': lambda: re.sub(r'[^\w]', '_', desc or '')
    }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        if node:
            # fix: the two consecutive `if node:` blocks are merged
            expander.update(node_expander)
            # %r needs revwidth, so it cannot live in node_expander
            expander['r'] = (lambda:
                    ('%d' % repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: '%d' % total
        if seqno is not None:
            expander['n'] = lambda: '%d' % seqno
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total
            expander['n'] = (lambda: ('%d' % seqno).zfill(len('%d' % total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
939
939
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    if not pat:
        return True
    return pat == '-'
943
943
944 class _unclosablefile(object):
944 class _unclosablefile(object):
945 def __init__(self, fp):
945 def __init__(self, fp):
946 self._fp = fp
946 self._fp = fp
947
947
948 def close(self):
948 def close(self):
949 pass
949 pass
950
950
951 def __iter__(self):
951 def __iter__(self):
952 return iter(self._fp)
952 return iter(self._fp)
953
953
954 def __getattr__(self, attr):
954 def __getattr__(self, attr):
955 return getattr(self._fp, attr)
955 return getattr(self._fp, attr)
956
956
957 def __enter__(self):
957 def __enter__(self):
958 return self
958 return self
959
959
960 def __exit__(self, exc_type, exc_value, exc_tb):
960 def __exit__(self, exc_type, exc_value, exc_tb):
961 pass
961 pass
962
962
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open the output file described by pattern *pat* (see makefilename);
    '-' or an empty pattern yields an unclosable wrapper around the ui's
    stdio stream."""

    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        stream = repo.ui.fout if writable else repo.ui.fin
        # never let callers close the shared stdio streams
        return _unclosablefile(stream)
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # first write truncates; later writes to the same name append
            modemap[fn] = 'ab'
    return open(fn, mode)
981
981
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']

    # validate the option combination first
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if r:
        return r

    # fall back to opening a raw revlog file from the current directory
    if not file_:
        raise error.CommandError(cmd, _('invalid arguments'))
    if not os.path.isfile(file_):
        raise error.Abort(_("revlog '%s' not found") % file_)
    return revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                         file_[:-2] + ".i")
1026
1026
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) files matching ``pats``.

    The last entry of ``pats`` is the destination; the rest are sources.
    Called with the repo lock held.

    Returns True if at least one file could not be copied (for use as a
    command exit status), False otherwise.

    Path vocabulary used throughout:
      hgsep => pathname that uses "/" to separate directories
      ossep => pathname that uses os.sep to separate directories
    """
    cwd = repo.getcwd()
    # maps copy target (hgsep) -> source (hgsep); used to detect two
    # sources colliding on the same target
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # Expand one source pattern into (abs, rel, exact) tuples,
        # warning about (and skipping) unmanaged/removed files.
        srcs = []
        if after:
            # --after records copies of files that may already be gone,
            # so only unknown ('?') files are rejected
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                # only complain for files the user named explicitly
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # Copy/rename a single file and record it in the dirstate.
        # Returns True on failure (caller counts errors).
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                # case-only rename (a => A): allowed for rename, not copy
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            # --after only records the copy; the target must already exist
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # case-only rename: go through a temp name so the
                    # filesystem actually changes the case
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, encoding.strtolocal(inst.strerror)))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                repo.wvfs.unlinkpath(abssrc)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # score a candidate strip length by how many of the
                    # resulting target paths already exist
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
1272
1272
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' hooks are run before the commit is made and are provided the
# following arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' hooks are run after the commit is made and are provided the
# following argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1293
1293
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a ``(msg, node, rejects)`` tuple, where ``node`` may be None
    when no commit was created and ``rejects`` is True when a --partial
    import left reject files behind.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        # patch.extract found nothing to apply
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            # --exact requires full changeset metadata in the patch
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # apply the patch to the working directory
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except error.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                if partial:
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform,
                                             **pycompat.strkwargs(opts))
                extra = {}
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                overrides = {}
                if partial:
                    # a partial import may legitimately produce an empty
                    # commit (everything was rejected)
                    overrides[('ui', 'allowemptycommit')] = True
                with repo.ui.configoverride(overrides, 'import'):
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
        else:
            # --bypass: commit via an in-memory context, leaving the
            # working directory untouched
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except error.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.memctx(repo, (p1.node(), p2.node()),
                                        message,
                                        files=files,
                                        filectxfn=store,
                                        user=user,
                                        date=date,
                                        branch=branch,
                                        editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        os.unlink(tmpname)
1464
1464
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# each function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1472
1472
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
    """Emit one changeset as an "HG changeset patch" via ``write``.

    Writes the standard header lines (user, date, branch, node, parents),
    any extra header lines registered in ``extraexport``, the description,
    and finally the diff against the selected parent.

    ``switch_parent`` diffs against the second parent (when present)
    instead of the first.  ``write`` is called as ``write(text, **kw)``
    and may receive a ``label=`` keyword for the diff chunks.
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        parents.reverse()

    # diff base: first (possibly switched) parent, or null for a root
    if parents:
        prev = parents[0]
    else:
        prev = nullid

    write("# HG changeset patch\n")
    write("# User %s\n" % ctx.user())
    write("# Date %d %d\n" % ctx.date())
    write("# %s\n" % util.datestr(ctx.date()))
    if branch and branch != 'default':
        write("# Branch %s\n" % branch)
    write("# Node ID %s\n" % hex(node))
    write("# Parent %s\n" % hex(prev))
    if len(parents) > 1:
        write("# Parent %s\n" % hex(parents[1]))

    # extension-supplied extra header lines
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            write('# %s\n' % header)
    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
        write(chunk, label=label)
1505
1505
def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named using
                              the given template.
        Neither fp nor template specified: All revs written to repo.ui.write()
    '''

    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    # 'write' stays None only in the fntemplate case; it is then rebound
    # inside the loop to a per-revision file writer.
    write = None
    dest = '<unnamed>'
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            def write(s, **kw):
                fo.write(s)
        if not dest.startswith('<'):
            # only announce real file names, not placeholder destinations
            # such as '<unnamed>'
            repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()
1564
1564
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function.

    fm is a formatter; marker is an obsolescence marker object providing
    prednode()/succnodes()/flags()/parentnodes()/date()/metadata().
    When index is not None it is printed first.
    """
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('prednode', '%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    # succnodes may be empty (pruning marker); condwrite skips it then
    fm.condwrite(succs, 'succnodes', '%s ',
                 fm.formatlist(map(hex, succs), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parents), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # 'date' is shown separately above, so drop it from the metadata dict
    meta = marker.metadata().copy()
    meta.pop('date', None)
    smeta = util.rapply(pycompat.maybebytestr, meta)
    fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
    fm.plain('\n')
1586
1586
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec

    Returns the revision number as a string; raises error.Abort when no
    revision's commit date satisfies the spec.
    """

    df = util.matchdate(date)
    m = scmutil.matchall(repo)
    results = {}

    # prepare callback for walkchangerevs: remember every rev whose
    # commit timestamp satisfies the date spec
    def prep(ctx, fns):
        d = ctx.date()
        if df(d[0]):
            results[ctx.rev()] = d

    # walkchangerevs yields newest-first here, so the first hit is tipmost
    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in results:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(results[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
1607
1607
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes for windowed history walks.

    Starts at ``windowsize`` and doubles on every yield until ``sizelimit``
    is reached; from then on the same value is yielded forever.
    """
    current = windowsize
    while True:
        yield current
        if current < sizelimit:
            current *= 2
1613
1613
def _walkrevs(repo, opts):
    """Resolve the set of revisions a log-style command should visit.

    Consults the 'rev', 'follow' and 'follow_first' keys of the parsed
    option dict ``opts`` and returns a smartset of revision numbers.
    """
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and repo.dirstate.p1() == nullid:
        # following from the null working-directory parent: nothing to walk
        revs = smartset.baseset()
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        # whole repo, newest first
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs
1628
1628
class FileWalkError(Exception):
    """Raised when a file history walk cannot proceed via filelogs alone."""
1631
1631
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    ``fncache`` is filled in place: it maps each wanted rev to the list of
    matched filenames touched there.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns.  Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None) for every file to inspect,
        # including copy sources discovered while walking (appended to
        # 'copies' by the loop below)
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1728
1728
class _followfilter(object):
    """Incrementally decide which revisions belong to a --follow walk.

    The first revision fed to match() becomes the start point and always
    matches.  Later revisions match when they are connected to the start:
    descendants (a parent already in ``roots``) when rev numbers increase,
    ancestors when they decrease.
    """
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        # onlyfirst: follow only the first parent of merges
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1766
1766
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order.  Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.

    ``opts`` keys consulted: 'rev', 'follow', 'follow_first', 'removed',
    'prune'.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _walkrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns.  Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # call prepare() forward (sorted) over the window before
            # yielding the contexts in walk order
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
1903
1903
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule matched unknown files (and subrepo files) for addition.

    ``prefix`` is prepended to names in subrepo status messages and passed
    through to wctx.add().  When ``explicitonly`` is true, only files named
    exactly on the command line are added.  Returns the list of file names
    that could not be added.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # record files the matcher rejects while still delegating to match.bad
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
                                  unknown=True, ignored=False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                # check for filename case collisions
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
1946
1946
def addwebdirpath(repo, serverpath, webconf):
    """Register *repo* under *serverpath* in *webconf*, recursing into any
    subrepositories recorded by revisions that touched .hgsub."""
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    # revisions whose filelog touched .hgsub may reference subrepos
    for rev in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[rev]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)
1955
1955
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking matched files (keeping them on disk), recursing into
    subrepos.

    ``prefix`` is prepended to names in subrepo status messages and passed
    through to wctx.forget().  When ``explicitonly`` is true, only files
    named exactly on the command line are forgotten.  Returns a pair
    (bad, forgot): names that could not be forgotten and names that were.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # record files the matcher rejects while still delegating to match.bad
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    # note: local 'forget' list shadows this function's name from here on
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2003
2003
def files(ui, ctx, m, fm, fmt, subrepos):
    """Emit the files of ``ctx`` matched by ``m`` through formatter ``fm``.

    ``fmt`` is the format string for each path; ``subrepos`` forces
    recursion into all subrepositories.  Returns 0 if at least one file
    was listed (here or in a subrepo), 1 otherwise.
    """
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        # in the working copy (rev is None), skip files marked removed
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2033
2033
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    """Implement the working-directory side of ``hg remove``.

    Untracks files matched by ``m`` and (unless ``after`` is set) unlinks
    them from the working directory.

    after - only record deletions already performed by hand; warn about
        matched files that still exist
    force - remove even modified or added files
    subrepos - recurse into matching subrepositories
    warnings - accumulator list used by recursive subrepo calls; when
        None, this is the outermost call and the collected warnings are
        printed here

    Returns 0 on success, 1 if anything could not be removed.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    # ask for the clean set too: clean files are removal candidates
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    # only the outermost invocation (warnings is None) prints the
    # accumulated messages; nested subrepo calls just append to the list
    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                               % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        # closure reads f from the current loop iteration
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    ui.progress(_('deleting'), None)

    # pick the set of files to forget ('list' intentionally mirrors the
    # historical local name, shadowing the builtin) and warn about files
    # that cannot be removed in the selected mode
    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            if ui.verbose or (f in files):
                warnings.append(_('not removing %s: file still exists\n')
                                % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
            repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2152
2152
2153 def _updatecatformatter(fm, ctx, matcher, path, decode):
2153 def _updatecatformatter(fm, ctx, matcher, path, decode):
2154 """Hook for adding data to the formatter used by ``hg cat``.
2154 """Hook for adding data to the formatter used by ``hg cat``.
2155
2155
2156 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2156 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2157 this method first."""
2157 this method first."""
2158 data = ctx[path].data()
2158 data = ctx[path].data()
2159 if decode:
2159 if decode:
2160 data = ctx.repo().wwritedata(path, data)
2160 data = ctx.repo().wwritedata(path, data)
2161 fm.startitem()
2161 fm.startitem()
2162 fm.write('data', '%s', data)
2162 fm.write('data', '%s', data)
2163 fm.data(abspath=path, path=matcher.rel(path))
2163 fm.data(abspath=path, path=matcher.rel(path))
2164
2164
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Write out the contents of files in ctx matched by ``matcher``.

    Output goes through the formatter ``basefm``, or to per-file
    destinations derived from ``fntemplate`` when one is given.  Matching
    subrepositories are recursed into.  Returns 0 if at least one file was
    written, 1 otherwise.
    """
    err = 1
    opts = pycompat.byteskwargs(opts)

    def write(path):
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                # best effort: a real problem surfaces when opening below
                pass
        with formatter.maybereopen(basefm, filename, opts) as fm:
            _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                _prefetchfiles(repo, ctx, [file])
                write(file)
                return 0
        except KeyError:
            pass

    # materialize the walk once so prefetch and the write loop agree
    files = list(ctx.walk(matcher))
    _prefetchfiles(repo, ctx, files)

    for abs in files:
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path),
                           **pycompat.strkwargs(opts)):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2217
2217
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a
    # command that doesn't support addremove
    dsguard = None
    if opts.get('addremove'):
        dsguard = dirstateguard.dirstateguard(repo, 'commit')
    with dsguard or util.nullcontextmanager():
        if dsguard and scmutil.addremove(repo, matcher, "", opts) != 0:
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

        return commitfunc(ui, repo, message, matcher, opts)
2238
2238
def samefile(f, ctx1, ctx2):
    """Report whether file f is identical (content and flags) in two contexts.

    A file absent from both contexts counts as the same; present in only
    one of them counts as different."""
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if not in1:
        return not in2
    if not in2:
        return False
    a = ctx1.filectx(f)
    b = ctx2.filectx(f)
    return not a.cmp(b) and a.flags() == b.flags()
2250
2250
def amend(ui, repo, old, extra, pats, opts):
    """Rewrite changeset ``old``, folding in matched working copy changes.

    Commits a replacement changeset on top of ``old``'s first parent,
    reroutes the working copy and dirstate to it, and retires the old node
    via scmutil.cleanupnodes().  Returns the new node id, or ``old.node()``
    unchanged when amending would change nothing.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    with repo.wlock(), repo.lock(), repo.transaction('amend'):
        # Participating changesets:
        #
        # wctx     o - workingctx that contains changes from working copy
        #          |   to go into amending commit
        #          |
        # old      o - changeset to amend
        #          |
        # base     o - first parent of the changeset to amend
        wctx = repo[None]

        # Copy to avoid mutating input
        extra = extra.copy()
        # Update extra dict from amended commit (e.g. to preserve graft
        # source)
        extra.update(old.extra())

        # Also update it from the wctx
        extra.update(wctx.extra())

        user = opts.get('user') or old.user()
        date = opts.get('date') or old.date()

        # Parse the date to allow comparison between date and old.date()
        date = util.parsedate(date)

        if len(old.parents()) > 1:
            # ctx.files() isn't reliable for merges, so fall back to the
            # slower repo.status() method
            files = set(fn for st in repo.status(base, old)[:3]
                        for fn in st)
        else:
            files = set(old.files())

        # add/remove the files to the working copy if the "addremove" option
        # was specified.
        matcher = scmutil.match(wctx, pats, opts)
        if (opts.get('addremove')
            and scmutil.addremove(repo, matcher, "", opts)):
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

        # Check subrepos. This depends on in-place wctx._status update in
        # subrepo.precommit(). To minimize the risk of this hack, we do
        # nothing if .hgsub does not exist.
        if '.hgsub' in wctx or '.hgsub' in old:
            subs, commitsubs, newsubstate = subrepoutil.precommit(
                ui, wctx, wctx._status, matcher)
            # amend should abort if commitsubrepos is enabled
            assert not commitsubs
            if subs:
                subrepoutil.writestate(repo, newsubstate)

        filestoamend = set(f for f in wctx.files() if matcher(f))

        changes = (len(filestoamend) > 0)
        if changes:
            # Recompute copies (avoid recording a -> b -> a)
            copied = copies.pathcopies(base, wctx, matcher)
            # Call p2() here: the previous code tested the bound method
            # object itself (always truthy), so the "merge only" guard was
            # dead and second-parent copies were computed against the null
            # context for non-merges as well.
            if old.p2():
                copied.update(copies.pathcopies(old.p2(), wctx, matcher))

            # Prune files which were reverted by the updates: if old
            # introduced file X and the file was renamed in the working
            # copy, then those two files are the same and
            # we can discard X from our list of files. Likewise if X
            # was removed, it's no longer relevant. If X is missing (aka
            # deleted), old X must be preserved.
            files.update(filestoamend)
            files = [f for f in files if (not samefile(f, wctx, base)
                                          or f in wctx.deleted())]

            def filectxfn(repo, ctx_, path):
                try:
                    # If the file being considered is not amongst the files
                    # to be amended, we should return the file context from the
                    # old changeset. This avoids issues when only some files in
                    # the working copy are being amended but there are also
                    # changes to other files from the old changeset.
                    if path not in filestoamend:
                        return old.filectx(path)

                    # Return None for removed files.
                    if path in wctx.removed():
                        return None

                    fctx = wctx[path]
                    flags = fctx.flags()
                    mctx = context.memfilectx(repo, ctx_,
                                              fctx.path(), fctx.data(),
                                              islink='l' in flags,
                                              isexec='x' in flags,
                                              copied=copied.get(path))
                    return mctx
                except KeyError:
                    return None
        else:
            ui.note(_('copying changeset %s to %s\n') % (old, base))

            # Use version of files as in the old cset
            def filectxfn(repo, ctx_, path):
                try:
                    return old.filectx(path)
                except KeyError:
                    return None

        # See if we got a message from -m or -l, if not, open the editor with
        # the message of the changeset to amend.
        message = logmessage(ui, opts)

        editform = mergeeditform(old, 'commit.amend')
        editor = getcommiteditor(editform=editform,
                                 **pycompat.strkwargs(opts))

        if not message:
            editor = getcommiteditor(edit=True, editform=editform)
            message = old.description()

        pureextra = extra.copy()
        extra['amend_source'] = old.hex()

        new = context.memctx(repo,
                             parents=[base.node(), old.p2().node()],
                             text=message,
                             files=files,
                             filectxfn=filectxfn,
                             user=user,
                             date=date,
                             extra=extra,
                             editor=editor)

        newdesc = changelog.stripdesc(new.description())
        if ((not changes)
            and newdesc == old.description()
            and user == old.user()
            and date == old.date()
            and pureextra == old.extra()):
            # nothing changed. continuing here would create a new node
            # anyway because of the amend_source noise.
            #
            # This is not what we expect from amend.
            return old.node()

        if opts.get('secret'):
            commitphase = 'secret'
        else:
            commitphase = old.phase()
        overrides = {('phases', 'new-commit'): commitphase}
        with ui.configoverride(overrides, 'amend'):
            newid = repo.commitctx(new)

        # Reroute the working copy parent to the new changeset
        repo.setparents(newid, nullid)
        mapping = {old.node(): (newid,)}
        obsmetadata = None
        if opts.get('note'):
            obsmetadata = {'note': opts['note']}
        scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)

        # Fixing the dirstate because localrepo.commitctx does not update
        # it. This is rather convenient because we did not need to update
        # the dirstate for all the files in the new commit which commitctx
        # could have done if it updated the dirstate. Now, we can
        # selectively update the dirstate only for the amended files.
        dirstate = repo.dirstate

        # Update the state of the files which were added and modified in
        # the amend to "normal" in the dirstate.
        normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
        for f in normalfiles:
            dirstate.normal(f)

        # Update the state of files which were removed in the amend
        # to "removed" in the dirstate.
        removedfiles = set(wctx.removed()) & filestoamend
        for f in removedfiles:
            dirstate.drop(f)

        return newid
2442
2442
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's commit message, invoking an editor if none exists yet.

    An existing non-empty description is used as-is; otherwise the user is
    prompted via commitforceeditor(), which aborts if the templated text
    comes back unchanged."""
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2448
2448
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Open the user's editor to collect a commit message for ctx.

    finishdesc - optional callable applied to the edited text before
        validation
    extramsg - extra instruction line shown in the HG: comment block
    editform - dotted form name used to select a '[committemplate]' entry
    unchangedmessagedetection - abort if the user left the templated text
        untouched

    Raises error.Abort on an empty (or, when requested, unchanged) message.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # find the most specific [committemplate] entry by peeling trailing
    # components off the editform name until a configured one matches
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        ref = '.'.join(forms)
        if repo.ui.config('committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)
    try:
        # make in-memory changes visible to external process
        tr = repo.currenttransaction()
        repo.dirstate.write(tr)
        pending = tr and tr.writepending() and repo.root

        editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                                  editform=editform, pending=pending,
                                  repopath=repo.path, action='commit')
    finally:
        # restore the cwd even if the editor invocation raises, so an
        # aborted commit doesn't leave the process chdir'd to repo.root
        os.chdir(olddir)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    text = re.sub("(?m)^HG:.*(\n|$)", "", text)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2498
2498
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    """Render the '[committemplate]' entry ``ref`` for ctx and return it."""
    ui = repo.ui
    t = logcmdutil.changesettemplater(
        ui, repo, formatter.templatespec(ref, None, None))
    overrides = ((k, templater.unquotestring(v))
                 for k, v in repo.ui.configitems('committemplate'))
    t.t.cache.update(overrides)

    extramsg = extramsg or '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2512
2512
2513 def hgprefix(msg):
2513 def hgprefix(msg):
2514 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2514 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2515
2515
2516 def buildcommittext(repo, ctx, subs, extramsg):
2516 def buildcommittext(repo, ctx, subs, extramsg):
2517 edittext = []
2517 edittext = []
2518 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2518 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2519 if ctx.description():
2519 if ctx.description():
2520 edittext.append(ctx.description())
2520 edittext.append(ctx.description())
2521 edittext.append("")
2521 edittext.append("")
2522 edittext.append("") # Empty line between message and comments.
2522 edittext.append("") # Empty line between message and comments.
2523 edittext.append(hgprefix(_("Enter commit message."
2523 edittext.append(hgprefix(_("Enter commit message."
2524 " Lines beginning with 'HG:' are removed.")))
2524 " Lines beginning with 'HG:' are removed.")))
2525 edittext.append(hgprefix(extramsg))
2525 edittext.append(hgprefix(extramsg))
2526 edittext.append("HG: --")
2526 edittext.append("HG: --")
2527 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2527 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2528 if ctx.p2():
2528 if ctx.p2():
2529 edittext.append(hgprefix(_("branch merge")))
2529 edittext.append(hgprefix(_("branch merge")))
2530 if ctx.branch():
2530 if ctx.branch():
2531 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2531 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2532 if bookmarks.isactivewdirparent(repo):
2532 if bookmarks.isactivewdirparent(repo):
2533 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2533 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2534 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2534 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2535 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2535 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2536 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2536 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2537 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2537 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2538 if not added and not modified and not removed:
2538 if not added and not modified and not removed:
2539 edittext.append(hgprefix(_("no files changed")))
2539 edittext.append(hgprefix(_("no files changed")))
2540 edittext.append("")
2540 edittext.append("")
2541
2541
2542 return "\n".join(edittext)
2542 return "\n".join(edittext)
2543
2543
2544 def commitstatus(repo, node, branch, bheads=None, opts=None):
2544 def commitstatus(repo, node, branch, bheads=None, opts=None):
2545 if opts is None:
2545 if opts is None:
2546 opts = {}
2546 opts = {}
2547 ctx = repo[node]
2547 ctx = repo[node]
2548 parents = ctx.parents()
2548 parents = ctx.parents()
2549
2549
2550 if (not opts.get('amend') and bheads and node not in bheads and not
2550 if (not opts.get('amend') and bheads and node not in bheads and not
2551 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2551 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2552 repo.ui.status(_('created new head\n'))
2552 repo.ui.status(_('created new head\n'))
2553 # The message is not printed for initial roots. For the other
2553 # The message is not printed for initial roots. For the other
2554 # changesets, it is printed in the following situations:
2554 # changesets, it is printed in the following situations:
2555 #
2555 #
2556 # Par column: for the 2 parents with ...
2556 # Par column: for the 2 parents with ...
2557 # N: null or no parent
2557 # N: null or no parent
2558 # B: parent is on another named branch
2558 # B: parent is on another named branch
2559 # C: parent is a regular non head changeset
2559 # C: parent is a regular non head changeset
2560 # H: parent was a branch head of the current branch
2560 # H: parent was a branch head of the current branch
2561 # Msg column: whether we print "created new head" message
2561 # Msg column: whether we print "created new head" message
2562 # In the following, it is assumed that there already exists some
2562 # In the following, it is assumed that there already exists some
2563 # initial branch heads of the current branch, otherwise nothing is
2563 # initial branch heads of the current branch, otherwise nothing is
2564 # printed anyway.
2564 # printed anyway.
2565 #
2565 #
2566 # Par Msg Comment
2566 # Par Msg Comment
2567 # N N y additional topo root
2567 # N N y additional topo root
2568 #
2568 #
2569 # B N y additional branch root
2569 # B N y additional branch root
2570 # C N y additional topo head
2570 # C N y additional topo head
2571 # H N n usual case
2571 # H N n usual case
2572 #
2572 #
2573 # B B y weird additional branch root
2573 # B B y weird additional branch root
2574 # C B y branch merge
2574 # C B y branch merge
2575 # H B n merge with named branch
2575 # H B n merge with named branch
2576 #
2576 #
2577 # C C y additional head from merge
2577 # C C y additional head from merge
2578 # C H n merge with a head
2578 # C H n merge with a head
2579 #
2579 #
2580 # H H n head merge: head count decreases
2580 # H H n head merge: head count decreases
2581
2581
2582 if not opts.get('close_branch'):
2582 if not opts.get('close_branch'):
2583 for r in parents:
2583 for r in parents:
2584 if r.closesbranch() and r.branch() == branch:
2584 if r.closesbranch() and r.branch() == branch:
2585 repo.ui.status(_('reopening closed branch head %d\n') % r)
2585 repo.ui.status(_('reopening closed branch head %d\n') % r)
2586
2586
2587 if repo.ui.debugflag:
2587 if repo.ui.debugflag:
2588 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2588 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2589 elif repo.ui.verbose:
2589 elif repo.ui.verbose:
2590 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2590 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2591
2591
2592 def postcommitstatus(repo, pats, opts):
2592 def postcommitstatus(repo, pats, opts):
2593 return repo.status(match=scmutil.match(repo[None], pats, opts))
2593 return repo.status(match=scmutil.match(repo[None], pats, opts))
2594
2594
2595 def revert(ui, repo, ctx, parents, *pats, **opts):
2595 def revert(ui, repo, ctx, parents, *pats, **opts):
2596 opts = pycompat.byteskwargs(opts)
2596 opts = pycompat.byteskwargs(opts)
2597 parent, p2 = parents
2597 parent, p2 = parents
2598 node = ctx.node()
2598 node = ctx.node()
2599
2599
2600 mf = ctx.manifest()
2600 mf = ctx.manifest()
2601 if node == p2:
2601 if node == p2:
2602 parent = p2
2602 parent = p2
2603
2603
2604 # need all matching names in dirstate and manifest of target rev,
2604 # need all matching names in dirstate and manifest of target rev,
2605 # so have to walk both. do not print errors if files exist in one
2605 # so have to walk both. do not print errors if files exist in one
2606 # but not other. in both cases, filesets should be evaluated against
2606 # but not other. in both cases, filesets should be evaluated against
2607 # workingctx to get consistent result (issue4497). this means 'set:**'
2607 # workingctx to get consistent result (issue4497). this means 'set:**'
2608 # cannot be used to select missing files from target rev.
2608 # cannot be used to select missing files from target rev.
2609
2609
2610 # `names` is a mapping for all elements in working copy and target revision
2610 # `names` is a mapping for all elements in working copy and target revision
2611 # The mapping is in the form:
2611 # The mapping is in the form:
2612 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2612 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2613 names = {}
2613 names = {}
2614
2614
2615 with repo.wlock():
2615 with repo.wlock():
2616 ## filling of the `names` mapping
2616 ## filling of the `names` mapping
2617 # walk dirstate to fill `names`
2617 # walk dirstate to fill `names`
2618
2618
2619 interactive = opts.get('interactive', False)
2619 interactive = opts.get('interactive', False)
2620 wctx = repo[None]
2620 wctx = repo[None]
2621 m = scmutil.match(wctx, pats, opts)
2621 m = scmutil.match(wctx, pats, opts)
2622
2622
2623 # we'll need this later
2623 # we'll need this later
2624 targetsubs = sorted(s for s in wctx.substate if m(s))
2624 targetsubs = sorted(s for s in wctx.substate if m(s))
2625
2625
2626 if not m.always():
2626 if not m.always():
2627 matcher = matchmod.badmatch(m, lambda x, y: False)
2627 matcher = matchmod.badmatch(m, lambda x, y: False)
2628 for abs in wctx.walk(matcher):
2628 for abs in wctx.walk(matcher):
2629 names[abs] = m.rel(abs), m.exact(abs)
2629 names[abs] = m.rel(abs), m.exact(abs)
2630
2630
2631 # walk target manifest to fill `names`
2631 # walk target manifest to fill `names`
2632
2632
2633 def badfn(path, msg):
2633 def badfn(path, msg):
2634 if path in names:
2634 if path in names:
2635 return
2635 return
2636 if path in ctx.substate:
2636 if path in ctx.substate:
2637 return
2637 return
2638 path_ = path + '/'
2638 path_ = path + '/'
2639 for f in names:
2639 for f in names:
2640 if f.startswith(path_):
2640 if f.startswith(path_):
2641 return
2641 return
2642 ui.warn("%s: %s\n" % (m.rel(path), msg))
2642 ui.warn("%s: %s\n" % (m.rel(path), msg))
2643
2643
2644 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2644 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2645 if abs not in names:
2645 if abs not in names:
2646 names[abs] = m.rel(abs), m.exact(abs)
2646 names[abs] = m.rel(abs), m.exact(abs)
2647
2647
2648 # Find status of all file in `names`.
2648 # Find status of all file in `names`.
2649 m = scmutil.matchfiles(repo, names)
2649 m = scmutil.matchfiles(repo, names)
2650
2650
2651 changes = repo.status(node1=node, match=m,
2651 changes = repo.status(node1=node, match=m,
2652 unknown=True, ignored=True, clean=True)
2652 unknown=True, ignored=True, clean=True)
2653 else:
2653 else:
2654 changes = repo.status(node1=node, match=m)
2654 changes = repo.status(node1=node, match=m)
2655 for kind in changes:
2655 for kind in changes:
2656 for abs in kind:
2656 for abs in kind:
2657 names[abs] = m.rel(abs), m.exact(abs)
2657 names[abs] = m.rel(abs), m.exact(abs)
2658
2658
2659 m = scmutil.matchfiles(repo, names)
2659 m = scmutil.matchfiles(repo, names)
2660
2660
2661 modified = set(changes.modified)
2661 modified = set(changes.modified)
2662 added = set(changes.added)
2662 added = set(changes.added)
2663 removed = set(changes.removed)
2663 removed = set(changes.removed)
2664 _deleted = set(changes.deleted)
2664 _deleted = set(changes.deleted)
2665 unknown = set(changes.unknown)
2665 unknown = set(changes.unknown)
2666 unknown.update(changes.ignored)
2666 unknown.update(changes.ignored)
2667 clean = set(changes.clean)
2667 clean = set(changes.clean)
2668 modadded = set()
2668 modadded = set()
2669
2669
2670 # We need to account for the state of the file in the dirstate,
2670 # We need to account for the state of the file in the dirstate,
2671 # even when we revert against something else than parent. This will
2671 # even when we revert against something else than parent. This will
2672 # slightly alter the behavior of revert (doing back up or not, delete
2672 # slightly alter the behavior of revert (doing back up or not, delete
2673 # or just forget etc).
2673 # or just forget etc).
2674 if parent == node:
2674 if parent == node:
2675 dsmodified = modified
2675 dsmodified = modified
2676 dsadded = added
2676 dsadded = added
2677 dsremoved = removed
2677 dsremoved = removed
2678 # store all local modifications, useful later for rename detection
2678 # store all local modifications, useful later for rename detection
2679 localchanges = dsmodified | dsadded
2679 localchanges = dsmodified | dsadded
2680 modified, added, removed = set(), set(), set()
2680 modified, added, removed = set(), set(), set()
2681 else:
2681 else:
2682 changes = repo.status(node1=parent, match=m)
2682 changes = repo.status(node1=parent, match=m)
2683 dsmodified = set(changes.modified)
2683 dsmodified = set(changes.modified)
2684 dsadded = set(changes.added)
2684 dsadded = set(changes.added)
2685 dsremoved = set(changes.removed)
2685 dsremoved = set(changes.removed)
2686 # store all local modifications, useful later for rename detection
2686 # store all local modifications, useful later for rename detection
2687 localchanges = dsmodified | dsadded
2687 localchanges = dsmodified | dsadded
2688
2688
2689 # only take into account for removes between wc and target
2689 # only take into account for removes between wc and target
2690 clean |= dsremoved - removed
2690 clean |= dsremoved - removed
2691 dsremoved &= removed
2691 dsremoved &= removed
2692 # distinct between dirstate remove and other
2692 # distinct between dirstate remove and other
2693 removed -= dsremoved
2693 removed -= dsremoved
2694
2694
2695 modadded = added & dsmodified
2695 modadded = added & dsmodified
2696 added -= modadded
2696 added -= modadded
2697
2697
2698 # tell newly modified apart.
2698 # tell newly modified apart.
2699 dsmodified &= modified
2699 dsmodified &= modified
2700 dsmodified |= modified & dsadded # dirstate added may need backup
2700 dsmodified |= modified & dsadded # dirstate added may need backup
2701 modified -= dsmodified
2701 modified -= dsmodified
2702
2702
2703 # We need to wait for some post-processing to update this set
2703 # We need to wait for some post-processing to update this set
2704 # before making the distinction. The dirstate will be used for
2704 # before making the distinction. The dirstate will be used for
2705 # that purpose.
2705 # that purpose.
2706 dsadded = added
2706 dsadded = added
2707
2707
2708 # in case of merge, files that are actually added can be reported as
2708 # in case of merge, files that are actually added can be reported as
2709 # modified, we need to post process the result
2709 # modified, we need to post process the result
2710 if p2 != nullid:
2710 if p2 != nullid:
2711 mergeadd = set(dsmodified)
2711 mergeadd = set(dsmodified)
2712 for path in dsmodified:
2712 for path in dsmodified:
2713 if path in mf:
2713 if path in mf:
2714 mergeadd.remove(path)
2714 mergeadd.remove(path)
2715 dsadded |= mergeadd
2715 dsadded |= mergeadd
2716 dsmodified -= mergeadd
2716 dsmodified -= mergeadd
2717
2717
2718 # if f is a rename, update `names` to also revert the source
2718 # if f is a rename, update `names` to also revert the source
2719 cwd = repo.getcwd()
2719 cwd = repo.getcwd()
2720 for f in localchanges:
2720 for f in localchanges:
2721 src = repo.dirstate.copied(f)
2721 src = repo.dirstate.copied(f)
2722 # XXX should we check for rename down to target node?
2722 # XXX should we check for rename down to target node?
2723 if src and src not in names and repo.dirstate[src] == 'r':
2723 if src and src not in names and repo.dirstate[src] == 'r':
2724 dsremoved.add(src)
2724 dsremoved.add(src)
2725 names[src] = (repo.pathto(src, cwd), True)
2725 names[src] = (repo.pathto(src, cwd), True)
2726
2726
2727 # determine the exact nature of the deleted changesets
2727 # determine the exact nature of the deleted changesets
2728 deladded = set(_deleted)
2728 deladded = set(_deleted)
2729 for path in _deleted:
2729 for path in _deleted:
2730 if path in mf:
2730 if path in mf:
2731 deladded.remove(path)
2731 deladded.remove(path)
2732 deleted = _deleted - deladded
2732 deleted = _deleted - deladded
2733
2733
2734 # distinguish between file to forget and the other
2734 # distinguish between file to forget and the other
2735 added = set()
2735 added = set()
2736 for abs in dsadded:
2736 for abs in dsadded:
2737 if repo.dirstate[abs] != 'a':
2737 if repo.dirstate[abs] != 'a':
2738 added.add(abs)
2738 added.add(abs)
2739 dsadded -= added
2739 dsadded -= added
2740
2740
2741 for abs in deladded:
2741 for abs in deladded:
2742 if repo.dirstate[abs] == 'a':
2742 if repo.dirstate[abs] == 'a':
2743 dsadded.add(abs)
2743 dsadded.add(abs)
2744 deladded -= dsadded
2744 deladded -= dsadded
2745
2745
2746 # For files marked as removed, we check if an unknown file is present at
2746 # For files marked as removed, we check if an unknown file is present at
2747 # the same path. If a such file exists it may need to be backed up.
2747 # the same path. If a such file exists it may need to be backed up.
2748 # Making the distinction at this stage helps have simpler backup
2748 # Making the distinction at this stage helps have simpler backup
2749 # logic.
2749 # logic.
2750 removunk = set()
2750 removunk = set()
2751 for abs in removed:
2751 for abs in removed:
2752 target = repo.wjoin(abs)
2752 target = repo.wjoin(abs)
2753 if os.path.lexists(target):
2753 if os.path.lexists(target):
2754 removunk.add(abs)
2754 removunk.add(abs)
2755 removed -= removunk
2755 removed -= removunk
2756
2756
2757 dsremovunk = set()
2757 dsremovunk = set()
2758 for abs in dsremoved:
2758 for abs in dsremoved:
2759 target = repo.wjoin(abs)
2759 target = repo.wjoin(abs)
2760 if os.path.lexists(target):
2760 if os.path.lexists(target):
2761 dsremovunk.add(abs)
2761 dsremovunk.add(abs)
2762 dsremoved -= dsremovunk
2762 dsremoved -= dsremovunk
2763
2763
2764 # action to be actually performed by revert
2764 # action to be actually performed by revert
2765 # (<list of file>, message>) tuple
2765 # (<list of file>, message>) tuple
2766 actions = {'revert': ([], _('reverting %s\n')),
2766 actions = {'revert': ([], _('reverting %s\n')),
2767 'add': ([], _('adding %s\n')),
2767 'add': ([], _('adding %s\n')),
2768 'remove': ([], _('removing %s\n')),
2768 'remove': ([], _('removing %s\n')),
2769 'drop': ([], _('removing %s\n')),
2769 'drop': ([], _('removing %s\n')),
2770 'forget': ([], _('forgetting %s\n')),
2770 'forget': ([], _('forgetting %s\n')),
2771 'undelete': ([], _('undeleting %s\n')),
2771 'undelete': ([], _('undeleting %s\n')),
2772 'noop': (None, _('no changes needed to %s\n')),
2772 'noop': (None, _('no changes needed to %s\n')),
2773 'unknown': (None, _('file not managed: %s\n')),
2773 'unknown': (None, _('file not managed: %s\n')),
2774 }
2774 }
2775
2775
2776 # "constant" that convey the backup strategy.
2776 # "constant" that convey the backup strategy.
2777 # All set to `discard` if `no-backup` is set do avoid checking
2777 # All set to `discard` if `no-backup` is set do avoid checking
2778 # no_backup lower in the code.
2778 # no_backup lower in the code.
2779 # These values are ordered for comparison purposes
2779 # These values are ordered for comparison purposes
2780 backupinteractive = 3 # do backup if interactively modified
2780 backupinteractive = 3 # do backup if interactively modified
2781 backup = 2 # unconditionally do backup
2781 backup = 2 # unconditionally do backup
2782 check = 1 # check if the existing file differs from target
2782 check = 1 # check if the existing file differs from target
2783 discard = 0 # never do backup
2783 discard = 0 # never do backup
2784 if opts.get('no_backup'):
2784 if opts.get('no_backup'):
2785 backupinteractive = backup = check = discard
2785 backupinteractive = backup = check = discard
2786 if interactive:
2786 if interactive:
2787 dsmodifiedbackup = backupinteractive
2787 dsmodifiedbackup = backupinteractive
2788 else:
2788 else:
2789 dsmodifiedbackup = backup
2789 dsmodifiedbackup = backup
2790 tobackup = set()
2790 tobackup = set()
2791
2791
2792 backupanddel = actions['remove']
2792 backupanddel = actions['remove']
2793 if not opts.get('no_backup'):
2793 if not opts.get('no_backup'):
2794 backupanddel = actions['drop']
2794 backupanddel = actions['drop']
2795
2795
2796 disptable = (
2796 disptable = (
2797 # dispatch table:
2797 # dispatch table:
2798 # file state
2798 # file state
2799 # action
2799 # action
2800 # make backup
2800 # make backup
2801
2801
2802 ## Sets that results that will change file on disk
2802 ## Sets that results that will change file on disk
2803 # Modified compared to target, no local change
2803 # Modified compared to target, no local change
2804 (modified, actions['revert'], discard),
2804 (modified, actions['revert'], discard),
2805 # Modified compared to target, but local file is deleted
2805 # Modified compared to target, but local file is deleted
2806 (deleted, actions['revert'], discard),
2806 (deleted, actions['revert'], discard),
2807 # Modified compared to target, local change
2807 # Modified compared to target, local change
2808 (dsmodified, actions['revert'], dsmodifiedbackup),
2808 (dsmodified, actions['revert'], dsmodifiedbackup),
2809 # Added since target
2809 # Added since target
2810 (added, actions['remove'], discard),
2810 (added, actions['remove'], discard),
2811 # Added in working directory
2811 # Added in working directory
2812 (dsadded, actions['forget'], discard),
2812 (dsadded, actions['forget'], discard),
2813 # Added since target, have local modification
2813 # Added since target, have local modification
2814 (modadded, backupanddel, backup),
2814 (modadded, backupanddel, backup),
2815 # Added since target but file is missing in working directory
2815 # Added since target but file is missing in working directory
2816 (deladded, actions['drop'], discard),
2816 (deladded, actions['drop'], discard),
2817 # Removed since target, before working copy parent
2817 # Removed since target, before working copy parent
2818 (removed, actions['add'], discard),
2818 (removed, actions['add'], discard),
2819 # Same as `removed` but an unknown file exists at the same path
2819 # Same as `removed` but an unknown file exists at the same path
2820 (removunk, actions['add'], check),
2820 (removunk, actions['add'], check),
2821 # Removed since targe, marked as such in working copy parent
2821 # Removed since targe, marked as such in working copy parent
2822 (dsremoved, actions['undelete'], discard),
2822 (dsremoved, actions['undelete'], discard),
2823 # Same as `dsremoved` but an unknown file exists at the same path
2823 # Same as `dsremoved` but an unknown file exists at the same path
2824 (dsremovunk, actions['undelete'], check),
2824 (dsremovunk, actions['undelete'], check),
2825 ## the following sets does not result in any file changes
2825 ## the following sets does not result in any file changes
2826 # File with no modification
2826 # File with no modification
2827 (clean, actions['noop'], discard),
2827 (clean, actions['noop'], discard),
2828 # Existing file, not tracked anywhere
2828 # Existing file, not tracked anywhere
2829 (unknown, actions['unknown'], discard),
2829 (unknown, actions['unknown'], discard),
2830 )
2830 )
2831
2831
2832 for abs, (rel, exact) in sorted(names.items()):
2832 for abs, (rel, exact) in sorted(names.items()):
2833 # target file to be touch on disk (relative to cwd)
2833 # target file to be touch on disk (relative to cwd)
2834 target = repo.wjoin(abs)
2834 target = repo.wjoin(abs)
2835 # search the entry in the dispatch table.
2835 # search the entry in the dispatch table.
2836 # if the file is in any of these sets, it was touched in the working
2836 # if the file is in any of these sets, it was touched in the working
2837 # directory parent and we are sure it needs to be reverted.
2837 # directory parent and we are sure it needs to be reverted.
2838 for table, (xlist, msg), dobackup in disptable:
2838 for table, (xlist, msg), dobackup in disptable:
2839 if abs not in table:
2839 if abs not in table:
2840 continue
2840 continue
2841 if xlist is not None:
2841 if xlist is not None:
2842 xlist.append(abs)
2842 xlist.append(abs)
2843 if dobackup:
2843 if dobackup:
2844 # If in interactive mode, don't automatically create
2844 # If in interactive mode, don't automatically create
2845 # .orig files (issue4793)
2845 # .orig files (issue4793)
2846 if dobackup == backupinteractive:
2846 if dobackup == backupinteractive:
2847 tobackup.add(abs)
2847 tobackup.add(abs)
2848 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
2848 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
2849 bakname = scmutil.origpath(ui, repo, rel)
2849 bakname = scmutil.origpath(ui, repo, rel)
2850 ui.note(_('saving current version of %s as %s\n') %
2850 ui.note(_('saving current version of %s as %s\n') %
2851 (rel, bakname))
2851 (rel, bakname))
2852 if not opts.get('dry_run'):
2852 if not opts.get('dry_run'):
2853 if interactive:
2853 if interactive:
2854 util.copyfile(target, bakname)
2854 util.copyfile(target, bakname)
2855 else:
2855 else:
2856 util.rename(target, bakname)
2856 util.rename(target, bakname)
2857 if ui.verbose or not exact:
2857 if ui.verbose or not exact:
2858 if not isinstance(msg, bytes):
2858 if not isinstance(msg, bytes):
2859 msg = msg(abs)
2859 msg = msg(abs)
2860 ui.status(msg % rel)
2860 ui.status(msg % rel)
2861 elif exact:
2861 elif exact:
2862 ui.warn(msg % rel)
2862 ui.warn(msg % rel)
2863 break
2863 break
2864
2864
2865 if not opts.get('dry_run'):
2865 if not opts.get('dry_run'):
2866 needdata = ('revert', 'add', 'undelete')
2866 needdata = ('revert', 'add', 'undelete')
2867 if _revertprefetch is not _revertprefetchstub:
2867 if _revertprefetch is not _revertprefetchstub:
2868 ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, use "
2868 ui.deprecwarn("'cmdutil._revertprefetch' is deprecated, "
2869 "'cmdutil._prefetchfiles'", '4.6', stacklevel=1)
2869 "add a callback to 'scmutil.fileprefetchhooks'",
2870 '4.6', stacklevel=1)
2870 _revertprefetch(repo, ctx,
2871 _revertprefetch(repo, ctx,
2871 *[actions[name][0] for name in needdata])
2872 *[actions[name][0] for name in needdata])
2872 oplist = [actions[name][0] for name in needdata]
2873 oplist = [actions[name][0] for name in needdata]
2873 _prefetchfiles(repo, ctx,
2874 _prefetchfiles(repo, ctx,
2874 [f for sublist in oplist for f in sublist])
2875 [f for sublist in oplist for f in sublist])
2875 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
2876 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
2876
2877
2877 if targetsubs:
2878 if targetsubs:
2878 # Revert the subrepos on the revert list
2879 # Revert the subrepos on the revert list
2879 for sub in targetsubs:
2880 for sub in targetsubs:
2880 try:
2881 try:
2881 wctx.sub(sub).revert(ctx.substate[sub], *pats,
2882 wctx.sub(sub).revert(ctx.substate[sub], *pats,
2882 **pycompat.strkwargs(opts))
2883 **pycompat.strkwargs(opts))
2883 except KeyError:
2884 except KeyError:
2884 raise error.Abort("subrepository '%s' does not exist in %s!"
2885 raise error.Abort("subrepository '%s' does not exist in %s!"
2885 % (sub, short(ctx.node())))
2886 % (sub, short(ctx.node())))
2886
2887
2887 def _revertprefetchstub(repo, ctx, *files):
2888 def _revertprefetchstub(repo, ctx, *files):
2888 """Stub method for detecting extension wrapping of _revertprefetch(), to
2889 """Stub method for detecting extension wrapping of _revertprefetch(), to
2889 issue a deprecation warning."""
2890 issue a deprecation warning."""
2890
2891
2891 _revertprefetch = _revertprefetchstub
2892 _revertprefetch = _revertprefetchstub
2892
2893
2893 def _prefetchfiles(repo, ctx, files):
2894 def _prefetchfiles(repo, ctx, files):
2894 """Let extensions changing the storage layer prefetch content for any non
2895 """Let extensions changing the storage layer prefetch content for any non
2895 merge based command."""
2896 merge based command."""
2897 scmutil.fileprefetchhooks(repo, ctx, files)
2896
2898
2897 def _performrevert(repo, parents, ctx, actions, interactive=False,
2899 def _performrevert(repo, parents, ctx, actions, interactive=False,
2898 tobackup=None):
2900 tobackup=None):
2899 """function that actually perform all the actions computed for revert
2901 """function that actually perform all the actions computed for revert
2900
2902
2901 This is an independent function to let extension to plug in and react to
2903 This is an independent function to let extension to plug in and react to
2902 the imminent revert.
2904 the imminent revert.
2903
2905
2904 Make sure you have the working directory locked when calling this function.
2906 Make sure you have the working directory locked when calling this function.
2905 """
2907 """
2906 parent, p2 = parents
2908 parent, p2 = parents
2907 node = ctx.node()
2909 node = ctx.node()
2908 excluded_files = []
2910 excluded_files = []
2909 matcher_opts = {"exclude": excluded_files}
2911 matcher_opts = {"exclude": excluded_files}
2910
2912
2911 def checkout(f):
2913 def checkout(f):
2912 fc = ctx[f]
2914 fc = ctx[f]
2913 repo.wwrite(f, fc.data(), fc.flags())
2915 repo.wwrite(f, fc.data(), fc.flags())
2914
2916
2915 def doremove(f):
2917 def doremove(f):
2916 try:
2918 try:
2917 repo.wvfs.unlinkpath(f)
2919 repo.wvfs.unlinkpath(f)
2918 except OSError:
2920 except OSError:
2919 pass
2921 pass
2920 repo.dirstate.remove(f)
2922 repo.dirstate.remove(f)
2921
2923
2922 audit_path = pathutil.pathauditor(repo.root, cached=True)
2924 audit_path = pathutil.pathauditor(repo.root, cached=True)
2923 for f in actions['forget'][0]:
2925 for f in actions['forget'][0]:
2924 if interactive:
2926 if interactive:
2925 choice = repo.ui.promptchoice(
2927 choice = repo.ui.promptchoice(
2926 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
2928 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
2927 if choice == 0:
2929 if choice == 0:
2928 repo.dirstate.drop(f)
2930 repo.dirstate.drop(f)
2929 else:
2931 else:
2930 excluded_files.append(repo.wjoin(f))
2932 excluded_files.append(repo.wjoin(f))
2931 else:
2933 else:
2932 repo.dirstate.drop(f)
2934 repo.dirstate.drop(f)
2933 for f in actions['remove'][0]:
2935 for f in actions['remove'][0]:
2934 audit_path(f)
2936 audit_path(f)
2935 if interactive:
2937 if interactive:
2936 choice = repo.ui.promptchoice(
2938 choice = repo.ui.promptchoice(
2937 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
2939 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
2938 if choice == 0:
2940 if choice == 0:
2939 doremove(f)
2941 doremove(f)
2940 else:
2942 else:
2941 excluded_files.append(repo.wjoin(f))
2943 excluded_files.append(repo.wjoin(f))
2942 else:
2944 else:
2943 doremove(f)
2945 doremove(f)
2944 for f in actions['drop'][0]:
2946 for f in actions['drop'][0]:
2945 audit_path(f)
2947 audit_path(f)
2946 repo.dirstate.remove(f)
2948 repo.dirstate.remove(f)
2947
2949
2948 normal = None
2950 normal = None
2949 if node == parent:
2951 if node == parent:
2950 # We're reverting to our parent. If possible, we'd like status
2952 # We're reverting to our parent. If possible, we'd like status
2951 # to report the file as clean. We have to use normallookup for
2953 # to report the file as clean. We have to use normallookup for
2952 # merges to avoid losing information about merged/dirty files.
2954 # merges to avoid losing information about merged/dirty files.
2953 if p2 != nullid:
2955 if p2 != nullid:
2954 normal = repo.dirstate.normallookup
2956 normal = repo.dirstate.normallookup
2955 else:
2957 else:
2956 normal = repo.dirstate.normal
2958 normal = repo.dirstate.normal
2957
2959
2958 newlyaddedandmodifiedfiles = set()
2960 newlyaddedandmodifiedfiles = set()
2959 if interactive:
2961 if interactive:
2960 # Prompt the user for changes to revert
2962 # Prompt the user for changes to revert
2961 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
2963 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
2962 m = scmutil.match(ctx, torevert, matcher_opts)
2964 m = scmutil.match(ctx, torevert, matcher_opts)
2963 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
2965 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
2964 diffopts.nodates = True
2966 diffopts.nodates = True
2965 diffopts.git = True
2967 diffopts.git = True
2966 operation = 'discard'
2968 operation = 'discard'
2967 reversehunks = True
2969 reversehunks = True
2968 if node != parent:
2970 if node != parent:
2969 operation = 'apply'
2971 operation = 'apply'
2970 reversehunks = False
2972 reversehunks = False
2971 if reversehunks:
2973 if reversehunks:
2972 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
2974 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
2973 else:
2975 else:
2974 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
2976 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
2975 originalchunks = patch.parsepatch(diff)
2977 originalchunks = patch.parsepatch(diff)
2976
2978
2977 try:
2979 try:
2978
2980
2979 chunks, opts = recordfilter(repo.ui, originalchunks,
2981 chunks, opts = recordfilter(repo.ui, originalchunks,
2980 operation=operation)
2982 operation=operation)
2981 if reversehunks:
2983 if reversehunks:
2982 chunks = patch.reversehunks(chunks)
2984 chunks = patch.reversehunks(chunks)
2983
2985
2984 except error.PatchError as err:
2986 except error.PatchError as err:
2985 raise error.Abort(_('error parsing patch: %s') % err)
2987 raise error.Abort(_('error parsing patch: %s') % err)
2986
2988
2987 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
2989 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
2988 if tobackup is None:
2990 if tobackup is None:
2989 tobackup = set()
2991 tobackup = set()
2990 # Apply changes
2992 # Apply changes
2991 fp = stringio()
2993 fp = stringio()
2992 for c in chunks:
2994 for c in chunks:
2993 # Create a backup file only if this hunk should be backed up
2995 # Create a backup file only if this hunk should be backed up
2994 if ishunk(c) and c.header.filename() in tobackup:
2996 if ishunk(c) and c.header.filename() in tobackup:
2995 abs = c.header.filename()
2997 abs = c.header.filename()
2996 target = repo.wjoin(abs)
2998 target = repo.wjoin(abs)
2997 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
2999 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
2998 util.copyfile(target, bakname)
3000 util.copyfile(target, bakname)
2999 tobackup.remove(abs)
3001 tobackup.remove(abs)
3000 c.write(fp)
3002 c.write(fp)
3001 dopatch = fp.tell()
3003 dopatch = fp.tell()
3002 fp.seek(0)
3004 fp.seek(0)
3003 if dopatch:
3005 if dopatch:
3004 try:
3006 try:
3005 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3007 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3006 except error.PatchError as err:
3008 except error.PatchError as err:
3007 raise error.Abort(str(err))
3009 raise error.Abort(str(err))
3008 del fp
3010 del fp
3009 else:
3011 else:
3010 for f in actions['revert'][0]:
3012 for f in actions['revert'][0]:
3011 checkout(f)
3013 checkout(f)
3012 if normal:
3014 if normal:
3013 normal(f)
3015 normal(f)
3014
3016
3015 for f in actions['add'][0]:
3017 for f in actions['add'][0]:
3016 # Don't checkout modified files, they are already created by the diff
3018 # Don't checkout modified files, they are already created by the diff
3017 if f not in newlyaddedandmodifiedfiles:
3019 if f not in newlyaddedandmodifiedfiles:
3018 checkout(f)
3020 checkout(f)
3019 repo.dirstate.add(f)
3021 repo.dirstate.add(f)
3020
3022
3021 normal = repo.dirstate.normallookup
3023 normal = repo.dirstate.normallookup
3022 if node == parent and p2 == nullid:
3024 if node == parent and p2 == nullid:
3023 normal = repo.dirstate.normal
3025 normal = repo.dirstate.normal
3024 for f in actions['undelete'][0]:
3026 for f in actions['undelete'][0]:
3025 checkout(f)
3027 checkout(f)
3026 normal(f)
3028 normal(f)
3027
3029
3028 copied = copies.pathcopies(repo[parent], ctx)
3030 copied = copies.pathcopies(repo[parent], ctx)
3029
3031
3030 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3032 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3031 if f in copied:
3033 if f in copied:
3032 repo.dirstate.copy(copied[f], f)
3034 repo.dirstate.copy(copied[f], f)
3033
3035
3034 class command(registrar.command):
3036 class command(registrar.command):
3035 """deprecated: used registrar.command instead"""
3037 """deprecated: used registrar.command instead"""
3036 def _doregister(self, func, name, *args, **kwargs):
3038 def _doregister(self, func, name, *args, **kwargs):
3037 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3039 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3038 return super(command, self)._doregister(func, name, *args, **kwargs)
3040 return super(command, self)._doregister(func, name, *args, **kwargs)
3039
3041
3040 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3042 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3041 # commands.outgoing. "missing" is "missing" of the result of
3043 # commands.outgoing. "missing" is "missing" of the result of
3042 # "findcommonoutgoing()"
3044 # "findcommonoutgoing()"
3043 outgoinghooks = util.hooks()
3045 outgoinghooks = util.hooks()
3044
3046
3045 # a list of (ui, repo) functions called by commands.summary
3047 # a list of (ui, repo) functions called by commands.summary
3046 summaryhooks = util.hooks()
3048 summaryhooks = util.hooks()
3047
3049
3048 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3050 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3049 #
3051 #
3050 # functions should return tuple of booleans below, if 'changes' is None:
3052 # functions should return tuple of booleans below, if 'changes' is None:
3051 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3053 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3052 #
3054 #
3053 # otherwise, 'changes' is a tuple of tuples below:
3055 # otherwise, 'changes' is a tuple of tuples below:
3054 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3056 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3055 # - (desturl, destbranch, destpeer, outgoing)
3057 # - (desturl, destbranch, destpeer, outgoing)
3056 summaryremotehooks = util.hooks()
3058 summaryremotehooks = util.hooks()
3057
3059
3058 # A list of state files kept by multistep operations like graft.
3060 # A list of state files kept by multistep operations like graft.
3059 # Since graft cannot be aborted, it is considered 'clearable' by update.
3061 # Since graft cannot be aborted, it is considered 'clearable' by update.
3060 # note: bisect is intentionally excluded
3062 # note: bisect is intentionally excluded
3061 # (state file, clearable, allowcommit, error, hint)
3063 # (state file, clearable, allowcommit, error, hint)
3062 unfinishedstates = [
3064 unfinishedstates = [
3063 ('graftstate', True, False, _('graft in progress'),
3065 ('graftstate', True, False, _('graft in progress'),
3064 _("use 'hg graft --continue' or 'hg update' to abort")),
3066 _("use 'hg graft --continue' or 'hg update' to abort")),
3065 ('updatestate', True, False, _('last update was interrupted'),
3067 ('updatestate', True, False, _('last update was interrupted'),
3066 _("use 'hg update' to get a consistent checkout"))
3068 _("use 'hg update' to get a consistent checkout"))
3067 ]
3069 ]
3068
3070
3069 def checkunfinished(repo, commit=False):
3071 def checkunfinished(repo, commit=False):
3070 '''Look for an unfinished multistep operation, like graft, and abort
3072 '''Look for an unfinished multistep operation, like graft, and abort
3071 if found. It's probably good to check this right before
3073 if found. It's probably good to check this right before
3072 bailifchanged().
3074 bailifchanged().
3073 '''
3075 '''
3074 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3076 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3075 if commit and allowcommit:
3077 if commit and allowcommit:
3076 continue
3078 continue
3077 if repo.vfs.exists(f):
3079 if repo.vfs.exists(f):
3078 raise error.Abort(msg, hint=hint)
3080 raise error.Abort(msg, hint=hint)
3079
3081
3080 def clearunfinished(repo):
3082 def clearunfinished(repo):
3081 '''Check for unfinished operations (as above), and clear the ones
3083 '''Check for unfinished operations (as above), and clear the ones
3082 that are clearable.
3084 that are clearable.
3083 '''
3085 '''
3084 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3086 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3085 if not clearable and repo.vfs.exists(f):
3087 if not clearable and repo.vfs.exists(f):
3086 raise error.Abort(msg, hint=hint)
3088 raise error.Abort(msg, hint=hint)
3087 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3089 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3088 if clearable and repo.vfs.exists(f):
3090 if clearable and repo.vfs.exists(f):
3089 util.unlink(repo.vfs.join(f))
3091 util.unlink(repo.vfs.join(f))
3090
3092
3091 afterresolvedstates = [
3093 afterresolvedstates = [
3092 ('graftstate',
3094 ('graftstate',
3093 _('hg graft --continue')),
3095 _('hg graft --continue')),
3094 ]
3096 ]
3095
3097
3096 def howtocontinue(repo):
3098 def howtocontinue(repo):
3097 '''Check for an unfinished operation and return the command to finish
3099 '''Check for an unfinished operation and return the command to finish
3098 it.
3100 it.
3099
3101
3100 afterresolvedstates tuples define a .hg/{file} and the corresponding
3102 afterresolvedstates tuples define a .hg/{file} and the corresponding
3101 command needed to finish it.
3103 command needed to finish it.
3102
3104
3103 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3105 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3104 a boolean.
3106 a boolean.
3105 '''
3107 '''
3106 contmsg = _("continue: %s")
3108 contmsg = _("continue: %s")
3107 for f, msg in afterresolvedstates:
3109 for f, msg in afterresolvedstates:
3108 if repo.vfs.exists(f):
3110 if repo.vfs.exists(f):
3109 return contmsg % msg, True
3111 return contmsg % msg, True
3110 if repo[None].dirty(missing=True, merge=False, branch=False):
3112 if repo[None].dirty(missing=True, merge=False, branch=False):
3111 return contmsg % _("hg commit"), False
3113 return contmsg % _("hg commit"), False
3112 return None, None
3114 return None, None
3113
3115
3114 def checkafterresolved(repo):
3116 def checkafterresolved(repo):
3115 '''Inform the user about the next action after completing hg resolve
3117 '''Inform the user about the next action after completing hg resolve
3116
3118
3117 If there's a matching afterresolvedstates, howtocontinue will yield
3119 If there's a matching afterresolvedstates, howtocontinue will yield
3118 repo.ui.warn as the reporter.
3120 repo.ui.warn as the reporter.
3119
3121
3120 Otherwise, it will yield repo.ui.note.
3122 Otherwise, it will yield repo.ui.note.
3121 '''
3123 '''
3122 msg, warning = howtocontinue(repo)
3124 msg, warning = howtocontinue(repo)
3123 if msg is not None:
3125 if msg is not None:
3124 if warning:
3126 if warning:
3125 repo.ui.warn("%s\n" % msg)
3127 repo.ui.warn("%s\n" % msg)
3126 else:
3128 else:
3127 repo.ui.note("%s\n" % msg)
3129 repo.ui.note("%s\n" % msg)
3128
3130
3129 def wrongtooltocontinue(repo, task):
3131 def wrongtooltocontinue(repo, task):
3130 '''Raise an abort suggesting how to properly continue if there is an
3132 '''Raise an abort suggesting how to properly continue if there is an
3131 active task.
3133 active task.
3132
3134
3133 Uses howtocontinue() to find the active task.
3135 Uses howtocontinue() to find the active task.
3134
3136
3135 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3137 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3136 a hint.
3138 a hint.
3137 '''
3139 '''
3138 after = howtocontinue(repo)
3140 after = howtocontinue(repo)
3139 hint = None
3141 hint = None
3140 if after[1]:
3142 if after[1]:
3141 hint = after[0]
3143 hint = after[0]
3142 raise error.Abort(_('no %s in progress') % task, hint=hint)
3144 raise error.Abort(_('no %s in progress') % task, hint=hint)
3143
3145
3144 class changeset_printer(logcmdutil.changesetprinter):
3146 class changeset_printer(logcmdutil.changesetprinter):
3145
3147
3146 def __init__(self, ui, *args, **kwargs):
3148 def __init__(self, ui, *args, **kwargs):
3147 msg = ("'cmdutil.changeset_printer' is deprecated, "
3149 msg = ("'cmdutil.changeset_printer' is deprecated, "
3148 "use 'logcmdutil.logcmdutil'")
3150 "use 'logcmdutil.logcmdutil'")
3149 ui.deprecwarn(msg, "4.6")
3151 ui.deprecwarn(msg, "4.6")
3150 super(changeset_printer, self).__init__(ui, *args, **kwargs)
3152 super(changeset_printer, self).__init__(ui, *args, **kwargs)
3151
3153
3152 def displaygraph(ui, *args, **kwargs):
3154 def displaygraph(ui, *args, **kwargs):
3153 msg = ("'cmdutil.displaygraph' is deprecated, "
3155 msg = ("'cmdutil.displaygraph' is deprecated, "
3154 "use 'logcmdutil.displaygraph'")
3156 "use 'logcmdutil.displaygraph'")
3155 ui.deprecwarn(msg, "4.6")
3157 ui.deprecwarn(msg, "4.6")
3156 return logcmdutil.displaygraph(ui, *args, **kwargs)
3158 return logcmdutil.displaygraph(ui, *args, **kwargs)
3157
3159
3158 def show_changeset(ui, *args, **kwargs):
3160 def show_changeset(ui, *args, **kwargs):
3159 msg = ("'cmdutil.show_changeset' is deprecated, "
3161 msg = ("'cmdutil.show_changeset' is deprecated, "
3160 "use 'logcmdutil.changesetdisplayer'")
3162 "use 'logcmdutil.changesetdisplayer'")
3161 ui.deprecwarn(msg, "4.6")
3163 ui.deprecwarn(msg, "4.6")
3162 return logcmdutil.changesetdisplayer(ui, *args, **kwargs)
3164 return logcmdutil.changesetdisplayer(ui, *args, **kwargs)
@@ -1,1415 +1,1420 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 hex,
21 hex,
22 nullid,
22 nullid,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirrev,
25 wdirrev,
26 )
26 )
27
27
28 from . import (
28 from . import (
29 encoding,
29 encoding,
30 error,
30 error,
31 match as matchmod,
31 match as matchmod,
32 obsolete,
32 obsolete,
33 obsutil,
33 obsutil,
34 pathutil,
34 pathutil,
35 phases,
35 phases,
36 pycompat,
36 pycompat,
37 revsetlang,
37 revsetlang,
38 similar,
38 similar,
39 url,
39 url,
40 util,
40 util,
41 vfs,
41 vfs,
42 )
42 )
43
43
44 if pycompat.iswindows:
44 if pycompat.iswindows:
45 from . import scmwindows as scmplatform
45 from . import scmwindows as scmplatform
46 else:
46 else:
47 from . import scmposix as scmplatform
47 from . import scmposix as scmplatform
48
48
49 termsize = scmplatform.termsize
49 termsize = scmplatform.termsize
50
50
51 class status(tuple):
51 class status(tuple):
52 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
52 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
53 and 'ignored' properties are only relevant to the working copy.
53 and 'ignored' properties are only relevant to the working copy.
54 '''
54 '''
55
55
56 __slots__ = ()
56 __slots__ = ()
57
57
58 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
58 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
59 clean):
59 clean):
60 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
60 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
61 ignored, clean))
61 ignored, clean))
62
62
63 @property
63 @property
64 def modified(self):
64 def modified(self):
65 '''files that have been modified'''
65 '''files that have been modified'''
66 return self[0]
66 return self[0]
67
67
68 @property
68 @property
69 def added(self):
69 def added(self):
70 '''files that have been added'''
70 '''files that have been added'''
71 return self[1]
71 return self[1]
72
72
73 @property
73 @property
74 def removed(self):
74 def removed(self):
75 '''files that have been removed'''
75 '''files that have been removed'''
76 return self[2]
76 return self[2]
77
77
78 @property
78 @property
79 def deleted(self):
79 def deleted(self):
80 '''files that are in the dirstate, but have been deleted from the
80 '''files that are in the dirstate, but have been deleted from the
81 working copy (aka "missing")
81 working copy (aka "missing")
82 '''
82 '''
83 return self[3]
83 return self[3]
84
84
85 @property
85 @property
86 def unknown(self):
86 def unknown(self):
87 '''files not in the dirstate that are not ignored'''
87 '''files not in the dirstate that are not ignored'''
88 return self[4]
88 return self[4]
89
89
90 @property
90 @property
91 def ignored(self):
91 def ignored(self):
92 '''files not in the dirstate that are ignored (by _dirignore())'''
92 '''files not in the dirstate that are ignored (by _dirignore())'''
93 return self[5]
93 return self[5]
94
94
95 @property
95 @property
96 def clean(self):
96 def clean(self):
97 '''files that have not been modified'''
97 '''files that have not been modified'''
98 return self[6]
98 return self[6]
99
99
100 def __repr__(self, *args, **kwargs):
100 def __repr__(self, *args, **kwargs):
101 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
101 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
102 'unknown=%r, ignored=%r, clean=%r>') % self)
102 'unknown=%r, ignored=%r, clean=%r>') % self)
103
103
104 def itersubrepos(ctx1, ctx2):
104 def itersubrepos(ctx1, ctx2):
105 """find subrepos in ctx1 or ctx2"""
105 """find subrepos in ctx1 or ctx2"""
106 # Create a (subpath, ctx) mapping where we prefer subpaths from
106 # Create a (subpath, ctx) mapping where we prefer subpaths from
107 # ctx1. The subpaths from ctx2 are important when the .hgsub file
107 # ctx1. The subpaths from ctx2 are important when the .hgsub file
108 # has been modified (in ctx2) but not yet committed (in ctx1).
108 # has been modified (in ctx2) but not yet committed (in ctx1).
109 subpaths = dict.fromkeys(ctx2.substate, ctx2)
109 subpaths = dict.fromkeys(ctx2.substate, ctx2)
110 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
110 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
111
111
112 missing = set()
112 missing = set()
113
113
114 for subpath in ctx2.substate:
114 for subpath in ctx2.substate:
115 if subpath not in ctx1.substate:
115 if subpath not in ctx1.substate:
116 del subpaths[subpath]
116 del subpaths[subpath]
117 missing.add(subpath)
117 missing.add(subpath)
118
118
119 for subpath, ctx in sorted(subpaths.iteritems()):
119 for subpath, ctx in sorted(subpaths.iteritems()):
120 yield subpath, ctx.sub(subpath)
120 yield subpath, ctx.sub(subpath)
121
121
122 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
122 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
123 # status and diff will have an accurate result when it does
123 # status and diff will have an accurate result when it does
124 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
124 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
125 # against itself.
125 # against itself.
126 for subpath in missing:
126 for subpath in missing:
127 yield subpath, ctx2.nullsub(subpath, ctx1)
127 yield subpath, ctx2.nullsub(subpath, ctx1)
128
128
129 def nochangesfound(ui, repo, excluded=None):
129 def nochangesfound(ui, repo, excluded=None):
130 '''Report no changes for push/pull, excluded is None or a list of
130 '''Report no changes for push/pull, excluded is None or a list of
131 nodes excluded from the push/pull.
131 nodes excluded from the push/pull.
132 '''
132 '''
133 secretlist = []
133 secretlist = []
134 if excluded:
134 if excluded:
135 for n in excluded:
135 for n in excluded:
136 ctx = repo[n]
136 ctx = repo[n]
137 if ctx.phase() >= phases.secret and not ctx.extinct():
137 if ctx.phase() >= phases.secret and not ctx.extinct():
138 secretlist.append(n)
138 secretlist.append(n)
139
139
140 if secretlist:
140 if secretlist:
141 ui.status(_("no changes found (ignored %d secret changesets)\n")
141 ui.status(_("no changes found (ignored %d secret changesets)\n")
142 % len(secretlist))
142 % len(secretlist))
143 else:
143 else:
144 ui.status(_("no changes found\n"))
144 ui.status(_("no changes found\n"))
145
145
146 def callcatch(ui, func):
146 def callcatch(ui, func):
147 """call func() with global exception handling
147 """call func() with global exception handling
148
148
149 return func() if no exception happens. otherwise do some error handling
149 return func() if no exception happens. otherwise do some error handling
150 and return an exit code accordingly. does not handle all exceptions.
150 and return an exit code accordingly. does not handle all exceptions.
151 """
151 """
152 try:
152 try:
153 try:
153 try:
154 return func()
154 return func()
155 except: # re-raises
155 except: # re-raises
156 ui.traceback()
156 ui.traceback()
157 raise
157 raise
158 # Global exception handling, alphabetically
158 # Global exception handling, alphabetically
159 # Mercurial-specific first, followed by built-in and library exceptions
159 # Mercurial-specific first, followed by built-in and library exceptions
160 except error.LockHeld as inst:
160 except error.LockHeld as inst:
161 if inst.errno == errno.ETIMEDOUT:
161 if inst.errno == errno.ETIMEDOUT:
162 reason = _('timed out waiting for lock held by %r') % inst.locker
162 reason = _('timed out waiting for lock held by %r') % inst.locker
163 else:
163 else:
164 reason = _('lock held by %r') % inst.locker
164 reason = _('lock held by %r') % inst.locker
165 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
165 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
166 if not inst.locker:
166 if not inst.locker:
167 ui.warn(_("(lock might be very busy)\n"))
167 ui.warn(_("(lock might be very busy)\n"))
168 except error.LockUnavailable as inst:
168 except error.LockUnavailable as inst:
169 ui.warn(_("abort: could not lock %s: %s\n") %
169 ui.warn(_("abort: could not lock %s: %s\n") %
170 (inst.desc or inst.filename,
170 (inst.desc or inst.filename,
171 encoding.strtolocal(inst.strerror)))
171 encoding.strtolocal(inst.strerror)))
172 except error.OutOfBandError as inst:
172 except error.OutOfBandError as inst:
173 if inst.args:
173 if inst.args:
174 msg = _("abort: remote error:\n")
174 msg = _("abort: remote error:\n")
175 else:
175 else:
176 msg = _("abort: remote error\n")
176 msg = _("abort: remote error\n")
177 ui.warn(msg)
177 ui.warn(msg)
178 if inst.args:
178 if inst.args:
179 ui.warn(''.join(inst.args))
179 ui.warn(''.join(inst.args))
180 if inst.hint:
180 if inst.hint:
181 ui.warn('(%s)\n' % inst.hint)
181 ui.warn('(%s)\n' % inst.hint)
182 except error.RepoError as inst:
182 except error.RepoError as inst:
183 ui.warn(_("abort: %s!\n") % inst)
183 ui.warn(_("abort: %s!\n") % inst)
184 if inst.hint:
184 if inst.hint:
185 ui.warn(_("(%s)\n") % inst.hint)
185 ui.warn(_("(%s)\n") % inst.hint)
186 except error.ResponseError as inst:
186 except error.ResponseError as inst:
187 ui.warn(_("abort: %s") % inst.args[0])
187 ui.warn(_("abort: %s") % inst.args[0])
188 if not isinstance(inst.args[1], basestring):
188 if not isinstance(inst.args[1], basestring):
189 ui.warn(" %r\n" % (inst.args[1],))
189 ui.warn(" %r\n" % (inst.args[1],))
190 elif not inst.args[1]:
190 elif not inst.args[1]:
191 ui.warn(_(" empty string\n"))
191 ui.warn(_(" empty string\n"))
192 else:
192 else:
193 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
193 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
194 except error.CensoredNodeError as inst:
194 except error.CensoredNodeError as inst:
195 ui.warn(_("abort: file censored %s!\n") % inst)
195 ui.warn(_("abort: file censored %s!\n") % inst)
196 except error.RevlogError as inst:
196 except error.RevlogError as inst:
197 ui.warn(_("abort: %s!\n") % inst)
197 ui.warn(_("abort: %s!\n") % inst)
198 except error.InterventionRequired as inst:
198 except error.InterventionRequired as inst:
199 ui.warn("%s\n" % inst)
199 ui.warn("%s\n" % inst)
200 if inst.hint:
200 if inst.hint:
201 ui.warn(_("(%s)\n") % inst.hint)
201 ui.warn(_("(%s)\n") % inst.hint)
202 return 1
202 return 1
203 except error.WdirUnsupported:
203 except error.WdirUnsupported:
204 ui.warn(_("abort: working directory revision cannot be specified\n"))
204 ui.warn(_("abort: working directory revision cannot be specified\n"))
205 except error.Abort as inst:
205 except error.Abort as inst:
206 ui.warn(_("abort: %s\n") % inst)
206 ui.warn(_("abort: %s\n") % inst)
207 if inst.hint:
207 if inst.hint:
208 ui.warn(_("(%s)\n") % inst.hint)
208 ui.warn(_("(%s)\n") % inst.hint)
209 except ImportError as inst:
209 except ImportError as inst:
210 ui.warn(_("abort: %s!\n") % inst)
210 ui.warn(_("abort: %s!\n") % inst)
211 m = str(inst).split()[-1]
211 m = str(inst).split()[-1]
212 if m in "mpatch bdiff".split():
212 if m in "mpatch bdiff".split():
213 ui.warn(_("(did you forget to compile extensions?)\n"))
213 ui.warn(_("(did you forget to compile extensions?)\n"))
214 elif m in "zlib".split():
214 elif m in "zlib".split():
215 ui.warn(_("(is your Python install correct?)\n"))
215 ui.warn(_("(is your Python install correct?)\n"))
216 except IOError as inst:
216 except IOError as inst:
217 if util.safehasattr(inst, "code"):
217 if util.safehasattr(inst, "code"):
218 ui.warn(_("abort: %s\n") % inst)
218 ui.warn(_("abort: %s\n") % inst)
219 elif util.safehasattr(inst, "reason"):
219 elif util.safehasattr(inst, "reason"):
220 try: # usually it is in the form (errno, strerror)
220 try: # usually it is in the form (errno, strerror)
221 reason = inst.reason.args[1]
221 reason = inst.reason.args[1]
222 except (AttributeError, IndexError):
222 except (AttributeError, IndexError):
223 # it might be anything, for example a string
223 # it might be anything, for example a string
224 reason = inst.reason
224 reason = inst.reason
225 if isinstance(reason, unicode):
225 if isinstance(reason, unicode):
226 # SSLError of Python 2.7.9 contains a unicode
226 # SSLError of Python 2.7.9 contains a unicode
227 reason = encoding.unitolocal(reason)
227 reason = encoding.unitolocal(reason)
228 ui.warn(_("abort: error: %s\n") % reason)
228 ui.warn(_("abort: error: %s\n") % reason)
229 elif (util.safehasattr(inst, "args")
229 elif (util.safehasattr(inst, "args")
230 and inst.args and inst.args[0] == errno.EPIPE):
230 and inst.args and inst.args[0] == errno.EPIPE):
231 pass
231 pass
232 elif getattr(inst, "strerror", None):
232 elif getattr(inst, "strerror", None):
233 if getattr(inst, "filename", None):
233 if getattr(inst, "filename", None):
234 ui.warn(_("abort: %s: %s\n") % (
234 ui.warn(_("abort: %s: %s\n") % (
235 encoding.strtolocal(inst.strerror), inst.filename))
235 encoding.strtolocal(inst.strerror), inst.filename))
236 else:
236 else:
237 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
237 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
238 else:
238 else:
239 raise
239 raise
240 except OSError as inst:
240 except OSError as inst:
241 if getattr(inst, "filename", None) is not None:
241 if getattr(inst, "filename", None) is not None:
242 ui.warn(_("abort: %s: '%s'\n") % (
242 ui.warn(_("abort: %s: '%s'\n") % (
243 encoding.strtolocal(inst.strerror), inst.filename))
243 encoding.strtolocal(inst.strerror), inst.filename))
244 else:
244 else:
245 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
245 ui.warn(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
246 except MemoryError:
246 except MemoryError:
247 ui.warn(_("abort: out of memory\n"))
247 ui.warn(_("abort: out of memory\n"))
248 except SystemExit as inst:
248 except SystemExit as inst:
249 # Commands shouldn't sys.exit directly, but give a return code.
249 # Commands shouldn't sys.exit directly, but give a return code.
250 # Just in case catch this and and pass exit code to caller.
250 # Just in case catch this and and pass exit code to caller.
251 return inst.code
251 return inst.code
252 except socket.error as inst:
252 except socket.error as inst:
253 ui.warn(_("abort: %s\n") % inst.args[-1])
253 ui.warn(_("abort: %s\n") % inst.args[-1])
254
254
255 return -1
255 return -1
256
256
257 def checknewlabel(repo, lbl, kind):
257 def checknewlabel(repo, lbl, kind):
258 # Do not use the "kind" parameter in ui output.
258 # Do not use the "kind" parameter in ui output.
259 # It makes strings difficult to translate.
259 # It makes strings difficult to translate.
260 if lbl in ['tip', '.', 'null']:
260 if lbl in ['tip', '.', 'null']:
261 raise error.Abort(_("the name '%s' is reserved") % lbl)
261 raise error.Abort(_("the name '%s' is reserved") % lbl)
262 for c in (':', '\0', '\n', '\r'):
262 for c in (':', '\0', '\n', '\r'):
263 if c in lbl:
263 if c in lbl:
264 raise error.Abort(_("%r cannot be used in a name") % c)
264 raise error.Abort(_("%r cannot be used in a name") % c)
265 try:
265 try:
266 int(lbl)
266 int(lbl)
267 raise error.Abort(_("cannot use an integer as a name"))
267 raise error.Abort(_("cannot use an integer as a name"))
268 except ValueError:
268 except ValueError:
269 pass
269 pass
270
270
271 def checkfilename(f):
271 def checkfilename(f):
272 '''Check that the filename f is an acceptable filename for a tracked file'''
272 '''Check that the filename f is an acceptable filename for a tracked file'''
273 if '\r' in f or '\n' in f:
273 if '\r' in f or '\n' in f:
274 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
274 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
275
275
276 def checkportable(ui, f):
276 def checkportable(ui, f):
277 '''Check if filename f is portable and warn or abort depending on config'''
277 '''Check if filename f is portable and warn or abort depending on config'''
278 checkfilename(f)
278 checkfilename(f)
279 abort, warn = checkportabilityalert(ui)
279 abort, warn = checkportabilityalert(ui)
280 if abort or warn:
280 if abort or warn:
281 msg = util.checkwinfilename(f)
281 msg = util.checkwinfilename(f)
282 if msg:
282 if msg:
283 msg = "%s: %s" % (msg, util.shellquote(f))
283 msg = "%s: %s" % (msg, util.shellquote(f))
284 if abort:
284 if abort:
285 raise error.Abort(msg)
285 raise error.Abort(msg)
286 ui.warn(_("warning: %s\n") % msg)
286 ui.warn(_("warning: %s\n") % msg)
287
287
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames')
    lowered = val.lower()
    boolval = util.parsebool(val)
    # always abort on Windows, where non-portable names are fatal
    abort = pycompat.iswindows or lowered == 'abort'
    warn = boolval or lowered == 'warn'
    recognized = warn or abort or lowered == 'ignore'
    if boolval is None and not recognized:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
300
300
class casecollisionauditor(object):
    '''Detect case-folding collisions among tracked and newly-added files.

    Calling the instance with a filename warns (or aborts, per the
    ``abort`` flag) when the lowercased name collides with an already
    known file of different case.
    '''
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        folded = encoding.lower(f)
        collides = (folded in self._loweredfiles
                    and f not in self._dirstate)
        if collides:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
324
324
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = hashlib.sha1()
    for rev in revs:
        s.update('%d;' % rev)
    return s.digest()
348
348
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def raiseonroot(err):
        # only errors on the starting directory itself are fatal
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if not (followsym and samestat is not None):
        # symlink tracking needs os.path.samestat; without it, disable
        followsym = False
    else:
        def adddir(statlist, dirname):
            # record dirname's stat; return False if it was seen before
            st = os.stat(dirname)
            for prior in statlist:
                if samestat(st, prior):
                    return False
            statlist.append(st)
            return True

    if followsym and seen_dirs is None:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=raiseonroot):
        dirs.sort()
        if '.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
396
396
def binnode(ctx):
    """Return binary node id for a given basectx"""
    nodeid = ctx.node()
    # the working directory context has no real node; use the sentinel
    return wdirid if nodeid is None else nodeid
403
403
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    r = ctx.rev()
    # the working directory context has no rev; use the sentinel rev
    return wdirrev if r is None else r
411
411
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    ui = ctx.repo().ui
    return formatrevnode(ui, intrev(ctx), binnode(ctx))
417
417
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # full hex hash in debug mode, abbreviated otherwise
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
425
425
def revsingle(repo, revspec, default='.', localalias=None):
    '''Resolve a single revision spec, falling back to ``default`` when
    empty; aborts if the spec resolves to nothing.'''
    # rev 0 is falsy but valid, hence the explicit != 0 check
    if not revspec and revspec != 0:
        return repo[default]

    resolved = revrange(repo, [revspec], localalias=localalias)
    if not resolved:
        raise error.Abort(_('empty revision set'))
    return repo[resolved.last()]
434
434
def _pairspec(revspec):
    # a spec is a "pair" when its top-level operator is any range form
    tree = revsetlang.parse(revspec)
    if not tree:
        return tree
    return tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
438
438
def revpair(repo, revs):
    '''Resolve ``revs`` to a (node, node-or-None) pair for diff-style
    commands; the second element is None when only one rev is meant.'''
    if not revs:
        return repo.dirstate.p1(), None

    resolved = revrange(repo, revs)

    if not resolved:
        first = second = None
    elif resolved.isascending():
        first, second = resolved.min(), resolved.max()
    elif resolved.isdescending():
        first, second = resolved.max(), resolved.min()
    else:
        first, second = resolved.first(), resolved.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
468
468
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    allspecs = [revsetlang.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
496
496
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # a merge: both parents matter
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    onlyparent = parents[0]
    if onlyparent.rev() >= intrev(ctx) - 1:
        # linear history: the parent is implied, nothing meaningful to show
        return []
    return parents
512
512
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kind (glob:, re:, ...): leave untouched
            expanded.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            expanded.extend(globbed)
        else:
            # no match on disk: keep the literal pattern
            expanded.append(kindpat)
    return expanded
531
531
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    if badfn is None:
        def badfn(f, msg):
            # closes over m, which is bound below before any match runs
            ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        pats = []
    return m, pats
556
556
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, discarded = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
561
561
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
565
565
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
569
569
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        # plain path: just canonicalize it
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    matched = [f for f in ctx if m(f)]
    # followlines needs exactly one file; anything else is a user error
    if len(matched) != 1:
        raise error.ParseError(msg)
    return matched[0]
583
583
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    Returns the full path at which the .orig backup for ``filepath``
    should be written.  As a side effect, any file or directory that
    conflicts with that backup path is removed and the backup directory
    is created.
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        # default behavior: backup sits next to the original file
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        # (checked from the top-most ancestor downward; only the first
        # conflicting entry needs removal before makedirs can succeed)
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                break

        origvfs.makedirs(origbackupdir)

    # a directory occupying the exact backup path must also be cleared
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
619
619
620 class _containsnode(object):
620 class _containsnode(object):
621 """proxy __contains__(node) to container.__contains__ which accepts revs"""
621 """proxy __contains__(node) to container.__contains__ which accepts revs"""
622
622
623 def __init__(self, repo, revcontainer):
623 def __init__(self, repo, revcontainer):
624 self._torev = repo.changelog.rev
624 self._torev = repo.changelog.rev
625 self._revcontains = revcontainer.__contains__
625 self._revcontains = revcontainer.__contains__
626
626
627 def __contains__(self, node):
627 def __contains__(self, node):
628 return self._revcontains(self._torev(node))
628 return self._revcontains(self._torev(node))
629
629
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        replacements = {n: () for n in replacements}

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnode, newnodes in replacements.items():
        # caller-supplied moves take precedence over computed ones
        if oldnode in moves:
            continue
        if len(newnodes) > 1:
            # usually a split, take the one with biggest rev number
            newnode = next(unfi.set('max(%ln)', newnodes)).node()
        elif len(newnodes) == 0:
            # move bookmark backwards
            roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                  list(replacements)))
            if roots:
                newnode = roots[0].node()
            else:
                newnode = nullid
        else:
            newnode = newnodes[0]
        moves[oldnode] = newnode

    # all mutations below happen inside a single transaction so a failure
    # rolls back bookmark moves together with obsmarkers/strip
    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        allnewnodes = [n for ns in replacements.values() for n in ns]
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (oldbmarks, hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    # (bookmark, None) means deletion
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the filtering and sorting might belong to createmarkers.
            isobs = unfi.obsstore.successors.__contains__
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0])
            rels = [(unfi[n], tuple(unfi[m] for m in s))
                    for n, s in sorted(replacements.items(), key=sortfunc)
                    if s or not isobs(n)]
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            # obsolescence disabled: physically strip the replaced nodes
            from . import repair # avoid import cycle
            tostrip = list(replacements)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation)
722
722
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    '''Add new files and remove missing ones matched by ``matcher``,
    recursing into subrepos when requested; detects renames by content
    similarity.  Returns 1 if any explicitly-named file was rejected or
    a subrepo reported a problem, 0 otherwise.'''
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    # recurse into subrepos whose paths are touched by the matcher
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            try:
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # collect files the matcher rejects; only explicitly named ones
    # get the user-visible bad() warning
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what will be added/removed (exact matches only in verbose)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    # only touch the dirstate when this is not a dry run
    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            # an explicitly named file was rejected: signal failure
            return 1
    return ret
778
778
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Marks unknown/forgotten files as added and missing ones as removed,
    detecting renames by similarity.  Returns 1 if any of ``files`` was
    rejected by the matcher, 0 otherwise.
    '''
    # note: the lambda closes over `rejected`, which is bound on the next
    # line before the matcher can ever invoke badfn
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    # only narrate the individual adds/removes in verbose mode
    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            # an explicitly listed file was rejected: signal failure
            return 1
    return 0
807
807
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns five lists of paths: (added, unknown, deleted, removed,
    forgotten).'''
    added = []
    unknown = []
    deleted = []
    removed = []
    forgotten = []
    auditor = pathutil.pathauditor(repo.root, cached=True)

    wctx = repo[None]
    ds = repo.dirstate
    walkresults = ds.walk(matcher, subrepos=sorted(wctx.substate),
                          unknown=True, ignored=False, full=False)
    for path, st in walkresults.iteritems():
        state = ds[path]
        # note: the order of these tests matters — an unaudited '?' path
        # with no stat data falls through to the 'deleted' branch
        if state == '?' and auditor.check(path):
            unknown.append(path)
        elif state != 'r' and not st:
            deleted.append(path)
        elif state == 'r' and st:
            forgotten.append(path)
        # for finding renames
        elif state == 'r' and not st:
            removed.append(path)
        elif state == 'a':
            added.append(path)

    return added, unknown, deleted, removed, forgotten
836
836
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a dict mapping new path -> old path. Similarity must be greater
    than zero for any detection to happen; each recorded rename is reported
    unless both ends were matched exactly (or always, in verbose mode).'''
    renames = {}
    if similarity <= 0:
        return renames
    # NOTE(review): relies on a module-level 'similar' — confirm it is
    # imported at the top of this file.
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if (repo.ui.verbose
            or not (matcher.exact(old) and matcher.exact(new))):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
851
851
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.

    All dirstate mutations happen under the working-directory lock.'''
    workingctx = repo[None]
    with repo.wlock():
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for dst, src in renames.iteritems():
            workingctx.copy(src, dst)
861
861
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    ds = repo.dirstate
    origsrc = ds.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if ds[dst] not in 'mn' and not dryrun:
            ds.normallookup(dst)
        return
    if ds[origsrc] == 'a' and origsrc == src:
        # the source has only been added, so there is no committed data
        # to attach copy metadata to; just make sure dst is tracked
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if ds[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
880
880
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirement strings on success; raises
    RequirementError if the file is corrupt or lists unknown features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # a requirement must be a non-empty token starting alphanumerically
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
899
899
def writerequires(opener, requirements):
    """Write the requirement strings, sorted and one per line, via opener."""
    content = ''.join('%s\n' % r for r in sorted(requirements))
    with opener('requires', 'w') as fp:
        fp.write(content)
904
904
class filecachesubentry(object):
    """Tracks the stat state of one file backing a filecache property.

    ``cachestat`` holds the last observed ``util.cachestat`` for ``path``
    (None if the file was missing or never statted). ``_cacheable`` is a
    tri-state: True/False once the filesystem has told us whether stat data
    is reliable for this file, None while still unknown (file missing).
    """
    def __init__(self, path, stat):
        # path: the file to watch; stat: whether to record its state now
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record stat info, but only if caching can work at all
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """Return True if the file appears changed (or cannot be cached)."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None (implicitly) when the file does not exist;
        # any other OS error is propagated
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
959
959
class filecacheentry(object):
    """Aggregates one filecachesubentry per tracked path.

    The entry as a whole counts as changed when any of its sub-entries
    reports a change."""
    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(sub.changed() for sub in self._entries)

    def refresh(self):
        # re-record stat info for every tracked path
        for sub in self._entries:
            sub.refresh()
976
976
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    Invariant maintained by __get__/__set__: a name present in the instance
    __dict__ implies a corresponding entry in obj._filecache.
    '''
    def __init__(self, *paths):
        # paths: file names (relative; resolved per-instance via join())
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # used as a decorator: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__.encode('ascii')
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            # recompute only when any backing file changed on disk
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        # fast path for the next access: cache directly on the instance
        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            # stat=False: an assigned value is not derived from the files,
            # so no point recording their current state
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop only the instance-level fast-path copy; the _filecache
        # entry (and its stat data) is kept for the next __get__
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1055
1055
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    # NOTE(review): this function uses module-level 'subprocess' and 'url'
    # names — confirm both are imported at the top of this file.
    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                                    close_fds=util.closefds,
                                    stdout=subprocess.PIPE, cwd=repo.root)
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[repo[k].rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the child / close the stream, even on parse errors
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        # 'cmd' is always bound here: proc is only set in the shell: branch
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, util.explainexit(proc.returncode)[0]))

    return data
1110
1110
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    """Run cmd via ui.system() with envvar exporting an inheritable token
    for the given (currently held) lock.

    Raises LockInheritanceContractViolation if the lock is not held.
    Note: when the caller supplies 'environ', it is updated in place."""
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    environ = {} if environ is None else environ
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1120
1120
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1129
1129
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta (either knob enables it)
    return any(ui.configbool('format', name)
               for name in ('generaldelta', 'usegeneraldelta'))
1136
1136
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    enabled = ui.configbool('format', 'generaldelta')
    return enabled
1142
1142
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or reuturned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                raise error.CorruptedState(_("%r can't be used as a key")
                                           % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            # a record without '=' makes the dict() call above blow up
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for key, value in data.items():
            if key == self.firstlinekey:
                raise error.ProgrammingError("key name '%s' is reserved"
                                             % self.firstlinekey)
            if not key[0:1].isalpha():
                raise error.ProgrammingError("keys must start with a letter "
                                             "in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError("invalid key name in a simple "
                                             "key-value file")
            if '\n' in value:
                raise error.ProgrammingError("invalid value in a simple "
                                             "key-value file")
            lines.append("%s=%s\n" % (key, value))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1211
1211
# Command names whose transactions should report newly-obsoleted changesets
# on close (matched by prefix against the transaction name).
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# Command names whose transactions should report the range of new changesets
# pulled in (matched by prefix against the transaction name).
_reportnewcssource = [
    'pull',
    'unbundle',
]

# a list of (repo, ctx, files) functions called by various commands to allow
# extensions to ensure the corresponding files are available locally, before the
# command uses them.
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1227
1232
1228 def registersummarycallback(repo, otr, txnname=''):
1233 def registersummarycallback(repo, otr, txnname=''):
1229 """register a callback to issue a summary after the transaction is closed
1234 """register a callback to issue a summary after the transaction is closed
1230 """
1235 """
1231 def txmatch(sources):
1236 def txmatch(sources):
1232 return any(txnname.startswith(source) for source in sources)
1237 return any(txnname.startswith(source) for source in sources)
1233
1238
1234 categories = []
1239 categories = []
1235
1240
1236 def reportsummary(func):
1241 def reportsummary(func):
1237 """decorator for report callbacks."""
1242 """decorator for report callbacks."""
1238 # The repoview life cycle is shorter than the one of the actual
1243 # The repoview life cycle is shorter than the one of the actual
1239 # underlying repository. So the filtered object can die before the
1244 # underlying repository. So the filtered object can die before the
1240 # weakref is used leading to troubles. We keep a reference to the
1245 # weakref is used leading to troubles. We keep a reference to the
1241 # unfiltered object and restore the filtering when retrieving the
1246 # unfiltered object and restore the filtering when retrieving the
1242 # repository through the weakref.
1247 # repository through the weakref.
1243 filtername = repo.filtername
1248 filtername = repo.filtername
1244 reporef = weakref.ref(repo.unfiltered())
1249 reporef = weakref.ref(repo.unfiltered())
1245 def wrapped(tr):
1250 def wrapped(tr):
1246 repo = reporef()
1251 repo = reporef()
1247 if filtername:
1252 if filtername:
1248 repo = repo.filtered(filtername)
1253 repo = repo.filtered(filtername)
1249 func(repo, tr)
1254 func(repo, tr)
1250 newcat = '%02i-txnreport' % len(categories)
1255 newcat = '%02i-txnreport' % len(categories)
1251 otr.addpostclose(newcat, wrapped)
1256 otr.addpostclose(newcat, wrapped)
1252 categories.append(newcat)
1257 categories.append(newcat)
1253 return wrapped
1258 return wrapped
1254
1259
1255 if txmatch(_reportobsoletedsource):
1260 if txmatch(_reportobsoletedsource):
1256 @reportsummary
1261 @reportsummary
1257 def reportobsoleted(repo, tr):
1262 def reportobsoleted(repo, tr):
1258 obsoleted = obsutil.getobsoleted(repo, tr)
1263 obsoleted = obsutil.getobsoleted(repo, tr)
1259 if obsoleted:
1264 if obsoleted:
1260 repo.ui.status(_('obsoleted %i changesets\n')
1265 repo.ui.status(_('obsoleted %i changesets\n')
1261 % len(obsoleted))
1266 % len(obsoleted))
1262
1267
1263 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1268 if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
1264 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1269 repo.ui.configbool('experimental', 'evolution.report-instabilities')):
1265 instabilitytypes = [
1270 instabilitytypes = [
1266 ('orphan', 'orphan'),
1271 ('orphan', 'orphan'),
1267 ('phase-divergent', 'phasedivergent'),
1272 ('phase-divergent', 'phasedivergent'),
1268 ('content-divergent', 'contentdivergent'),
1273 ('content-divergent', 'contentdivergent'),
1269 ]
1274 ]
1270
1275
1271 def getinstabilitycounts(repo):
1276 def getinstabilitycounts(repo):
1272 filtered = repo.changelog.filteredrevs
1277 filtered = repo.changelog.filteredrevs
1273 counts = {}
1278 counts = {}
1274 for instability, revset in instabilitytypes:
1279 for instability, revset in instabilitytypes:
1275 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1280 counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
1276 filtered)
1281 filtered)
1277 return counts
1282 return counts
1278
1283
1279 oldinstabilitycounts = getinstabilitycounts(repo)
1284 oldinstabilitycounts = getinstabilitycounts(repo)
1280 @reportsummary
1285 @reportsummary
1281 def reportnewinstabilities(repo, tr):
1286 def reportnewinstabilities(repo, tr):
1282 newinstabilitycounts = getinstabilitycounts(repo)
1287 newinstabilitycounts = getinstabilitycounts(repo)
1283 for instability, revset in instabilitytypes:
1288 for instability, revset in instabilitytypes:
1284 delta = (newinstabilitycounts[instability] -
1289 delta = (newinstabilitycounts[instability] -
1285 oldinstabilitycounts[instability])
1290 oldinstabilitycounts[instability])
1286 if delta > 0:
1291 if delta > 0:
1287 repo.ui.warn(_('%i new %s changesets\n') %
1292 repo.ui.warn(_('%i new %s changesets\n') %
1288 (delta, instability))
1293 (delta, instability))
1289
1294
1290 if txmatch(_reportnewcssource):
1295 if txmatch(_reportnewcssource):
1291 @reportsummary
1296 @reportsummary
1292 def reportnewcs(repo, tr):
1297 def reportnewcs(repo, tr):
1293 """Report the range of new revisions pulled/unbundled."""
1298 """Report the range of new revisions pulled/unbundled."""
1294 newrevs = tr.changes.get('revs', xrange(0, 0))
1299 newrevs = tr.changes.get('revs', xrange(0, 0))
1295 if not newrevs:
1300 if not newrevs:
1296 return
1301 return
1297
1302
1298 # Compute the bounds of new revisions' range, excluding obsoletes.
1303 # Compute the bounds of new revisions' range, excluding obsoletes.
1299 unfi = repo.unfiltered()
1304 unfi = repo.unfiltered()
1300 revs = unfi.revs('%ld and not obsolete()', newrevs)
1305 revs = unfi.revs('%ld and not obsolete()', newrevs)
1301 if not revs:
1306 if not revs:
1302 # Got only obsoletes.
1307 # Got only obsoletes.
1303 return
1308 return
1304 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1309 minrev, maxrev = repo[revs.min()], repo[revs.max()]
1305
1310
1306 if minrev == maxrev:
1311 if minrev == maxrev:
1307 revrange = minrev
1312 revrange = minrev
1308 else:
1313 else:
1309 revrange = '%s:%s' % (minrev, maxrev)
1314 revrange = '%s:%s' % (minrev, maxrev)
1310 repo.ui.status(_('new changesets %s\n') % revrange)
1315 repo.ui.status(_('new changesets %s\n') % revrange)
1311
1316
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line summary of *nodes* rendered as short hashes.

    Every node is listed when there are at most *maxnumnodes* of them, or
    when the ui is verbose. Otherwise the listing is truncated to the first
    *maxnumnodes* entries and the number of omitted nodes is appended.
    """
    if len(nodes) <= maxnumnodes or repo.ui.verbose:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    omitted = len(nodes) - maxnumnodes
    return _("%s and %d others") % (shown, omitted)
1317
1322
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    # strip/repair legitimately rewrite history; skip the check for them
    if desc in ('strip', 'repair'):
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % name
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1332
1337
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.

    The default implementation is a no-op hook point: it hands the sink
    back unchanged. Extensions override this function to decorate the sink.
    """
    return sink
1338
1343
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access is experimental and opt-in; it is only meaningful on a
    # repo filtered with one of the "visible" views
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo
    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    # gather every hash-like symbol mentioned in the user-supplied specs
    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue
        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)
    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[r]) for r in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use a new filtername to keep the branch/tags caches separate
    # until these caches can be disabled when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1381
1386
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    pmatch = unficl._partialmatch
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for symbol in symbols:
        # first interpretation: a plain integer revision number
        try:
            num = int(symbol)
        except ValueError:
            num = None
        if num is not None and num <= tiprev:
            # a number within range never falls through to hash matching;
            # it is only recorded when revnum access is enabled and the
            # revision is hidden (absent from the filtered changelog)
            if allowrevnums and num not in cl:
                revs.add(num)
            continue

        # second interpretation: a (possibly partial) node hash
        try:
            node = pmatch(symbol)
        except error.LookupError:
            node = None
        if node is None:
            continue
        rev = unficl.rev(node)
        if rev not in cl:
            revs.add(rev)

    return revs
General Comments 0
You need to be logged in to leave comments. Login now