##// END OF EJS Templates
narrow: move remaining narrow-limited dirstate walks to core...
Martin von Zweigbergk -
r40123:1d09ba0d default
parent child Browse files
Show More
@@ -1,16 +1,24 b''
1 Integration with the share extension needs improvement. Right now
1 Integration with the share extension needs improvement. Right now
2 we've seen some odd bugs.
2 we've seen some odd bugs.
3
3
4 Address commentary in manifest.excludedmanifestrevlog.add -
4 Address commentary in manifest.excludedmanifestrevlog.add -
5 specifically we should improve the collaboration with core so that
5 specifically we should improve the collaboration with core so that
6 add() never gets called on an excluded directory and we can improve
6 add() never gets called on an excluded directory and we can improve
7 the stand-in to raise a ProgrammingError.
7 the stand-in to raise a ProgrammingError.
8
8
9 Reason more completely about rename-filtering logic in
9 Reason more completely about rename-filtering logic in
10 narrowfilelog. There could be some surprises lurking there.
10 narrowfilelog. There could be some surprises lurking there.
11
11
12 Formally document the narrowspec format. For bonus points, unify with the
12 Formally document the narrowspec format. For bonus points, unify with the
13 server-specified narrowspec format.
13 server-specified narrowspec format.
14
14
15 narrowrepo.setnarrowpats() or narrowspec.save() need to make sure
15 narrowrepo.setnarrowpats() or narrowspec.save() need to make sure
16 they're holding the wlock.
16 they're holding the wlock.
17
18 The following places do an unrestricted dirstate walk (including files outside the
19 narrowspec). Some of them should perhaps not do that.
20
21 * debugfileset
22 * perfwalk
23 * sparse (but restricted to sparse config)
24 * largefiles
@@ -1,70 +1,63 b''
1 # narrowdirstate.py - extensions to mercurial dirstate to support narrow clones
1 # narrowdirstate.py - extensions to mercurial dirstate to support narrow clones
2 #
2 #
3 # Copyright 2017 Google, Inc.
3 # Copyright 2017 Google, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import (
11 from mercurial import (
12 error,
12 error,
13 )
13 )
14
14
def wrapdirstate(repo, dirstate):
    """Add narrow spec dirstate ignore, block changes outside narrow spec.

    Patches *dirstate* in place (its __class__ is swapped for a
    narrow-aware subclass) and returns the same object.
    """

    def _editfunc(fn):
        # Decorator for dirstate mutators: abort when any filename
        # argument is outside the narrowspec and not already tracked.
        def _wrapper(self, *args):
            narrowmatch = repo.narrowmatch()
            for f in args:
                if f is not None and not narrowmatch(f) and f not in self:
                    raise error.Abort(_("cannot track '%s' - it is outside " +
                        "the narrow clone") % f)
            return fn(self, *args)
        return _wrapper

    class narrowdirstate(dirstate.__class__):
        # Prevent adding/editing/copying/deleting files that are outside the
        # sparse checkout
        @_editfunc
        def normal(self, *args):
            return super(narrowdirstate, self).normal(*args)

        @_editfunc
        def add(self, *args):
            return super(narrowdirstate, self).add(*args)

        @_editfunc
        def normallookup(self, *args):
            return super(narrowdirstate, self).normallookup(*args)

        @_editfunc
        def copy(self, *args):
            return super(narrowdirstate, self).copy(*args)

        @_editfunc
        def remove(self, *args):
            return super(narrowdirstate, self).remove(*args)

        @_editfunc
        def merge(self, *args):
            return super(narrowdirstate, self).merge(*args)

        def rebuild(self, parent, allfiles, changedfiles=None):
            if changedfiles is None:
                # Rebuilding entire dirstate, let's filter allfiles to match the
                # narrowspec.
                allfiles = [f for f in allfiles if repo.narrowmatch()(f)]
            super(narrowdirstate, self).rebuild(parent, allfiles, changedfiles)

    dirstate.__class__ = narrowdirstate
    return dirstate
@@ -1,3312 +1,3313 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13
13
14 from .i18n import _
14 from .i18n import _
15 from .node import (
15 from .node import (
16 hex,
16 hex,
17 nullid,
17 nullid,
18 nullrev,
18 nullrev,
19 short,
19 short,
20 )
20 )
21
21
22 from . import (
22 from . import (
23 bookmarks,
23 bookmarks,
24 changelog,
24 changelog,
25 copies,
25 copies,
26 crecord as crecordmod,
26 crecord as crecordmod,
27 dirstateguard,
27 dirstateguard,
28 encoding,
28 encoding,
29 error,
29 error,
30 formatter,
30 formatter,
31 logcmdutil,
31 logcmdutil,
32 match as matchmod,
32 match as matchmod,
33 merge as mergemod,
33 merge as mergemod,
34 mergeutil,
34 mergeutil,
35 obsolete,
35 obsolete,
36 patch,
36 patch,
37 pathutil,
37 pathutil,
38 phases,
38 phases,
39 pycompat,
39 pycompat,
40 revlog,
40 revlog,
41 rewriteutil,
41 rewriteutil,
42 scmutil,
42 scmutil,
43 smartset,
43 smartset,
44 subrepoutil,
44 subrepoutil,
45 templatekw,
45 templatekw,
46 templater,
46 templater,
47 util,
47 util,
48 vfs as vfsmod,
48 vfs as vfsmod,
49 )
49 )
50
50
51 from .utils import (
51 from .utils import (
52 dateutil,
52 dateutil,
53 stringutil,
53 stringutil,
54 )
54 )
55
55
stringio = util.stringio

# templates of common command options

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

confirmopts = [
    ('', 'confirm', None,
     _('ask before applying actions')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

formatteropts = [
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('Z', 'ignore-space-at-eol', None,
     _('ignore changes in whitespace at EOL')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
174
174
def ishunk(x):
    """Return True if *x* is a record or crecord hunk instance."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
178
178
def newandmodified(chunks, originalchunks):
    """Return the set of filenames whose hunks create a new file and
    were not part of *originalchunks* (i.e. were edited interactively).
    """
    return set(c.header.filename() for c in chunks
               if ishunk(c) and c.header.isnewfile()
               and c not in originalchunks)
186
186
def parsealiases(cmd):
    """Return the list of aliases encoded in a command spec.

    A leading '^' (the "important command" marker) is stripped and the
    remainder is split on '|'.
    """
    spec = cmd.lstrip("^")
    return spec.split("|")
189
189
def setupwrapcolorwrite(ui):
    """Replace ui.write with a wrapper that labels diff output so it can
    be colorized.

    Returns the original ui.write so the caller can restore it.
    """
    oldwrite = ui.write

    def labeledwrite(*args, **kwargs):
        label = kwargs.pop(r'label', '')
        for chunk, l in patch.difflabel(lambda: args):
            oldwrite(chunk, label=label + l)

    setattr(ui, 'write', labeledwrite)
    return oldwrite
202
202
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter *originalhunks*, via curses when enabled.

    Falls back to the plain-text prompt when curses is disabled or when
    the curses selector raises fallbackerror.
    """
    if usecurses:
        try:
            if testfile:
                recordfn = crecordmod.testdecorator(
                    testfile, crecordmod.testchunkselector)
            else:
                recordfn = crecordmod.chunkselector
            return crecordmod.filterpatch(ui, originalhunks, recordfn,
                                          operation)
        except crecordmod.fallbackerror as e:
            ui.warn('%s\n' % e.message)
            ui.warn(_('falling back to text mode\n'))

    return patch.filterpatch(ui, originalhunks, operation)
219
219
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to select a subset of *originalhunks*.

    *operation* is used to build the ui messages telling the user what
    kind of filtering is being done: reverting, committing, shelving,
    etc. (see patch.filterpatch).  Returns (selectedchunks, newopts).
    """
    testfile = ui.config('experimental', 'crecordtest')
    usecurses = crecordmod.checkcurses(ui)
    # ui.write is wrapped for the duration so diff output is colorized.
    oldwrite = setupwrapcolorwrite(ui)
    try:
        return filterchunks(ui, originalhunks, usecurses, testfile,
                            operation)
    finally:
        ui.write = oldwrite
236
236
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes and delegate the commit to *commitfunc*.

    Aborts when the ui is non-interactive (suggesting *cmdsuggest* when
    given).  *filterfn* is called to let the user pick hunks; *backupall*
    forces backing up every changed file rather than only those touched
    by the selection.
    """
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        # partial commit of a merge is not supported
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # a git-style diff with function context is required so hunks can
        # be parsed and re-applied individually
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # non-hunk chunks (e.g. headers) have no files()
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = pycompat.mkstemp(prefix=f.replace('/', '_') + '.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # assemble the selected hunks into one patch
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        action="diff",
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(pycompat.bytestr(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup; leftover backups are harmless
                pass

    def recordinwlock(ui, repo, message, match, opts):
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
418
418
419 class dirnode(object):
419 class dirnode(object):
420 """
420 """
421 Represent a directory in user working copy with information required for
421 Represent a directory in user working copy with information required for
422 the purpose of tersing its status.
422 the purpose of tersing its status.
423
423
424 path is the path to the directory, without a trailing '/'
424 path is the path to the directory, without a trailing '/'
425
425
426 statuses is a set of statuses of all files in this directory (this includes
426 statuses is a set of statuses of all files in this directory (this includes
427 all the files in all the subdirectories too)
427 all the files in all the subdirectories too)
428
428
429 files is a list of files which are direct child of this directory
429 files is a list of files which are direct child of this directory
430
430
431 subdirs is a dictionary of sub-directory name as the key and it's own
431 subdirs is a dictionary of sub-directory name as the key and it's own
432 dirnode object as the value
432 dirnode object as the value
433 """
433 """
434
434
    def __init__(self, dirpath):
        """Create an empty node for the directory at *dirpath*."""
        # path to this directory, without a trailing '/'
        self.path = dirpath
        # union of statuses of every file beneath this directory
        self.statuses = set([])
        # (filename, status) pairs for direct children
        self.files = []
        # subdirectory name -> dirnode object
        self.subdirs = {}
440
440
441 def _addfileindir(self, filename, status):
441 def _addfileindir(self, filename, status):
442 """Add a file in this directory as a direct child."""
442 """Add a file in this directory as a direct child."""
443 self.files.append((filename, status))
443 self.files.append((filename, status))
444
444
445 def addfile(self, filename, status):
445 def addfile(self, filename, status):
446 """
446 """
447 Add a file to this directory or to its direct parent directory.
447 Add a file to this directory or to its direct parent directory.
448
448
449 If the file is not direct child of this directory, we traverse to the
449 If the file is not direct child of this directory, we traverse to the
450 directory of which this file is a direct child of and add the file
450 directory of which this file is a direct child of and add the file
451 there.
451 there.
452 """
452 """
453
453
454 # the filename contains a path separator, it means it's not the direct
454 # the filename contains a path separator, it means it's not the direct
455 # child of this directory
455 # child of this directory
456 if '/' in filename:
456 if '/' in filename:
457 subdir, filep = filename.split('/', 1)
457 subdir, filep = filename.split('/', 1)
458
458
459 # does the dirnode object for subdir exists
459 # does the dirnode object for subdir exists
460 if subdir not in self.subdirs:
460 if subdir not in self.subdirs:
461 subdirpath = pathutil.join(self.path, subdir)
461 subdirpath = pathutil.join(self.path, subdir)
462 self.subdirs[subdir] = dirnode(subdirpath)
462 self.subdirs[subdir] = dirnode(subdirpath)
463
463
464 # try adding the file in subdir
464 # try adding the file in subdir
465 self.subdirs[subdir].addfile(filep, status)
465 self.subdirs[subdir].addfile(filep, status)
466
466
467 else:
467 else:
468 self._addfileindir(filename, status)
468 self._addfileindir(filename, status)
469
469
470 if status not in self.statuses:
470 if status not in self.statuses:
471 self.statuses.add(status)
471 self.statuses.add(status)
472
472
473 def iterfilepaths(self):
473 def iterfilepaths(self):
474 """Yield (status, path) for files directly under this directory."""
474 """Yield (status, path) for files directly under this directory."""
475 for f, st in self.files:
475 for f, st in self.files:
476 yield st, pathutil.join(self.path, f)
476 yield st, pathutil.join(self.path, f)
477
477
478 def tersewalk(self, terseargs):
478 def tersewalk(self, terseargs):
479 """
479 """
480 Yield (status, path) obtained by processing the status of this
480 Yield (status, path) obtained by processing the status of this
481 dirnode.
481 dirnode.
482
482
483 terseargs is the string of arguments passed by the user with `--terse`
483 terseargs is the string of arguments passed by the user with `--terse`
484 flag.
484 flag.
485
485
486 Following are the cases which can happen:
486 Following are the cases which can happen:
487
487
488 1) All the files in the directory (including all the files in its
488 1) All the files in the directory (including all the files in its
489 subdirectories) share the same status and the user has asked us to terse
489 subdirectories) share the same status and the user has asked us to terse
490 that status. -> yield (status, dirpath). dirpath will end in '/'.
490 that status. -> yield (status, dirpath). dirpath will end in '/'.
491
491
492 2) Otherwise, we do following:
492 2) Otherwise, we do following:
493
493
494 a) Yield (status, filepath) for all the files which are in this
494 a) Yield (status, filepath) for all the files which are in this
495 directory (only the ones in this directory, not the subdirs)
495 directory (only the ones in this directory, not the subdirs)
496
496
497 b) Recurse the function on all the subdirectories of this
497 b) Recurse the function on all the subdirectories of this
498 directory
498 directory
499 """
499 """
500
500
501 if len(self.statuses) == 1:
501 if len(self.statuses) == 1:
502 onlyst = self.statuses.pop()
502 onlyst = self.statuses.pop()
503
503
504 # Making sure we terse only when the status abbreviation is
504 # Making sure we terse only when the status abbreviation is
505 # passed as terse argument
505 # passed as terse argument
506 if onlyst in terseargs:
506 if onlyst in terseargs:
507 yield onlyst, self.path + '/'
507 yield onlyst, self.path + '/'
508 return
508 return
509
509
510 # add the files to status list
510 # add the files to status list
511 for st, fpath in self.iterfilepaths():
511 for st, fpath in self.iterfilepaths():
512 yield st, fpath
512 yield st, fpath
513
513
514 #recurse on the subdirs
514 #recurse on the subdirs
515 for dirobj in self.subdirs.values():
515 for dirobj in self.subdirs.values():
516 for st, fpath in dirobj.tersewalk(terseargs):
516 for st, fpath in dirobj.tersewalk(terseargs):
517 yield st, fpath
517 yield st, fpath
518
518
def tersedir(statuslist, terseargs):
    """
    Collapse per-file statuses into per-directory entries where possible.

    statuslist is a scmutil.status() object which contains one list of
    files per status.  terseargs is the string the user passed to the
    `--terse` flag; only statuses whose abbreviation appears in it may be
    tersed.

    A tree of dirnode objects is built recording, at every node, the
    statuses seen underneath it; walking that tree yields one sorted list
    of paths per status code, in the fixed order m, a, r, d, u, i, c
    (a path ending in '/' stands for every file below that directory).
    """
    # the order matters here as that is used to produce the final list
    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # reject unknown status abbreviations up front
    for s in pycompat.bytestr(terseargs):
        if s not in allst:
            raise error.Abort(_("'%s' not recognized") % s)

    # dirnode for the root of the repository
    root = dirnode('')
    pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
               'ignored', 'removed')

    bystatus = {}
    for attrname in pstatus:
        statuschar = attrname[0:1]
        for f in getattr(statuslist, attrname):
            root.addfile(f, statuschar)
        bystatus[statuschar] = []

    # the root dir itself is never tersed, so its direct files go in as-is
    for st, fpath in root.iterfilepaths():
        bystatus[st].append(fpath)

    # terse every subdirectory of the root
    for child in root.subdirs.values():
        for st, f in child.tersewalk(terseargs):
            bystatus[st].append(f)

    return [sorted(bystatus[st]) for st in allst]
567
567
568 def _commentlines(raw):
568 def _commentlines(raw):
569 '''Surround lineswith a comment char and a new line'''
569 '''Surround lineswith a comment char and a new line'''
570 lines = raw.splitlines()
570 lines = raw.splitlines()
571 commentedlines = ['# %s' % line for line in lines]
571 commentedlines = ['# %s' % line for line in lines]
572 return '\n'.join(commentedlines) + '\n'
572 return '\n'.join(commentedlines) + '\n'
573
573
def _conflictsmsg(repo):
    """Return a commented summary of unresolved merge conflicts.

    Returns None when no merge is in progress.
    """
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    matcher = scmutil.match(repo[None])
    unresolved = [f for f in mergestate.unresolved() if matcher(f)]
    if not unresolved:
        msg = _('No unresolved merge conflicts.')
    else:
        paths = [' %s' % util.pathto(repo.root, encoding.getcwd(), path)
                 for path in unresolved]
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE''') % '\n'.join(paths)

    return _commentlines(msg)
594
594
def _helpmessage(continuecmd, abortcmd):
    """Return a commented two-line hint: how to continue, how to abort."""
    return _commentlines(_('To continue: %s\n'
                           'To abort: %s') % (continuecmd, abortcmd))
599
599
def _rebasemsg():
    """Hint shown while a rebase is interrupted."""
    return _helpmessage('hg rebase --continue', 'hg rebase --abort')

def _histeditmsg():
    """Hint shown while a histedit is interrupted."""
    return _helpmessage('hg histedit --continue', 'hg histedit --abort')

def _unshelvemsg():
    """Hint shown while an unshelve is interrupted."""
    return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')

def _graftmsg():
    """Hint shown while a graft is interrupted."""
    return _helpmessage('hg graft --continue', 'hg graft --abort')

def _mergemsg():
    """Hint shown while a merge is uncommitted."""
    return _helpmessage('hg commit', 'hg merge --abort')

def _bisectmsg():
    """Hint shown while a bisect is in progress."""
    return _commentlines(_('To mark the changeset good: hg bisect --good\n'
                           'To mark the changeset bad: hg bisect --bad\n'
                           'To abort: hg bisect --reset\n'))
622
622
def fileexistspredicate(filename):
    """Return a predicate testing whether `filename` exists in repo.vfs."""
    def predicate(repo):
        return repo.vfs.exists(filename)
    return predicate
625
625
def _mergepredicate(repo):
    """True when the working directory has two parents (uncommitted merge)."""
    parents = repo[None].parents()
    return len(parents) > 1
628
628
# Table of detectable unfinished-operation states, scanned in order by
# _getrepostate(); the first entry whose predicate fires wins.
STATES = (
    # (state, predicate to detect states, helpful message function)
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('shelvedstate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # They need to be last because some of the other unfinished states may also
    # be in a merge or update state (eg. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)
642
642
def _getrepostate(repo):
    """Return (state, predicate, msgfn) for the first unfinished state
    detected in repo, or None when nothing special is going on.

    States named in the commands.status.skipstates config are ignored.
    """
    # experimental config: commands.status.skipstates
    skip = set(repo.ui.configlist('commands', 'status.skipstates'))
    for state, statedetectionpredicate, msgfn in STATES:
        if state in skip or not statedetectionpredicate(repo):
            continue
        return (state, statedetectionpredicate, msgfn)
651
651
def morestatus(repo, fm):
    """Write extra status output to the formatter fm.

    Emits the unfinished-operation banner, any unresolved-conflict summary
    and a how-to-proceed hint; does nothing when the repository is not in
    an unfinished state.
    """
    statetuple = _getrepostate(repo)
    if not statetuple:
        return

    label = 'status.morestatus'
    state, statedetectionpredicate, helpfulmsg = statetuple
    statemsg = _('The repository is in an unfinished *%s* state.') % state
    fm.plain('%s\n' % _commentlines(statemsg), label=label)
    conmsg = _conflictsmsg(repo)
    if conmsg:
        fm.plain('%s\n' % conmsg, label=label)
    if helpfulmsg:
        fm.plain('%s\n' % helpfulmsg(), label=label)
665
665
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)

        # exact alias match first; otherwise (non-strict) any prefix match
        if cmd in aliases:
            found = cmd
        elif not strict:
            found = next((alias for alias in aliases
                          if alias.startswith(cmd)), None)
        else:
            found = None

        if found is None:
            continue
        if aliases[0].startswith("debug") or found.startswith("debug"):
            debugchoice[found] = (aliases, table[entry])
        else:
            choice[found] = (aliases, table[entry])

    # fall back to debug commands only when nothing else matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
703
703
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # exact match wins immediately
    if cmd in choice:
        return choice[cmd]

    # several prefix matches is ambiguous
    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    # exactly one prefix match
    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
719
719
def changebranch(ui, repo, revs, label):
    """ Change the branch name of given revs to label

    Rewrites each changeset in revs with the new branch name, creates
    obsmarkers and moves bookmarks accordingly, and updates the working
    copy if it was based on a rewritten changeset.  Raises error.Abort
    when a precondition is violated (dirty wdir, non-linear revs, name
    clash, merge/obsolete changesets, or a mid-stack selection).
    """

    with repo.wlock(), repo.lock(), repo.transaction('branches'):
        # abort in case of uncommitted merge or dirty wdir
        bailifchanged(repo)
        revs = scmutil.revrange(repo, revs)
        if not revs:
            # NOTE(review): message is not _()-wrapped, unlike the aborts
            # below — looks like an oversight; confirm before translating.
            raise error.Abort("empty revision set")
        roots = repo.revs('roots(%ld)', revs)
        if len(roots) > 1:
            raise error.Abort(_("cannot change branch of non-linear revisions"))
        rewriteutil.precheck(repo, revs, 'change branch of')

        root = repo[roots.first()]
        # reusing the parent's branch name is allowed; any other existing
        # branch name is a clash ("not ... == label" binds before "and")
        if not root.p1().branch() == label and label in repo.branchmap():
            raise error.Abort(_("a branch of the same name already exists"))

        if repo.revs('merge() and %ld', revs):
            raise error.Abort(_("cannot change branch of a merge commit"))
        if repo.revs('obsolete() and %ld', revs):
            raise error.Abort(_("cannot change branch of a obsolete changeset"))

        # make sure only topological heads
        if repo.revs('heads(%ld) - head()', revs):
            raise error.Abort(_("cannot change branch in middle of a stack"))

        # maps old node -> (new node,) for every rewritten changeset
        replacements = {}
        # avoid import cycle mercurial.cmdutil -> mercurial.context ->
        # mercurial.subrepo -> mercurial.cmdutil
        from . import context
        for rev in revs:
            ctx = repo[rev]
            oldbranch = ctx.branch()
            # check if ctx has same branch
            if oldbranch == label:
                continue

            # closes over the loop's ctx; safe because commitctx() is
            # invoked within the same iteration (the inner 'repo'
            # parameter intentionally shadows the outer one)
            def filectxfn(repo, newctx, path):
                try:
                    return ctx[path]
                except error.ManifestLookupError:
                    return None

            ui.debug("changing branch of '%s' from '%s' to '%s'\n"
                     % (hex(ctx.node()), oldbranch, label))
            extra = ctx.extra()
            extra['branch_change'] = hex(ctx.node())
            # While changing branch of set of linear commits, make sure that
            # we base our commits on new parent rather than old parent which
            # was obsoleted while changing the branch
            p1 = ctx.p1().node()
            p2 = ctx.p2().node()
            if p1 in replacements:
                p1 = replacements[p1][0]
            if p2 in replacements:
                p2 = replacements[p2][0]

            mc = context.memctx(repo, (p1, p2),
                                ctx.description(),
                                ctx.files(),
                                filectxfn,
                                user=ctx.user(),
                                date=ctx.date(),
                                extra=extra,
                                branch=label)

            newnode = repo.commitctx(mc)
            replacements[ctx.node()] = (newnode,)
            ui.debug('new node id is %s\n' % hex(newnode))

        # create obsmarkers and move bookmarks
        scmutil.cleanupnodes(repo, replacements, 'branch-change', fixphase=True)

        # move the working copy too
        wctx = repo[None]
        # in-progress merge is a bit too complex for now.
        if len(wctx.parents()) == 1:
            newid = replacements.get(wctx.p1().node())
            if newid is not None:
                # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                # mercurial.cmdutil
                from . import hg
                hg.update(repo, newid[0], quietempty=True)

        ui.status(_("changed branch on %d changesets\n") % len(replacements))
806
806
def findrepo(p):
    """Walk up from directory p looking for a '.hg' directory.

    Returns the path of the repository root containing p, or None when
    the filesystem root is reached without finding one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # reached the filesystem root without finding a repo
            return None
        p = parent

    return p
814
814
def bailifchanged(repo, merge=True, hint=None):
    """ Abort unless the working directory is clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.

    Subrepositories are checked recursively.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # modified, added, removed, deleted — any of them means a dirty wdir
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)
832
832
def logmessage(ui, opts):
    """ Return the commit message implied by the -m and -l options.

    Aborts when both are given; reads from stdin when the logfile is '-'.
    Returns the --message value unchanged when no logfile is involved.
    """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if message or not logfile:
        return message

    try:
        if isstdiofilename(logfile):
            return ui.fin.read()
        # normalize line endings while preserving content
        return '\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(_("can't read commit message '%s': %s") %
                          (logfile, encoding.strtolocal(inst.strerror)))
851
851
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    Appends '.merge' to baseformname when a merge is involved and
    '.normal' otherwise.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        ismerge = len(ctxorbool.parents()) > 1

    suffix = ".merge" if ismerge else ".normal"
    return baseformname + suffix
868
868
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """Pick the commit message editor implied by the '--edit' option.

    'finishdesc' is called with the edited commit message (the new
    changeset's 'description') right after editing, before the emptiness
    check; its return value is the text actually stored into history,
    which allows changing the description before storing.

    'extramsg' replaces the 'Leave message empty to abort commit' line
    shown in the editor; the 'HG: ' prefix and EOL are added
    automatically.

    'editform' is a dot-separated list of names distinguishing the
    purpose of the commit text editing.

    'commitforceeditor' is returned regardless of 'edit' whenever
    'finishdesc' or 'extramsg' is given, because they are specific for
    usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forcededitor(r, c, s):
            return commitforceeditor(r, c, s,
                                     finishdesc=finishdesc,
                                     extramsg=extramsg,
                                     editform=editform)
        return forcededitor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
899
899
def _escapecommandtemplate(tmpl):
    """Escape the literal string parts of a template byte-string.

    Template constructs are passed through untouched; only 'string'
    tokens are escaped.
    """
    parts = []
    for typ, start, end in templater.scantemplate(tmpl, raw=True):
        piece = tmpl[start:end]
        if typ == b'string':
            piece = stringutil.escapestr(piece)
        parts.append(piece)
    return b''.join(parts)
908
908
def rendercommandtemplate(ui, tmpl, props):
    r"""Expand a literal template 'tmpl' in a way suitable for command line

    '\' in outermost string is not taken as an escape character because it
    is a directory separator on Windows.

    >>> from . import ui as uimod
    >>> ui = uimod.ui()
    >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
    'c:\\foo'
    >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
    'c:{path}'
    """
    if not tmpl:
        return tmpl
    templ = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
    return templ.renderdefault(props)
926
926
def rendertemplate(ctx, tmpl, props=None):
    """Expand a literal template 'tmpl' byte-string against one changeset

    Each props item must be a stringify-able value or a callable returning
    such value, i.e. no bare list nor dict should be passed.
    """
    repo = ctx.repo()
    resources = formatter.templateresources(repo.ui, repo)
    templ = formatter.maketemplater(repo.ui, tmpl,
                                    defaults=templatekw.keywords,
                                    resources=resources)
    # seed the mapping with the changeset, then layer caller-supplied props
    # on top (callers may intentionally override 'ctx')
    mapping = {'ctx': ctx}
    mapping.update(props or {})
    return templ.renderdefault(mapping)
941
941
def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
    r"""Convert old-style filename format string to template string

    >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
    'foo-{reporoot|basename}-{seqno}.patch'
    >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
    '{rev}{tags % "{tag}"}{node}'

    '\' in outermost strings has to be escaped because it is a directory
    separator on Windows:

    >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
    'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
    >>> _buildfntemplate(b'\\\\foo\\bar.patch')
    '\\\\\\\\foo\\\\bar.patch'
    >>> _buildfntemplate(b'\\{tags % "{tag}"}')
    '\\\\{tags % "{tag}"}'

    but inner strings follow the template rules (i.e. '\' is taken as an
    escape character):

    >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
    '{"c:\\tmp"}'
    """
    # map of %-format specifiers to equivalent template fragments
    expander = {
        b'H': b'{node}',
        b'R': b'{rev}',
        b'h': b'{node|short}',
        b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
        b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
        b'%': b'%',
        b'b': b'{reporoot|basename}',
    }
    # some specifiers are only valid when the caller supplied their data
    if total is not None:
        expander[b'N'] = b'{total}'
    if seqno is not None:
        expander[b'n'] = b'{seqno}'
    if total is not None and seqno is not None:
        # with both known, zero-pad %n to the width of the total count
        expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
    if pathname is not None:
        expander[b's'] = b'{pathname|basename}'
        expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
        expander[b'p'] = b'{pathname}'

    newname = []
    for typ, start, end in templater.scantemplate(pat, raw=True):
        if typ != b'string':
            # template constructs ({...}) are copied through untouched
            newname.append(pat[start:end])
            continue
        # literal span: rewrite %-specs, escaping everything in between so
        # outermost '\' (Windows path separator) survives template expansion
        i = start
        while i < end:
            n = pat.find(b'%', i, end)
            if n < 0:
                newname.append(stringutil.escapestr(pat[i:end]))
                break
            newname.append(stringutil.escapestr(pat[i:n]))
            if n + 2 > end:
                # trailing lone '%' with no specifier character
                raise error.Abort(_("incomplete format spec in output "
                                    "filename"))
            c = pat[n + 1:n + 2]
            i = n + 2
            try:
                newname.append(expander[c])
            except KeyError:
                raise error.Abort(_("invalid format spec '%%%s' in output "
                                    "filename") % c)
    return ''.join(newname)
1009
1009
def makefilename(ctx, pat, **props):
    """Expand an old-style %-format output filename 'pat' against 'ctx'.

    Rewrites the %-specifiers (via _buildfntemplate) into template syntax
    and renders the result; an empty 'pat' is returned unchanged.
    """
    if pat:
        tmpl = _buildfntemplate(pat, **props)
        # BUG: alias expansion shouldn't be made against template fragments
        # rewritten from %-format strings, but we have no easy way to
        # partially disable the expansion.
        return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
    return pat
1018
1018
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    # an empty/absent pattern also means stdio
    if not pat:
        return True
    return pat == '-'
1022
1022
1023 class _unclosablefile(object):
1023 class _unclosablefile(object):
1024 def __init__(self, fp):
1024 def __init__(self, fp):
1025 self._fp = fp
1025 self._fp = fp
1026
1026
1027 def close(self):
1027 def close(self):
1028 pass
1028 pass
1029
1029
1030 def __iter__(self):
1030 def __iter__(self):
1031 return iter(self._fp)
1031 return iter(self._fp)
1032
1032
1033 def __getattr__(self, attr):
1033 def __getattr__(self, attr):
1034 return getattr(self._fp, attr)
1034 return getattr(self._fp, attr)
1035
1035
1036 def __enter__(self):
1036 def __enter__(self):
1037 return self
1037 return self
1038
1038
1039 def __exit__(self, exc_type, exc_value, exc_tb):
1039 def __exit__(self, exc_type, exc_value, exc_tb):
1040 pass
1040 pass
1041
1041
def makefileobj(ctx, pat, mode='wb', **props):
    """Open the output/input file described by format string 'pat'.

    If 'pat' denotes stdio ('' or '-'), returns the ui's stdout/stdin
    wrapped so that close() is a no-op; otherwise expands 'pat' with
    makefilename() and opens the resulting path with 'mode'.
    """
    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        repo = ctx.repo()
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        # wrap so a caller's close() cannot close the ui's stdio stream
        return _unclosablefile(fp)
    fn = makefilename(ctx, pat, **props)
    return open(fn, mode)
1054
1054
def openstorage(repo, cmd, file_, opts, returnrevlog=False):
    """opens the changelog, manifest, a filelog or a given revlog

    Selection comes from the debug-command options --changelog,
    --manifest and --dir (mutually constrained, validated below), or from
    an explicit 'file_' path. With returnrevlog=True, a raw revlog
    instance is returned instead of the higher-level storage object, and
    as a last resort 'file_' is opened as an on-disk .i revlog file.

    Raises error.Abort on conflicting/invalid option combinations and
    error.CommandError when no usable target was specified.
    """
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    # validate option combinations up front so we fail with one clear message
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            # unfiltered: debug commands want to see hidden changesets too
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            # tree manifest storage is keyed by directory path ending in '/'
            if not dir.endswith('/'):
                dir = dir + '/'
            dirlog = repo.manifestlog.getstorage(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog.getstorage(b'')
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog

    # Not all storage may be revlogs. If requested, try to return an actual
    # revlog instance.
    if returnrevlog:
        if isinstance(r, revlog.revlog):
            pass
        elif util.safehasattr(r, '_revlog'):
            # storage object wrapping a revlog: unwrap it
            r = r._revlog
        elif r is not None:
            raise error.Abort(_('%r does not appear to be a revlog') % r)

    if not r:
        if not returnrevlog:
            raise error.Abort(_('cannot give path to non-revlog'))

        # fall back to opening 'file_' as a bare revlog on disk
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # strip the 2-char extension (e.g. '.d') and open the '.i' index;
        # audit=False since this is an arbitrary user-supplied path
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
1115
1115
def openrevlog(repo, cmd, file_, opts):
    """Obtain a revlog backing storage of an item.

    This is similar to ``openstorage()`` except it always returns a revlog.

    In most cases, a caller cares about the main storage object - not the
    revlog backing it. Therefore, this function should only be used by code
    that needs to examine low-level revlog implementation details. e.g. debug
    commands.
    """
    # delegate to openstorage(), forcing the raw-revlog return path
    return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1127
1127
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) working-directory files.

    'pats' is the command-line argument list; the last entry is the
    destination. Returns True if any individual copy failed (failures are
    reported via ui.warn and counted, not raised).
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    # abstarget -> abssrc, used to detect two sources copied onto one target
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # expand one source pattern into (abs, rel, exact) tuples, warning
        # about and skipping files in a state we cannot copy from
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                # only complain when the user named the file explicitly
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # copy/move one file; returns True to report a failure
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return True # report a failure

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                # case-only rename on a case-insensitive filesystem
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return True # report a failure
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _("('hg rename %s' to replace the file by "
                                 'recording a rename)\n') % flags
                    else:
                        hint = _("('hg copy %s' to replace the file by "
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _("('hg rename --after' to record the rename)\n")
                    else:
                        hint = _("('hg copy --after' to record the copy)\n")
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return True # report a failure

        if after:
            # --after only records copies of files that already exist
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return True # report a failure
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # case-only rename: go via a temp name so the rename is
                    # visible on case-insensitive filesystems
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    # Preserve stat info on renames, not on copies; this matches
                    # Linux CLI behavior.
                    util.copyfile(src, target, copystat=rename)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, encoding.strtolocal(inst.strerror)))
                    # NOTE(review): 'hint' is assigned here but never
                    # emitted before returning - confirm whether a
                    # ui.warn(hint) was intended
                    if rename:
                        hint = _("('hg rename --after' to record the rename)\n")
                    else:
                        hint = _("('hg copy --after' to record the copy)\n")
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
                repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        # map each source path to its target path (non --after case)
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        # like targetpathfn, but for --after the files already moved, so we
        # have to guess how much of the source prefix to strip by checking
        # which candidate layout matches more existing target files
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many sources already exist under dest when
                    # stripping 'striplen' characters of prefix
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    # last pattern is the destination; the rest are sources
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    return errors != 0
1377
1377
## facility to let extension process additional data into an import patch
# list of identifier to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1398
1398
def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :patchdata: a dictionary containing parsed patch data (such as from
                ``patch.extract()``)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a ``(msg, node, rejects)`` tuple: a status message, the node of
    the created commit (or None), and whether any hunks were rejected.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    tmpname = patchdata.get('filename')
    message = patchdata.get('message')
    # command-line options win over what the patch itself records
    user = opts.get('user') or patchdata.get('user')
    date = opts.get('date') or patchdata.get('date')
    branch = patchdata.get('branch')
    nodeid = patchdata.get('nodeid')
    p1 = patchdata.get('p1')
    p2 = patchdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    # --bypass applies the patch to the store without touching the working
    # directory; 'update' is the normal (non-bypass) path
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)

    if not tmpname:
        # no patch file was extracted: nothing to import
        return None, None, False

    rejects = False

    cmdline_message = logmessage(ui, opts)
    if cmdline_message:
        # pickup the cmdline msg
        message = cmdline_message
    elif message:
        # pickup the patch msg
        message = message.strip()
    else:
        # launch the editor
        message = None
    ui.debug('message:\n%s\n' % (message or ''))

    if len(parents) == 1:
        parents.append(repo[nullid])
    if opts.get('exact'):
        # --exact requires the patch to carry its own node/parent metadata
        if not nodeid or not p1:
            raise error.Abort(_('not a Mercurial patch'))
        p1 = repo[p1]
        p2 = repo[p2 or nullid]
    elif p2:
        try:
            p1 = repo[p1]
            p2 = repo[p2]
            # Without any options, consider p2 only if the
            # patch is being applied on top of the recorded
            # first parent.
            if p1 != parents[0]:
                p1 = parents[0]
                p2 = repo[nullid]
        except error.RepoError:
            # recorded parents are unknown locally; fall back to the
            # current parents
            p1, p2 = parents
        if p2.node() == nullid:
            ui.warn(_("warning: import the patch as a normal revision\n"
                      "(use --exact to import the patch as a merge)\n"))
    else:
        p1, p2 = parents

    n = None
    if update:
        if p1 != parents[0]:
            updatefunc(repo, p1.node())
        if p2 != parents[1]:
            repo.setparents(p1.node(), p2.node())

        if opts.get('exact') or importbranch:
            repo.dirstate.setbranch(branch or 'default')

        partial = opts.get('partial', False)
        files = set()
        try:
            patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                        files=files, eolmode=None, similarity=sim / 100.0)
        except error.PatchError as e:
            if not partial:
                raise error.Abort(pycompat.bytestr(e))
            if partial:
                # with --partial, record rejected hunks instead of aborting
                rejects = True

        files = list(files)
        if nocommit:
            if message:
                msgs.append(message)
        else:
            if opts.get('exact') or p2:
                # If you got here, you either use --force and know what
                # you are doing or used --exact or a merge patch while
                # being updated to its first parent.
                m = None
            else:
                m = scmutil.matchfiles(repo, files or [])
            editform = mergeeditform(repo[None], 'import.normal')
            if opts.get('exact'):
                editor = None
            else:
                editor = getcommiteditor(editform=editform,
                                         **pycompat.strkwargs(opts))
            extra = {}
            # let extensions massage the commit extras before committing
            for idfunc in extrapreimport:
                extrapreimportmap[idfunc](repo, patchdata, extra, opts)
            overrides = {}
            if partial:
                # a fully-rejected patch still creates an (empty) commit
                overrides[('ui', 'allowemptycommit')] = True
            with repo.ui.configoverride(overrides, 'import'):
                n = repo.commit(message, user,
                                date, match=m,
                                editor=editor, extra=extra)
            for idfunc in extrapostimport:
                extrapostimportmap[idfunc](repo[n])
    else:
        # --bypass: build the commit in memory, never touching the
        # working directory
        if opts.get('exact') or importbranch:
            branch = branch or 'default'
        else:
            branch = p1.branch()
        store = patch.filestore()
        try:
            files = set()
            try:
                patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                files, eolmode=None)
            except error.PatchError as e:
                raise error.Abort(stringutil.forcebytestr(e))
            if opts.get('exact'):
                editor = None
            else:
                editor = getcommiteditor(editform='import.bypass')
            memctx = context.memctx(repo, (p1.node(), p2.node()),
                                    message,
                                    files=files,
                                    filectxfn=store,
                                    user=user,
                                    date=date,
                                    branch=branch,
                                    editor=editor)
            n = memctx.commit()
        finally:
            store.close()
    if opts.get('exact') and nocommit:
        # --exact with --no-commit is still useful in that it does merge
        # and branch bits
        ui.warn(_("warning: can't check exact import with --no-commit\n"))
    elif opts.get('exact') and (not n or hex(n) != nodeid):
        raise error.Abort(_('patch is damaged or loses information'))
    msg = _('applied to working directory')
    if n:
        # i18n: refers to a short changeset id
        msg = _('created %s') % short(n)
    return msg, n, rejects
1568
1568
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to the actual export function; the function has to
# return a string to be added to the patch header, or None to add nothing.
# It is given two arguments (sequencenumber, changectx).
extraexportmap = {}
1576
1576
def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
    """Emit one changeset as an "HG changeset patch" through formatter fm.

    :ctx: the changectx to export
    :match: matcher restricting which file diffs are included
    :switch_parent: if True, diff against the second parent instead of p1
    :seqno: 1-based position of this patch in the export series (handed to
            the 'extraexport' header hooks)
    :diffopts: diff options used to generate the patch body
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        # diff against p2 by putting it first
        parents.reverse()

    if parents:
        prev = parents[0]
    else:
        # root changeset: diff against the null revision
        prev = nullid

    fm.context(ctx=ctx)
    fm.plain('# HG changeset patch\n')
    fm.write('user', '# User %s\n', ctx.user())
    fm.plain('# Date %d %d\n' % ctx.date())
    fm.write('date', '# %s\n', fm.formatdate(ctx.date()))
    # only emit the branch header for non-default branches
    fm.condwrite(branch and branch != 'default',
                 'branch', '# Branch %s\n', branch)
    fm.write('node', '# Node ID %s\n', hex(node))
    fm.plain('# Parent %s\n' % hex(prev))
    if len(parents) > 1:
        fm.plain('# Parent %s\n' % hex(parents[1]))
    fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name='node'))

    # TODO: redesign extraexportmap function to support formatter
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            fm.plain('# %s\n' % header)

    fm.write('desc', '%s\n', ctx.description().rstrip())
    fm.plain('\n')

    if fm.isplain():
        # plain output: stream labeled (colorized) diff chunks
        chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
        for chunk, label in chunkiter:
            fm.plain(chunk, label=label)
    else:
        # structured output: attach the raw diff bytes as data
        chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
        # TODO: make it structured?
        fm.data(diff=b''.join(chunkiter))
1619
1619
def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
    """Export changesets to stdout or a single file"""
    # Destinations starting with '<' (e.g. '<unnamed>') are placeholders,
    # not real file names, so they are not announced to the user.
    announce = not dest.startswith('<')
    for seqno, rev in enumerate(revs, 1):
        if announce:
            repo.ui.note("%s\n" % dest)
        fm.startitem()
        _exportsingle(repo, repo[rev], fm, match, switch_parent, seqno,
                      diffopts)
1628
1628
def _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, diffopts,
                      match):
    """Export changesets to possibly multiple files"""
    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    # Group revisions by the file name their template expands to, preserving
    # the order in which each destination is first produced.
    perfile = util.sortdict()  # filename: [(seqno, rev), ...]

    for seqno, rev in enumerate(revs, 1):
        fname = makefilename(repo[rev], fntemplate,
                             total=total, seqno=seqno, revwidth=revwidth)
        perfile.setdefault(fname, []).append((seqno, rev))

    # One formatter per destination file; several revs may share a file when
    # the template does not make names unique.
    for fname, entries in perfile.items():
        with formatter.maybereopen(basefm, fname) as fm:
            repo.ui.note("%s\n" % fname)
            for seqno, rev in entries:
                fm.startitem()
                _exportsingle(repo, repo[rev], fm, match, switch_parent,
                              seqno, diffopts)
1650
1650
def export(repo, revs, basefm, fntemplate='hg-%h.patch', switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Every revision in revs is emitted as an "HG changeset patch". When
    fntemplate is non-empty, each revision is written to the file named by
    expanding the template; otherwise all revisions go through basefm.

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      basefm: A formatter to which patches should be written.
      fntemplate: An optional string to use for generating patch file names.
      switch_parent: If True, show diffs against second parent when not
        nullid.  Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.
    '''
    # warm any remote-file caches before reading the revisions
    scmutil.prefetchfiles(repo, revs, match)

    if fntemplate:
        _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, opts,
                          match)
    else:
        _exportfile(repo, revs, basefm, '<unnamed>', switch_parent, opts,
                    match)
1682
1682
def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
    """Export changesets to the given file stream"""
    # warm any remote-file caches before reading the revisions
    scmutil.prefetchfiles(repo, revs, match)

    # Use the stream's name when it has one (a real file); otherwise fall
    # back to the '<unnamed>' placeholder understood by _exportfile.
    destname = getattr(fp, 'name', '<unnamed>')
    with formatter.formatter(repo.ui, fp, 'export', {}) as fm:
        _exportfile(repo, revs, fm, destname, switch_parent, opts, match)
1690
1690
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function.

    :fm: formatter the marker fields are written to
    :marker: the obsolescence marker object to render
    :index: optional integer written first when given
    """
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('prednode', '%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    # successors may be empty (pruned changeset); only write when present
    fm.condwrite(succs, 'succnodes', '%s ',
                 fm.formatlist(map(hex, succs), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parents), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # the date was rendered above already; drop it from the metadata dict
    # (copy first so the marker's own metadata is not mutated)
    meta = marker.metadata().copy()
    meta.pop('date', None)
    smeta = pycompat.rapply(pycompat.maybebytestr, meta)
    fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
    fm.plain('\n')
1712
1712
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematcher = dateutil.matchdate(date)
    allfiles = scmutil.matchall(repo)
    matched = {}  # rev -> date tuple, for revisions whose date fits the spec

    def prep(ctx, fns):
        # invoked by walkchangerevs for every candidate changeset
        when = ctx.date()
        if datematcher(when[0]):
            matched[ctx.rev()] = when

    # walkchangerevs yields newest-first, so the first hit is the tipmost
    for ctx in walkchangerevs(repo, allfiles, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in matched:
            ui.status(_("found revision %s from %s\n") %
                      (rev, dateutil.datestr(matched[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
1733
1733
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes forever: doubling each step until sizelimit is
    reached, then repeating the final size indefinitely."""
    size = windowsize
    # growth phase: double until the cap would be passed
    while size < sizelimit:
        yield size
        size *= 2
    # steady state: the capped (or initially over-cap) size, forever
    while True:
        yield size
1739
1739
def _walkrevs(repo, opts):
    """Resolve the revisions a log-like walk should visit.

    Honors 'rev', 'follow' and 'follow_first' in opts; without an explicit
    'rev', the result is ordered newest-first.
    """
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and repo.dirstate.p1() == nullid:
        # following from an unborn working-directory parent: nothing to walk
        revs = smartset.baseset()
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs
1754
1754
class FileWalkError(Exception):
    """Raised when the file history cannot be walked via filelogs alone."""
1757
1757
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.

    :match: matcher whose explicit files() are walked (no patterns)
    :follow: if True, follow renames/copies through the file graph
    :revs: revision set bounding the walk (its min/max limit linkrevs)
    :fncache: dict filled as a side effect, mapping rev -> list of matched
              files touched by that rev
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in pycompat.xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None) for every file to walk,
        # including rename sources discovered along the way (in 'copies')
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                # remember the rename source so iterfiles() walks it too
                copies.append(copied)

    return wanted
1854
1854
class _followfilter(object):
    """Stateful filter selecting revisions related to the first rev seen.

    The first revision passed to match() becomes the anchor (startrev).
    Afterwards, revisions greater than the anchor match if they descend
    from it; smaller ones match if they are ancestors.  match() must be
    fed revisions in increasing distance from the anchor for the root-set
    bookkeeping to be correct.
    """
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        # nullrev until the first match() call fixes the anchor revision
        self.startrev = nullrev
        # current frontier of known-related revisions
        self.roots = set()
        # if True, only first parents are considered when walking
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            # parents of rev, restricted per onlyfirst, with nullrev dropped
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            # first call anchors the filter and always matches
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                # consume this root and replace it with its own parents
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1892
1892
1893 def walkchangerevs(repo, match, opts, prepare):
1893 def walkchangerevs(repo, match, opts, prepare):
1894 '''Iterate over files and the revs in which they changed.
1894 '''Iterate over files and the revs in which they changed.
1895
1895
1896 Callers most commonly need to iterate backwards over the history
1896 Callers most commonly need to iterate backwards over the history
1897 in which they are interested. Doing so has awful (quadratic-looking)
1897 in which they are interested. Doing so has awful (quadratic-looking)
1898 performance, so we use iterators in a "windowed" way.
1898 performance, so we use iterators in a "windowed" way.
1899
1899
1900 We walk a window of revisions in the desired order. Within the
1900 We walk a window of revisions in the desired order. Within the
1901 window, we first walk forwards to gather data, then in the desired
1901 window, we first walk forwards to gather data, then in the desired
1902 order (usually backwards) to display it.
1902 order (usually backwards) to display it.
1903
1903
1904 This function returns an iterator yielding contexts. Before
1904 This function returns an iterator yielding contexts. Before
1905 yielding each context, the iterator will first call the prepare
1905 yielding each context, the iterator will first call the prepare
1906 function on each context in the window in forward order.'''
1906 function on each context in the window in forward order.'''
1907
1907
1908 allfiles = opts.get('all_files')
1908 allfiles = opts.get('all_files')
1909 follow = opts.get('follow') or opts.get('follow_first')
1909 follow = opts.get('follow') or opts.get('follow_first')
1910 revs = _walkrevs(repo, opts)
1910 revs = _walkrevs(repo, opts)
1911 if not revs:
1911 if not revs:
1912 return []
1912 return []
1913 wanted = set()
1913 wanted = set()
1914 slowpath = match.anypats() or (not match.always() and opts.get('removed'))
1914 slowpath = match.anypats() or (not match.always() and opts.get('removed'))
1915 fncache = {}
1915 fncache = {}
1916 change = repo.__getitem__
1916 change = repo.__getitem__
1917
1917
1918 # First step is to fill wanted, the set of revisions that we want to yield.
1918 # First step is to fill wanted, the set of revisions that we want to yield.
1919 # When it does not induce extra cost, we also fill fncache for revisions in
1919 # When it does not induce extra cost, we also fill fncache for revisions in
1920 # wanted: a cache of filenames that were changed (ctx.files()) and that
1920 # wanted: a cache of filenames that were changed (ctx.files()) and that
1921 # match the file filtering conditions.
1921 # match the file filtering conditions.
1922
1922
1923 if match.always() or allfiles:
1923 if match.always() or allfiles:
1924 # No files, no patterns. Display all revs.
1924 # No files, no patterns. Display all revs.
1925 wanted = revs
1925 wanted = revs
1926 elif not slowpath:
1926 elif not slowpath:
1927 # We only have to read through the filelog to find wanted revisions
1927 # We only have to read through the filelog to find wanted revisions
1928
1928
1929 try:
1929 try:
1930 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1930 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1931 except FileWalkError:
1931 except FileWalkError:
1932 slowpath = True
1932 slowpath = True
1933
1933
1934 # We decided to fall back to the slowpath because at least one
1934 # We decided to fall back to the slowpath because at least one
1935 # of the paths was not a file. Check to see if at least one of them
1935 # of the paths was not a file. Check to see if at least one of them
1936 # existed in history, otherwise simply return
1936 # existed in history, otherwise simply return
1937 for path in match.files():
1937 for path in match.files():
1938 if path == '.' or path in repo.store:
1938 if path == '.' or path in repo.store:
1939 break
1939 break
1940 else:
1940 else:
1941 return []
1941 return []
1942
1942
1943 if slowpath:
1943 if slowpath:
1944 # We have to read the changelog to match filenames against
1944 # We have to read the changelog to match filenames against
1945 # changed files
1945 # changed files
1946
1946
1947 if follow:
1947 if follow:
1948 raise error.Abort(_('can only follow copies/renames for explicit '
1948 raise error.Abort(_('can only follow copies/renames for explicit '
1949 'filenames'))
1949 'filenames'))
1950
1950
1951 # The slow path checks files modified in every changeset.
1951 # The slow path checks files modified in every changeset.
1952 # This is really slow on large repos, so compute the set lazily.
1952 # This is really slow on large repos, so compute the set lazily.
1953 class lazywantedset(object):
1953 class lazywantedset(object):
1954 def __init__(self):
1954 def __init__(self):
1955 self.set = set()
1955 self.set = set()
1956 self.revs = set(revs)
1956 self.revs = set(revs)
1957
1957
1958 # No need to worry about locality here because it will be accessed
1958 # No need to worry about locality here because it will be accessed
1959 # in the same order as the increasing window below.
1959 # in the same order as the increasing window below.
1960 def __contains__(self, value):
1960 def __contains__(self, value):
1961 if value in self.set:
1961 if value in self.set:
1962 return True
1962 return True
1963 elif not value in self.revs:
1963 elif not value in self.revs:
1964 return False
1964 return False
1965 else:
1965 else:
1966 self.revs.discard(value)
1966 self.revs.discard(value)
1967 ctx = change(value)
1967 ctx = change(value)
1968 matches = [f for f in ctx.files() if match(f)]
1968 matches = [f for f in ctx.files() if match(f)]
1969 if matches:
1969 if matches:
1970 fncache[value] = matches
1970 fncache[value] = matches
1971 self.set.add(value)
1971 self.set.add(value)
1972 return True
1972 return True
1973 return False
1973 return False
1974
1974
1975 def discard(self, value):
1975 def discard(self, value):
1976 self.revs.discard(value)
1976 self.revs.discard(value)
1977 self.set.discard(value)
1977 self.set.discard(value)
1978
1978
1979 wanted = lazywantedset()
1979 wanted = lazywantedset()
1980
1980
1981 # it might be worthwhile to do this in the iterator if the rev range
1981 # it might be worthwhile to do this in the iterator if the rev range
1982 # is descending and the prune args are all within that range
1982 # is descending and the prune args are all within that range
1983 for rev in opts.get('prune', ()):
1983 for rev in opts.get('prune', ()):
1984 rev = repo[rev].rev()
1984 rev = repo[rev].rev()
1985 ff = _followfilter(repo)
1985 ff = _followfilter(repo)
1986 stop = min(revs[0], revs[-1])
1986 stop = min(revs[0], revs[-1])
1987 for x in pycompat.xrange(rev, stop - 1, -1):
1987 for x in pycompat.xrange(rev, stop - 1, -1):
1988 if ff.match(x):
1988 if ff.match(x):
1989 wanted = wanted - [x]
1989 wanted = wanted - [x]
1990
1990
1991 # Now that wanted is correctly initialized, we can iterate over the
1991 # Now that wanted is correctly initialized, we can iterate over the
1992 # revision range, yielding only revisions in wanted.
1992 # revision range, yielding only revisions in wanted.
1993 def iterate():
1993 def iterate():
1994 if follow and match.always():
1994 if follow and match.always():
1995 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1995 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1996 def want(rev):
1996 def want(rev):
1997 return ff.match(rev) and rev in wanted
1997 return ff.match(rev) and rev in wanted
1998 else:
1998 else:
1999 def want(rev):
1999 def want(rev):
2000 return rev in wanted
2000 return rev in wanted
2001
2001
2002 it = iter(revs)
2002 it = iter(revs)
2003 stopiteration = False
2003 stopiteration = False
2004 for windowsize in increasingwindows():
2004 for windowsize in increasingwindows():
2005 nrevs = []
2005 nrevs = []
2006 for i in pycompat.xrange(windowsize):
2006 for i in pycompat.xrange(windowsize):
2007 rev = next(it, None)
2007 rev = next(it, None)
2008 if rev is None:
2008 if rev is None:
2009 stopiteration = True
2009 stopiteration = True
2010 break
2010 break
2011 elif want(rev):
2011 elif want(rev):
2012 nrevs.append(rev)
2012 nrevs.append(rev)
2013 for rev in sorted(nrevs):
2013 for rev in sorted(nrevs):
2014 fns = fncache.get(rev)
2014 fns = fncache.get(rev)
2015 ctx = change(rev)
2015 ctx = change(rev)
2016 if not fns:
2016 if not fns:
2017 def fns_generator():
2017 def fns_generator():
2018 if allfiles:
2018 if allfiles:
2019 fiter = iter(ctx)
2019 fiter = iter(ctx)
2020 else:
2020 else:
2021 fiter = ctx.files()
2021 fiter = ctx.files()
2022 for f in fiter:
2022 for f in fiter:
2023 if match(f):
2023 if match(f):
2024 yield f
2024 yield f
2025 fns = fns_generator()
2025 fns = fns_generator()
2026 prepare(ctx, fns)
2026 prepare(ctx, fns)
2027 for rev in nrevs:
2027 for rev in nrevs:
2028 yield change(rev)
2028 yield change(rev)
2029
2029
2030 if stopiteration:
2030 if stopiteration:
2031 break
2031 break
2032
2032
2033 return iterate()
2033 return iterate()
2034
2034
2035 def add(ui, repo, match, prefix, explicitonly, **opts):
2035 def add(ui, repo, match, prefix, explicitonly, **opts):
2036 join = lambda f: os.path.join(prefix, f)
2036 join = lambda f: os.path.join(prefix, f)
2037 bad = []
2037 bad = []
2038
2038
2039 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2039 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2040 names = []
2040 names = []
2041 wctx = repo[None]
2041 wctx = repo[None]
2042 cca = None
2042 cca = None
2043 abort, warn = scmutil.checkportabilityalert(ui)
2043 abort, warn = scmutil.checkportabilityalert(ui)
2044 if abort or warn:
2044 if abort or warn:
2045 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2045 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2046
2046
2047 match = repo.narrowmatch(match, includeexact=True)
2047 badmatch = matchmod.badmatch(match, badfn)
2048 badmatch = matchmod.badmatch(match, badfn)
2048 dirstate = repo.dirstate
2049 dirstate = repo.dirstate
2049 # We don't want to just call wctx.walk here, since it would return a lot of
2050 # We don't want to just call wctx.walk here, since it would return a lot of
2050 # clean files, which we aren't interested in and takes time.
2051 # clean files, which we aren't interested in and takes time.
2051 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
2052 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
2052 unknown=True, ignored=False, full=False)):
2053 unknown=True, ignored=False, full=False)):
2053 exact = match.exact(f)
2054 exact = match.exact(f)
2054 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2055 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2055 if cca:
2056 if cca:
2056 cca(f)
2057 cca(f)
2057 names.append(f)
2058 names.append(f)
2058 if ui.verbose or not exact:
2059 if ui.verbose or not exact:
2059 ui.status(_('adding %s\n') % match.rel(f),
2060 ui.status(_('adding %s\n') % match.rel(f),
2060 label='addremove.added')
2061 label='addremove.added')
2061
2062
2062 for subpath in sorted(wctx.substate):
2063 for subpath in sorted(wctx.substate):
2063 sub = wctx.sub(subpath)
2064 sub = wctx.sub(subpath)
2064 try:
2065 try:
2065 submatch = matchmod.subdirmatcher(subpath, match)
2066 submatch = matchmod.subdirmatcher(subpath, match)
2066 if opts.get(r'subrepos'):
2067 if opts.get(r'subrepos'):
2067 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2068 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2068 else:
2069 else:
2069 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2070 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2070 except error.LookupError:
2071 except error.LookupError:
2071 ui.status(_("skipping missing subrepository: %s\n")
2072 ui.status(_("skipping missing subrepository: %s\n")
2072 % join(subpath))
2073 % join(subpath))
2073
2074
2074 if not opts.get(r'dry_run'):
2075 if not opts.get(r'dry_run'):
2075 rejected = wctx.add(names, prefix)
2076 rejected = wctx.add(names, prefix)
2076 bad.extend(f for f in rejected if f in match.files())
2077 bad.extend(f for f in rejected if f in match.files())
2077 return bad
2078 return bad
2078
2079
2079 def addwebdirpath(repo, serverpath, webconf):
2080 def addwebdirpath(repo, serverpath, webconf):
2080 webconf[serverpath] = repo.root
2081 webconf[serverpath] = repo.root
2081 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2082 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2082
2083
2083 for r in repo.revs('filelog("path:.hgsub")'):
2084 for r in repo.revs('filelog("path:.hgsub")'):
2084 ctx = repo[r]
2085 ctx = repo[r]
2085 for subpath in ctx.substate:
2086 for subpath in ctx.substate:
2086 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2087 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2087
2088
2088 def forget(ui, repo, match, prefix, explicitonly, dryrun, interactive):
2089 def forget(ui, repo, match, prefix, explicitonly, dryrun, interactive):
2089 if dryrun and interactive:
2090 if dryrun and interactive:
2090 raise error.Abort(_("cannot specify both --dry-run and --interactive"))
2091 raise error.Abort(_("cannot specify both --dry-run and --interactive"))
2091 join = lambda f: os.path.join(prefix, f)
2092 join = lambda f: os.path.join(prefix, f)
2092 bad = []
2093 bad = []
2093 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2094 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2094 wctx = repo[None]
2095 wctx = repo[None]
2095 forgot = []
2096 forgot = []
2096
2097
2097 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2098 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2098 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2099 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2099 if explicitonly:
2100 if explicitonly:
2100 forget = [f for f in forget if match.exact(f)]
2101 forget = [f for f in forget if match.exact(f)]
2101
2102
2102 for subpath in sorted(wctx.substate):
2103 for subpath in sorted(wctx.substate):
2103 sub = wctx.sub(subpath)
2104 sub = wctx.sub(subpath)
2104 try:
2105 try:
2105 submatch = matchmod.subdirmatcher(subpath, match)
2106 submatch = matchmod.subdirmatcher(subpath, match)
2106 subbad, subforgot = sub.forget(submatch, prefix, dryrun=dryrun,
2107 subbad, subforgot = sub.forget(submatch, prefix, dryrun=dryrun,
2107 interactive=interactive)
2108 interactive=interactive)
2108 bad.extend([subpath + '/' + f for f in subbad])
2109 bad.extend([subpath + '/' + f for f in subbad])
2109 forgot.extend([subpath + '/' + f for f in subforgot])
2110 forgot.extend([subpath + '/' + f for f in subforgot])
2110 except error.LookupError:
2111 except error.LookupError:
2111 ui.status(_("skipping missing subrepository: %s\n")
2112 ui.status(_("skipping missing subrepository: %s\n")
2112 % join(subpath))
2113 % join(subpath))
2113
2114
2114 if not explicitonly:
2115 if not explicitonly:
2115 for f in match.files():
2116 for f in match.files():
2116 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2117 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2117 if f not in forgot:
2118 if f not in forgot:
2118 if repo.wvfs.exists(f):
2119 if repo.wvfs.exists(f):
2119 # Don't complain if the exact case match wasn't given.
2120 # Don't complain if the exact case match wasn't given.
2120 # But don't do this until after checking 'forgot', so
2121 # But don't do this until after checking 'forgot', so
2121 # that subrepo files aren't normalized, and this op is
2122 # that subrepo files aren't normalized, and this op is
2122 # purely from data cached by the status walk above.
2123 # purely from data cached by the status walk above.
2123 if repo.dirstate.normalize(f) in repo.dirstate:
2124 if repo.dirstate.normalize(f) in repo.dirstate:
2124 continue
2125 continue
2125 ui.warn(_('not removing %s: '
2126 ui.warn(_('not removing %s: '
2126 'file is already untracked\n')
2127 'file is already untracked\n')
2127 % match.rel(f))
2128 % match.rel(f))
2128 bad.append(f)
2129 bad.append(f)
2129
2130
2130 if interactive:
2131 if interactive:
2131 responses = _('[Ynsa?]'
2132 responses = _('[Ynsa?]'
2132 '$$ &Yes, forget this file'
2133 '$$ &Yes, forget this file'
2133 '$$ &No, skip this file'
2134 '$$ &No, skip this file'
2134 '$$ &Skip remaining files'
2135 '$$ &Skip remaining files'
2135 '$$ Include &all remaining files'
2136 '$$ Include &all remaining files'
2136 '$$ &? (display help)')
2137 '$$ &? (display help)')
2137 for filename in forget[:]:
2138 for filename in forget[:]:
2138 r = ui.promptchoice(_('forget %s %s') % (filename, responses))
2139 r = ui.promptchoice(_('forget %s %s') % (filename, responses))
2139 if r == 4: # ?
2140 if r == 4: # ?
2140 while r == 4:
2141 while r == 4:
2141 for c, t in ui.extractchoices(responses)[1]:
2142 for c, t in ui.extractchoices(responses)[1]:
2142 ui.write('%s - %s\n' % (c, encoding.lower(t)))
2143 ui.write('%s - %s\n' % (c, encoding.lower(t)))
2143 r = ui.promptchoice(_('forget %s %s') % (filename,
2144 r = ui.promptchoice(_('forget %s %s') % (filename,
2144 responses))
2145 responses))
2145 if r == 0: # yes
2146 if r == 0: # yes
2146 continue
2147 continue
2147 elif r == 1: # no
2148 elif r == 1: # no
2148 forget.remove(filename)
2149 forget.remove(filename)
2149 elif r == 2: # Skip
2150 elif r == 2: # Skip
2150 fnindex = forget.index(filename)
2151 fnindex = forget.index(filename)
2151 del forget[fnindex:]
2152 del forget[fnindex:]
2152 break
2153 break
2153 elif r == 3: # All
2154 elif r == 3: # All
2154 break
2155 break
2155
2156
2156 for f in forget:
2157 for f in forget:
2157 if ui.verbose or not match.exact(f) or interactive:
2158 if ui.verbose or not match.exact(f) or interactive:
2158 ui.status(_('removing %s\n') % match.rel(f),
2159 ui.status(_('removing %s\n') % match.rel(f),
2159 label='addremove.removed')
2160 label='addremove.removed')
2160
2161
2161 if not dryrun:
2162 if not dryrun:
2162 rejected = wctx.forget(forget, prefix)
2163 rejected = wctx.forget(forget, prefix)
2163 bad.extend(f for f in rejected if f in match.files())
2164 bad.extend(f for f in rejected if f in match.files())
2164 forgot.extend(f for f in forget if f not in rejected)
2165 forgot.extend(f for f in forget if f not in rejected)
2165 return bad, forgot
2166 return bad, forgot
2166
2167
2167 def files(ui, ctx, m, fm, fmt, subrepos):
2168 def files(ui, ctx, m, fm, fmt, subrepos):
2168 ret = 1
2169 ret = 1
2169
2170
2170 needsfctx = ui.verbose or {'size', 'flags'} & fm.datahint()
2171 needsfctx = ui.verbose or {'size', 'flags'} & fm.datahint()
2171 for f in ctx.matches(m):
2172 for f in ctx.matches(m):
2172 fm.startitem()
2173 fm.startitem()
2173 fm.context(ctx=ctx)
2174 fm.context(ctx=ctx)
2174 if needsfctx:
2175 if needsfctx:
2175 fc = ctx[f]
2176 fc = ctx[f]
2176 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2177 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2177 fm.data(path=f)
2178 fm.data(path=f)
2178 fm.plain(fmt % m.rel(f))
2179 fm.plain(fmt % m.rel(f))
2179 ret = 0
2180 ret = 0
2180
2181
2181 for subpath in sorted(ctx.substate):
2182 for subpath in sorted(ctx.substate):
2182 submatch = matchmod.subdirmatcher(subpath, m)
2183 submatch = matchmod.subdirmatcher(subpath, m)
2183 if (subrepos or m.exact(subpath) or any(submatch.files())):
2184 if (subrepos or m.exact(subpath) or any(submatch.files())):
2184 sub = ctx.sub(subpath)
2185 sub = ctx.sub(subpath)
2185 try:
2186 try:
2186 recurse = m.exact(subpath) or subrepos
2187 recurse = m.exact(subpath) or subrepos
2187 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2188 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2188 ret = 0
2189 ret = 0
2189 except error.LookupError:
2190 except error.LookupError:
2190 ui.status(_("skipping missing subrepository: %s\n")
2191 ui.status(_("skipping missing subrepository: %s\n")
2191 % m.abs(subpath))
2192 % m.abs(subpath))
2192
2193
2193 return ret
2194 return ret
2194
2195
2195 def remove(ui, repo, m, prefix, after, force, subrepos, dryrun, warnings=None):
2196 def remove(ui, repo, m, prefix, after, force, subrepos, dryrun, warnings=None):
2196 join = lambda f: os.path.join(prefix, f)
2197 join = lambda f: os.path.join(prefix, f)
2197 ret = 0
2198 ret = 0
2198 s = repo.status(match=m, clean=True)
2199 s = repo.status(match=m, clean=True)
2199 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2200 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2200
2201
2201 wctx = repo[None]
2202 wctx = repo[None]
2202
2203
2203 if warnings is None:
2204 if warnings is None:
2204 warnings = []
2205 warnings = []
2205 warn = True
2206 warn = True
2206 else:
2207 else:
2207 warn = False
2208 warn = False
2208
2209
2209 subs = sorted(wctx.substate)
2210 subs = sorted(wctx.substate)
2210 progress = ui.makeprogress(_('searching'), total=len(subs),
2211 progress = ui.makeprogress(_('searching'), total=len(subs),
2211 unit=_('subrepos'))
2212 unit=_('subrepos'))
2212 for subpath in subs:
2213 for subpath in subs:
2213 submatch = matchmod.subdirmatcher(subpath, m)
2214 submatch = matchmod.subdirmatcher(subpath, m)
2214 if subrepos or m.exact(subpath) or any(submatch.files()):
2215 if subrepos or m.exact(subpath) or any(submatch.files()):
2215 progress.increment()
2216 progress.increment()
2216 sub = wctx.sub(subpath)
2217 sub = wctx.sub(subpath)
2217 try:
2218 try:
2218 if sub.removefiles(submatch, prefix, after, force, subrepos,
2219 if sub.removefiles(submatch, prefix, after, force, subrepos,
2219 dryrun, warnings):
2220 dryrun, warnings):
2220 ret = 1
2221 ret = 1
2221 except error.LookupError:
2222 except error.LookupError:
2222 warnings.append(_("skipping missing subrepository: %s\n")
2223 warnings.append(_("skipping missing subrepository: %s\n")
2223 % join(subpath))
2224 % join(subpath))
2224 progress.complete()
2225 progress.complete()
2225
2226
2226 # warn about failure to delete explicit files/dirs
2227 # warn about failure to delete explicit files/dirs
2227 deleteddirs = util.dirs(deleted)
2228 deleteddirs = util.dirs(deleted)
2228 files = m.files()
2229 files = m.files()
2229 progress = ui.makeprogress(_('deleting'), total=len(files),
2230 progress = ui.makeprogress(_('deleting'), total=len(files),
2230 unit=_('files'))
2231 unit=_('files'))
2231 for f in files:
2232 for f in files:
2232 def insubrepo():
2233 def insubrepo():
2233 for subpath in wctx.substate:
2234 for subpath in wctx.substate:
2234 if f.startswith(subpath + '/'):
2235 if f.startswith(subpath + '/'):
2235 return True
2236 return True
2236 return False
2237 return False
2237
2238
2238 progress.increment()
2239 progress.increment()
2239 isdir = f in deleteddirs or wctx.hasdir(f)
2240 isdir = f in deleteddirs or wctx.hasdir(f)
2240 if (f in repo.dirstate or isdir or f == '.'
2241 if (f in repo.dirstate or isdir or f == '.'
2241 or insubrepo() or f in subs):
2242 or insubrepo() or f in subs):
2242 continue
2243 continue
2243
2244
2244 if repo.wvfs.exists(f):
2245 if repo.wvfs.exists(f):
2245 if repo.wvfs.isdir(f):
2246 if repo.wvfs.isdir(f):
2246 warnings.append(_('not removing %s: no tracked files\n')
2247 warnings.append(_('not removing %s: no tracked files\n')
2247 % m.rel(f))
2248 % m.rel(f))
2248 else:
2249 else:
2249 warnings.append(_('not removing %s: file is untracked\n')
2250 warnings.append(_('not removing %s: file is untracked\n')
2250 % m.rel(f))
2251 % m.rel(f))
2251 # missing files will generate a warning elsewhere
2252 # missing files will generate a warning elsewhere
2252 ret = 1
2253 ret = 1
2253 progress.complete()
2254 progress.complete()
2254
2255
2255 if force:
2256 if force:
2256 list = modified + deleted + clean + added
2257 list = modified + deleted + clean + added
2257 elif after:
2258 elif after:
2258 list = deleted
2259 list = deleted
2259 remaining = modified + added + clean
2260 remaining = modified + added + clean
2260 progress = ui.makeprogress(_('skipping'), total=len(remaining),
2261 progress = ui.makeprogress(_('skipping'), total=len(remaining),
2261 unit=_('files'))
2262 unit=_('files'))
2262 for f in remaining:
2263 for f in remaining:
2263 progress.increment()
2264 progress.increment()
2264 if ui.verbose or (f in files):
2265 if ui.verbose or (f in files):
2265 warnings.append(_('not removing %s: file still exists\n')
2266 warnings.append(_('not removing %s: file still exists\n')
2266 % m.rel(f))
2267 % m.rel(f))
2267 ret = 1
2268 ret = 1
2268 progress.complete()
2269 progress.complete()
2269 else:
2270 else:
2270 list = deleted + clean
2271 list = deleted + clean
2271 progress = ui.makeprogress(_('skipping'),
2272 progress = ui.makeprogress(_('skipping'),
2272 total=(len(modified) + len(added)),
2273 total=(len(modified) + len(added)),
2273 unit=_('files'))
2274 unit=_('files'))
2274 for f in modified:
2275 for f in modified:
2275 progress.increment()
2276 progress.increment()
2276 warnings.append(_('not removing %s: file is modified (use -f'
2277 warnings.append(_('not removing %s: file is modified (use -f'
2277 ' to force removal)\n') % m.rel(f))
2278 ' to force removal)\n') % m.rel(f))
2278 ret = 1
2279 ret = 1
2279 for f in added:
2280 for f in added:
2280 progress.increment()
2281 progress.increment()
2281 warnings.append(_("not removing %s: file has been marked for add"
2282 warnings.append(_("not removing %s: file has been marked for add"
2282 " (use 'hg forget' to undo add)\n") % m.rel(f))
2283 " (use 'hg forget' to undo add)\n") % m.rel(f))
2283 ret = 1
2284 ret = 1
2284 progress.complete()
2285 progress.complete()
2285
2286
2286 list = sorted(list)
2287 list = sorted(list)
2287 progress = ui.makeprogress(_('deleting'), total=len(list),
2288 progress = ui.makeprogress(_('deleting'), total=len(list),
2288 unit=_('files'))
2289 unit=_('files'))
2289 for f in list:
2290 for f in list:
2290 if ui.verbose or not m.exact(f):
2291 if ui.verbose or not m.exact(f):
2291 progress.increment()
2292 progress.increment()
2292 ui.status(_('removing %s\n') % m.rel(f),
2293 ui.status(_('removing %s\n') % m.rel(f),
2293 label='addremove.removed')
2294 label='addremove.removed')
2294 progress.complete()
2295 progress.complete()
2295
2296
2296 if not dryrun:
2297 if not dryrun:
2297 with repo.wlock():
2298 with repo.wlock():
2298 if not after:
2299 if not after:
2299 for f in list:
2300 for f in list:
2300 if f in added:
2301 if f in added:
2301 continue # we never unlink added files on remove
2302 continue # we never unlink added files on remove
2302 rmdir = repo.ui.configbool('experimental',
2303 rmdir = repo.ui.configbool('experimental',
2303 'removeemptydirs')
2304 'removeemptydirs')
2304 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2305 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2305 repo[None].forget(list)
2306 repo[None].forget(list)
2306
2307
2307 if warn:
2308 if warn:
2308 for warning in warnings:
2309 for warning in warnings:
2309 ui.warn(warning)
2310 ui.warn(warning)
2310
2311
2311 return ret
2312 return ret
2312
2313
2313 def _updatecatformatter(fm, ctx, matcher, path, decode):
2314 def _updatecatformatter(fm, ctx, matcher, path, decode):
2314 """Hook for adding data to the formatter used by ``hg cat``.
2315 """Hook for adding data to the formatter used by ``hg cat``.
2315
2316
2316 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2317 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2317 this method first."""
2318 this method first."""
2318 data = ctx[path].data()
2319 data = ctx[path].data()
2319 if decode:
2320 if decode:
2320 data = ctx.repo().wwritedata(path, data)
2321 data = ctx.repo().wwritedata(path, data)
2321 fm.startitem()
2322 fm.startitem()
2322 fm.context(ctx=ctx)
2323 fm.context(ctx=ctx)
2323 fm.write('data', '%s', data)
2324 fm.write('data', '%s', data)
2324 fm.data(path=path)
2325 fm.data(path=path)
2325
2326
2326 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2327 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2327 err = 1
2328 err = 1
2328 opts = pycompat.byteskwargs(opts)
2329 opts = pycompat.byteskwargs(opts)
2329
2330
2330 def write(path):
2331 def write(path):
2331 filename = None
2332 filename = None
2332 if fntemplate:
2333 if fntemplate:
2333 filename = makefilename(ctx, fntemplate,
2334 filename = makefilename(ctx, fntemplate,
2334 pathname=os.path.join(prefix, path))
2335 pathname=os.path.join(prefix, path))
2335 # attempt to create the directory if it does not already exist
2336 # attempt to create the directory if it does not already exist
2336 try:
2337 try:
2337 os.makedirs(os.path.dirname(filename))
2338 os.makedirs(os.path.dirname(filename))
2338 except OSError:
2339 except OSError:
2339 pass
2340 pass
2340 with formatter.maybereopen(basefm, filename) as fm:
2341 with formatter.maybereopen(basefm, filename) as fm:
2341 _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))
2342 _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))
2342
2343
2343 # Automation often uses hg cat on single files, so special case it
2344 # Automation often uses hg cat on single files, so special case it
2344 # for performance to avoid the cost of parsing the manifest.
2345 # for performance to avoid the cost of parsing the manifest.
2345 if len(matcher.files()) == 1 and not matcher.anypats():
2346 if len(matcher.files()) == 1 and not matcher.anypats():
2346 file = matcher.files()[0]
2347 file = matcher.files()[0]
2347 mfl = repo.manifestlog
2348 mfl = repo.manifestlog
2348 mfnode = ctx.manifestnode()
2349 mfnode = ctx.manifestnode()
2349 try:
2350 try:
2350 if mfnode and mfl[mfnode].find(file)[0]:
2351 if mfnode and mfl[mfnode].find(file)[0]:
2351 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2352 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2352 write(file)
2353 write(file)
2353 return 0
2354 return 0
2354 except KeyError:
2355 except KeyError:
2355 pass
2356 pass
2356
2357
2357 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2358 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2358
2359
2359 for abs in ctx.walk(matcher):
2360 for abs in ctx.walk(matcher):
2360 write(abs)
2361 write(abs)
2361 err = 0
2362 err = 0
2362
2363
2363 for subpath in sorted(ctx.substate):
2364 for subpath in sorted(ctx.substate):
2364 sub = ctx.sub(subpath)
2365 sub = ctx.sub(subpath)
2365 try:
2366 try:
2366 submatch = matchmod.subdirmatcher(subpath, matcher)
2367 submatch = matchmod.subdirmatcher(subpath, matcher)
2367
2368
2368 if not sub.cat(submatch, basefm, fntemplate,
2369 if not sub.cat(submatch, basefm, fntemplate,
2369 os.path.join(prefix, sub._path),
2370 os.path.join(prefix, sub._path),
2370 **pycompat.strkwargs(opts)):
2371 **pycompat.strkwargs(opts)):
2371 err = 0
2372 err = 0
2372 except error.RepoLookupError:
2373 except error.RepoLookupError:
2373 ui.status(_("skipping missing subrepository: %s\n")
2374 ui.status(_("skipping missing subrepository: %s\n")
2374 % os.path.join(prefix, subpath))
2375 % os.path.join(prefix, subpath))
2375
2376
2376 return err
2377 return err
2377
2378
2378 def commit(ui, repo, commitfunc, pats, opts):
2379 def commit(ui, repo, commitfunc, pats, opts):
2379 '''commit the specified files or all outstanding changes'''
2380 '''commit the specified files or all outstanding changes'''
2380 date = opts.get('date')
2381 date = opts.get('date')
2381 if date:
2382 if date:
2382 opts['date'] = dateutil.parsedate(date)
2383 opts['date'] = dateutil.parsedate(date)
2383 message = logmessage(ui, opts)
2384 message = logmessage(ui, opts)
2384 matcher = scmutil.match(repo[None], pats, opts)
2385 matcher = scmutil.match(repo[None], pats, opts)
2385
2386
2386 dsguard = None
2387 dsguard = None
2387 # extract addremove carefully -- this function can be called from a command
2388 # extract addremove carefully -- this function can be called from a command
2388 # that doesn't support addremove
2389 # that doesn't support addremove
2389 if opts.get('addremove'):
2390 if opts.get('addremove'):
2390 dsguard = dirstateguard.dirstateguard(repo, 'commit')
2391 dsguard = dirstateguard.dirstateguard(repo, 'commit')
2391 with dsguard or util.nullcontextmanager():
2392 with dsguard or util.nullcontextmanager():
2392 if dsguard:
2393 if dsguard:
2393 if scmutil.addremove(repo, matcher, "", opts) != 0:
2394 if scmutil.addremove(repo, matcher, "", opts) != 0:
2394 raise error.Abort(
2395 raise error.Abort(
2395 _("failed to mark all new/missing files as added/removed"))
2396 _("failed to mark all new/missing files as added/removed"))
2396
2397
2397 return commitfunc(ui, repo, message, matcher, opts)
2398 return commitfunc(ui, repo, message, matcher, opts)
2398
2399
def samefile(f, ctx1, ctx2):
    """Report whether file ``f`` is identical in ``ctx1`` and ``ctx2``.

    A file absent from both contexts counts as "same"; a file present in
    only one of them counts as different.  When present in both, the file
    contents and flags must match.
    """
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if not in1:
        # same only when missing from both sides
        return not in2
    if not in2:
        return False
    fctx1 = ctx1.filectx(f)
    fctx2 = ctx2.filectx(f)
    return not fctx1.cmp(fctx2) and fctx1.flags() == fctx2.flags()
2410
2411
def amend(ui, repo, old, extra, pats, opts):
    """Amend changeset ``old`` with working-copy changes matching pats/opts.

    Builds a replacement memctx on top of old's first parent, commits it,
    reroutes the working-copy parent to the new node, obsoletes/strips the
    old one via scmutil.cleanupnodes, and selectively fixes up the dirstate
    for the amended files.  Returns the new node id, or ``old.node()``
    unchanged when the amend would change nothing at all.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    with repo.wlock(), repo.lock(), repo.transaction('amend'):
        # Participating changesets:
        #
        # wctx o - workingctx that contains changes from working copy
        #      |   to go into amending commit
        #      |
        # old  o - changeset to amend
        #      |
        # base o - first parent of the changeset to amend
        wctx = repo[None]

        # Copy to avoid mutating input
        extra = extra.copy()
        # Update extra dict from amended commit (e.g. to preserve graft
        # source)
        extra.update(old.extra())

        # Also update it from the wctx
        extra.update(wctx.extra())

        # Amend reuses the old changeset's user/date unless overridden.
        user = opts.get('user') or old.user()
        date = opts.get('date') or old.date()

        # Parse the date to allow comparison between date and old.date()
        date = dateutil.parsedate(date)

        if len(old.parents()) > 1:
            # ctx.files() isn't reliable for merges, so fall back to the
            # slower repo.status() method
            files = set([fn for st in base.status(old)[:3]
                         for fn in st])
        else:
            files = set(old.files())

        # add/remove the files to the working copy if the "addremove" option
        # was specified.
        matcher = scmutil.match(wctx, pats, opts)
        if (opts.get('addremove')
            and scmutil.addremove(repo, matcher, "", opts)):
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

        # Check subrepos. This depends on in-place wctx._status update in
        # subrepo.precommit(). To minimize the risk of this hack, we do
        # nothing if .hgsub does not exist.
        if '.hgsub' in wctx or '.hgsub' in old:
            subs, commitsubs, newsubstate = subrepoutil.precommit(
                ui, wctx, wctx._status, matcher)
            # amend should abort if commitsubrepos is enabled
            assert not commitsubs
            if subs:
                subrepoutil.writestate(repo, newsubstate)

        # Refuse to amend while a merge has unresolved conflicts.
        ms = mergemod.mergestate.read(repo)
        mergeutil.checkunresolved(ms)

        # Working-copy files actually pulled into the amended commit.
        filestoamend = set(f for f in wctx.files() if matcher(f))

        changes = (len(filestoamend) > 0)
        if changes:
            # Recompute copies (avoid recording a -> b -> a)
            copied = copies.pathcopies(base, wctx, matcher)
            # NOTE(review): 'old.p2' is a bound method and is therefore
            # always truthy, so this branch runs unconditionally.  It was
            # presumably meant to test for an actual second parent
            # (e.g. old.p2().node() != nullid) -- confirm upstream.
            if old.p2:
                copied.update(copies.pathcopies(old.p2(), wctx, matcher))

            # Prune files which were reverted by the updates: if old
            # introduced file X and the file was renamed in the working
            # copy, then those two files are the same and
            # we can discard X from our list of files. Likewise if X
            # was removed, it's no longer relevant. If X is missing (aka
            # deleted), old X must be preserved.
            files.update(filestoamend)
            files = [f for f in files if (not samefile(f, wctx, base)
                                          or f in wctx.deleted())]

            def filectxfn(repo, ctx_, path):
                # Supplies file contents for the replacement memctx.
                try:
                    # If the file being considered is not amongst the files
                    # to be amended, we should return the file context from the
                    # old changeset. This avoids issues when only some files in
                    # the working copy are being amended but there are also
                    # changes to other files from the old changeset.
                    if path not in filestoamend:
                        return old.filectx(path)

                    # Return None for removed files.
                    if path in wctx.removed():
                        return None

                    fctx = wctx[path]
                    flags = fctx.flags()
                    mctx = context.memfilectx(repo, ctx_,
                                              fctx.path(), fctx.data(),
                                              islink='l' in flags,
                                              isexec='x' in flags,
                                              copied=copied.get(path))
                    return mctx
                except KeyError:
                    return None
        else:
            ui.note(_('copying changeset %s to %s\n') % (old, base))

            # Use version of files as in the old cset
            def filectxfn(repo, ctx_, path):
                try:
                    return old.filectx(path)
                except KeyError:
                    return None

        # See if we got a message from -m or -l, if not, open the editor with
        # the message of the changeset to amend.
        message = logmessage(ui, opts)

        editform = mergeeditform(old, 'commit.amend')
        editor = getcommiteditor(editform=editform,
                                 **pycompat.strkwargs(opts))

        if not message:
            # No explicit message: edit the old description interactively.
            editor = getcommiteditor(edit=True, editform=editform)
            message = old.description()

        # Snapshot extra before adding amend_source, for the no-op check
        # below.
        pureextra = extra.copy()
        extra['amend_source'] = old.hex()

        new = context.memctx(repo,
                             parents=[base.node(), old.p2().node()],
                             text=message,
                             files=files,
                             filectxfn=filectxfn,
                             user=user,
                             date=date,
                             extra=extra,
                             editor=editor)

        newdesc = changelog.stripdesc(new.description())
        if ((not changes)
            and newdesc == old.description()
            and user == old.user()
            and date == old.date()
            and pureextra == old.extra()):
            # nothing changed. continuing here would create a new node
            # anyway because of the amend_source noise.
            #
            # This is not what we expect from amend.
            return old.node()

        commitphase = None
        if opts.get('secret'):
            commitphase = phases.secret
        newid = repo.commitctx(new)

        # Reroute the working copy parent to the new changeset
        repo.setparents(newid, nullid)
        mapping = {old.node(): (newid,)}
        obsmetadata = None
        if opts.get('note'):
            obsmetadata = {'note': encoding.fromlocal(opts['note'])}
        backup = ui.configbool('ui', 'history-editing-backup')
        scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata,
                             fixphase=True, targetphase=commitphase,
                             backup=backup)

        # Fixing the dirstate because localrepo.commitctx does not update
        # it. This is rather convenient because we did not need to update
        # the dirstate for all the files in the new commit which commitctx
        # could have done if it updated the dirstate. Now, we can
        # selectively update the dirstate only for the amended files.
        dirstate = repo.dirstate

        # Update the state of the files which were added and
        # modified in the amend to "normal" in the dirstate.
        normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
        for f in normalfiles:
            dirstate.normal(f)

        # Update the state of files which were removed in the amend
        # to "removed" in the dirstate.
        removedfiles = set(wctx.removed()) & filestoamend
        for f in removedfiles:
            dirstate.drop(f)

        return newid
2605
2606
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's existing description, or prompt for one via the editor.

    The editor path aborts when the message comes back unchanged.
    """
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2611
2612
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Open the user's editor to obtain a commit message for ``ctx``.

    The initial buffer comes from a matching [committemplate] config entry
    when one exists, otherwise from buildcommittext().  "HG:" helper lines
    and anything below the special diff separator are stripped from the
    result.  Raises error.Abort on an empty message, or (when
    ``unchangedmessagedetection`` is set) when the templated text comes
    back unedited.  ``finishdesc``, if given, post-processes the text
    before validation.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # Look for the most specific [committemplate] entry matching the
    # editform, dropping components from the right until one matches
    # (e.g. 'changeset.commit.amend' -> 'changeset.commit' -> 'changeset').
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        ref = '.'.join(forms)
        if repo.ui.config('committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref)
            break
        forms.pop()
    else:
        # while/else: no template configured -- build the default text
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    # NOTE(review): the chdir is not exception-safe; an abort below leaves
    # the process in repo.root -- confirm whether callers rely on cwd.
    olddir = encoding.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path, action='commit')
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    # drop the "HG:" helper lines we injected above
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2661
2662
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    """Render the [committemplate] template named ``ref`` for ``ctx``.

    Every [committemplate] config entry is exposed to the template engine,
    and the rendered text is captured from the ui buffer and returned.
    """
    ui = repo.ui
    spec = formatter.templatespec(ref, None, None)
    tmpl = logcmdutil.changesettemplater(ui, repo, spec)
    # make all [committemplate] entries available to the template
    for k, v in ui.configitems('committemplate'):
        tmpl.t.cache[k] = templater.unquotestring(v)

    extramsg = extramsg or ''  # ensure that extramsg is a string

    ui.pushbuffer()
    tmpl.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2675
2676
def hgprefix(msg):
    """Return ``msg`` with each non-empty line prefixed by "HG: ".

    Empty lines are dropped entirely.
    """
    out = []
    for line in msg.split("\n"):
        if not line:
            continue
        out.append("HG: %s" % line)
    return "\n".join(out)
2678
2679
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default plain-text editor contents for committing ``ctx``.

    The text starts with ctx's current description (if any), followed by
    "HG: "-prefixed helper lines summarizing the commit (user, branch,
    bookmark, subrepos and the file lists).
    """
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    lines = []
    add = lines.append
    if ctx.description():
        add(ctx.description())
    add("")
    add("") # Empty line between message and comments.
    add(hgprefix(_("Enter commit message."
                   " Lines beginning with 'HG:' are removed.")))
    add(hgprefix(extramsg))
    add("HG: --")
    add(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        add(hgprefix(_("branch merge")))
    if ctx.branch():
        add(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        add(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    for s in subs:
        add(hgprefix(_("subrepo %s") % s))
    for f in added:
        add(hgprefix(_("added %s") % f))
    for f in modified:
        add(hgprefix(_("changed %s") % f))
    for f in removed:
        add(hgprefix(_("removed %s") % f))
    if not (added or modified or removed):
        add(hgprefix(_("no files changed")))
    add("")

    return "\n".join(lines)
2706
2707
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print post-commit status messages for the freshly committed ``node``.

    Emits "created new head" when the commit added a head to ``branch``
    (given the pre-commit branch heads ``bheads``), "reopening closed
    branch head" when a closed head is being reopened, and the committed
    changeset id under --debug/--verbose.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N   y additional topo root
        #
        # B N   y additional branch root
        # C N   y additional topo head
        # H N   n usual case
        #
        # B B   y weird additional branch root
        # C B   y branch merge
        # H B   n merge with named branch
        #
        # C C   y additional head from merge
        # C H   n merge with a head
        #
        # H H   n head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r.rev())

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx.hex()))
    elif repo.ui.verbose:
        # verbose prints the short form of the changeset id
        repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx))
2754
2755
def postcommitstatus(repo, pats, opts):
    """Return the working-copy status limited to files matching pats/opts."""
    wctx = repo[None]
    m = scmutil.match(wctx, pats, opts)
    return repo.status(match=m)
2757
2758
2758 def revert(ui, repo, ctx, parents, *pats, **opts):
2759 def revert(ui, repo, ctx, parents, *pats, **opts):
2759 opts = pycompat.byteskwargs(opts)
2760 opts = pycompat.byteskwargs(opts)
2760 parent, p2 = parents
2761 parent, p2 = parents
2761 node = ctx.node()
2762 node = ctx.node()
2762
2763
2763 mf = ctx.manifest()
2764 mf = ctx.manifest()
2764 if node == p2:
2765 if node == p2:
2765 parent = p2
2766 parent = p2
2766
2767
2767 # need all matching names in dirstate and manifest of target rev,
2768 # need all matching names in dirstate and manifest of target rev,
2768 # so have to walk both. do not print errors if files exist in one
2769 # so have to walk both. do not print errors if files exist in one
2769 # but not other. in both cases, filesets should be evaluated against
2770 # but not other. in both cases, filesets should be evaluated against
2770 # workingctx to get consistent result (issue4497). this means 'set:**'
2771 # workingctx to get consistent result (issue4497). this means 'set:**'
2771 # cannot be used to select missing files from target rev.
2772 # cannot be used to select missing files from target rev.
2772
2773
2773 # `names` is a mapping for all elements in working copy and target revision
2774 # `names` is a mapping for all elements in working copy and target revision
2774 # The mapping is in the form:
2775 # The mapping is in the form:
2775 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2776 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2776 names = {}
2777 names = {}
2777
2778
2778 with repo.wlock():
2779 with repo.wlock():
2779 ## filling of the `names` mapping
2780 ## filling of the `names` mapping
2780 # walk dirstate to fill `names`
2781 # walk dirstate to fill `names`
2781
2782
2782 interactive = opts.get('interactive', False)
2783 interactive = opts.get('interactive', False)
2783 wctx = repo[None]
2784 wctx = repo[None]
2784 m = scmutil.match(wctx, pats, opts)
2785 m = scmutil.match(wctx, pats, opts)
2785
2786
2786 # we'll need this later
2787 # we'll need this later
2787 targetsubs = sorted(s for s in wctx.substate if m(s))
2788 targetsubs = sorted(s for s in wctx.substate if m(s))
2788
2789
2789 if not m.always():
2790 if not m.always():
2790 matcher = matchmod.badmatch(m, lambda x, y: False)
2791 matcher = matchmod.badmatch(m, lambda x, y: False)
2791 for abs in wctx.walk(matcher):
2792 for abs in wctx.walk(matcher):
2792 names[abs] = m.rel(abs), m.exact(abs)
2793 names[abs] = m.rel(abs), m.exact(abs)
2793
2794
2794 # walk target manifest to fill `names`
2795 # walk target manifest to fill `names`
2795
2796
2796 def badfn(path, msg):
2797 def badfn(path, msg):
2797 if path in names:
2798 if path in names:
2798 return
2799 return
2799 if path in ctx.substate:
2800 if path in ctx.substate:
2800 return
2801 return
2801 path_ = path + '/'
2802 path_ = path + '/'
2802 for f in names:
2803 for f in names:
2803 if f.startswith(path_):
2804 if f.startswith(path_):
2804 return
2805 return
2805 ui.warn("%s: %s\n" % (m.rel(path), msg))
2806 ui.warn("%s: %s\n" % (m.rel(path), msg))
2806
2807
2807 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2808 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2808 if abs not in names:
2809 if abs not in names:
2809 names[abs] = m.rel(abs), m.exact(abs)
2810 names[abs] = m.rel(abs), m.exact(abs)
2810
2811
2811 # Find status of all file in `names`.
2812 # Find status of all file in `names`.
2812 m = scmutil.matchfiles(repo, names)
2813 m = scmutil.matchfiles(repo, names)
2813
2814
2814 changes = repo.status(node1=node, match=m,
2815 changes = repo.status(node1=node, match=m,
2815 unknown=True, ignored=True, clean=True)
2816 unknown=True, ignored=True, clean=True)
2816 else:
2817 else:
2817 changes = repo.status(node1=node, match=m)
2818 changes = repo.status(node1=node, match=m)
2818 for kind in changes:
2819 for kind in changes:
2819 for abs in kind:
2820 for abs in kind:
2820 names[abs] = m.rel(abs), m.exact(abs)
2821 names[abs] = m.rel(abs), m.exact(abs)
2821
2822
2822 m = scmutil.matchfiles(repo, names)
2823 m = scmutil.matchfiles(repo, names)
2823
2824
2824 modified = set(changes.modified)
2825 modified = set(changes.modified)
2825 added = set(changes.added)
2826 added = set(changes.added)
2826 removed = set(changes.removed)
2827 removed = set(changes.removed)
2827 _deleted = set(changes.deleted)
2828 _deleted = set(changes.deleted)
2828 unknown = set(changes.unknown)
2829 unknown = set(changes.unknown)
2829 unknown.update(changes.ignored)
2830 unknown.update(changes.ignored)
2830 clean = set(changes.clean)
2831 clean = set(changes.clean)
2831 modadded = set()
2832 modadded = set()
2832
2833
2833 # We need to account for the state of the file in the dirstate,
2834 # We need to account for the state of the file in the dirstate,
2834 # even when we revert against something else than parent. This will
2835 # even when we revert against something else than parent. This will
2835 # slightly alter the behavior of revert (doing back up or not, delete
2836 # slightly alter the behavior of revert (doing back up or not, delete
2836 # or just forget etc).
2837 # or just forget etc).
2837 if parent == node:
2838 if parent == node:
2838 dsmodified = modified
2839 dsmodified = modified
2839 dsadded = added
2840 dsadded = added
2840 dsremoved = removed
2841 dsremoved = removed
2841 # store all local modifications, useful later for rename detection
2842 # store all local modifications, useful later for rename detection
2842 localchanges = dsmodified | dsadded
2843 localchanges = dsmodified | dsadded
2843 modified, added, removed = set(), set(), set()
2844 modified, added, removed = set(), set(), set()
2844 else:
2845 else:
2845 changes = repo.status(node1=parent, match=m)
2846 changes = repo.status(node1=parent, match=m)
2846 dsmodified = set(changes.modified)
2847 dsmodified = set(changes.modified)
2847 dsadded = set(changes.added)
2848 dsadded = set(changes.added)
2848 dsremoved = set(changes.removed)
2849 dsremoved = set(changes.removed)
2849 # store all local modifications, useful later for rename detection
2850 # store all local modifications, useful later for rename detection
2850 localchanges = dsmodified | dsadded
2851 localchanges = dsmodified | dsadded
2851
2852
2852 # only take into account for removes between wc and target
2853 # only take into account for removes between wc and target
2853 clean |= dsremoved - removed
2854 clean |= dsremoved - removed
2854 dsremoved &= removed
2855 dsremoved &= removed
2855 # distinct between dirstate remove and other
2856 # distinct between dirstate remove and other
2856 removed -= dsremoved
2857 removed -= dsremoved
2857
2858
2858 modadded = added & dsmodified
2859 modadded = added & dsmodified
2859 added -= modadded
2860 added -= modadded
2860
2861
2861 # tell newly modified apart.
2862 # tell newly modified apart.
2862 dsmodified &= modified
2863 dsmodified &= modified
2863 dsmodified |= modified & dsadded # dirstate added may need backup
2864 dsmodified |= modified & dsadded # dirstate added may need backup
2864 modified -= dsmodified
2865 modified -= dsmodified
2865
2866
2866 # We need to wait for some post-processing to update this set
2867 # We need to wait for some post-processing to update this set
2867 # before making the distinction. The dirstate will be used for
2868 # before making the distinction. The dirstate will be used for
2868 # that purpose.
2869 # that purpose.
2869 dsadded = added
2870 dsadded = added
2870
2871
2871 # in case of merge, files that are actually added can be reported as
2872 # in case of merge, files that are actually added can be reported as
2872 # modified, we need to post process the result
2873 # modified, we need to post process the result
2873 if p2 != nullid:
2874 if p2 != nullid:
2874 mergeadd = set(dsmodified)
2875 mergeadd = set(dsmodified)
2875 for path in dsmodified:
2876 for path in dsmodified:
2876 if path in mf:
2877 if path in mf:
2877 mergeadd.remove(path)
2878 mergeadd.remove(path)
2878 dsadded |= mergeadd
2879 dsadded |= mergeadd
2879 dsmodified -= mergeadd
2880 dsmodified -= mergeadd
2880
2881
2881 # if f is a rename, update `names` to also revert the source
2882 # if f is a rename, update `names` to also revert the source
2882 cwd = repo.getcwd()
2883 cwd = repo.getcwd()
2883 for f in localchanges:
2884 for f in localchanges:
2884 src = repo.dirstate.copied(f)
2885 src = repo.dirstate.copied(f)
2885 # XXX should we check for rename down to target node?
2886 # XXX should we check for rename down to target node?
2886 if src and src not in names and repo.dirstate[src] == 'r':
2887 if src and src not in names and repo.dirstate[src] == 'r':
2887 dsremoved.add(src)
2888 dsremoved.add(src)
2888 names[src] = (repo.pathto(src, cwd), True)
2889 names[src] = (repo.pathto(src, cwd), True)
2889
2890
2890 # determine the exact nature of the deleted changesets
2891 # determine the exact nature of the deleted changesets
2891 deladded = set(_deleted)
2892 deladded = set(_deleted)
2892 for path in _deleted:
2893 for path in _deleted:
2893 if path in mf:
2894 if path in mf:
2894 deladded.remove(path)
2895 deladded.remove(path)
2895 deleted = _deleted - deladded
2896 deleted = _deleted - deladded
2896
2897
2897 # distinguish between file to forget and the other
2898 # distinguish between file to forget and the other
2898 added = set()
2899 added = set()
2899 for abs in dsadded:
2900 for abs in dsadded:
2900 if repo.dirstate[abs] != 'a':
2901 if repo.dirstate[abs] != 'a':
2901 added.add(abs)
2902 added.add(abs)
2902 dsadded -= added
2903 dsadded -= added
2903
2904
2904 for abs in deladded:
2905 for abs in deladded:
2905 if repo.dirstate[abs] == 'a':
2906 if repo.dirstate[abs] == 'a':
2906 dsadded.add(abs)
2907 dsadded.add(abs)
2907 deladded -= dsadded
2908 deladded -= dsadded
2908
2909
2909 # For files marked as removed, we check if an unknown file is present at
2910 # For files marked as removed, we check if an unknown file is present at
2910 # the same path. If a such file exists it may need to be backed up.
2911 # the same path. If a such file exists it may need to be backed up.
2911 # Making the distinction at this stage helps have simpler backup
2912 # Making the distinction at this stage helps have simpler backup
2912 # logic.
2913 # logic.
2913 removunk = set()
2914 removunk = set()
2914 for abs in removed:
2915 for abs in removed:
2915 target = repo.wjoin(abs)
2916 target = repo.wjoin(abs)
2916 if os.path.lexists(target):
2917 if os.path.lexists(target):
2917 removunk.add(abs)
2918 removunk.add(abs)
2918 removed -= removunk
2919 removed -= removunk
2919
2920
2920 dsremovunk = set()
2921 dsremovunk = set()
2921 for abs in dsremoved:
2922 for abs in dsremoved:
2922 target = repo.wjoin(abs)
2923 target = repo.wjoin(abs)
2923 if os.path.lexists(target):
2924 if os.path.lexists(target):
2924 dsremovunk.add(abs)
2925 dsremovunk.add(abs)
2925 dsremoved -= dsremovunk
2926 dsremoved -= dsremovunk
2926
2927
2927 # action to be actually performed by revert
2928 # action to be actually performed by revert
2928 # (<list of file>, message>) tuple
2929 # (<list of file>, message>) tuple
2929 actions = {'revert': ([], _('reverting %s\n')),
2930 actions = {'revert': ([], _('reverting %s\n')),
2930 'add': ([], _('adding %s\n')),
2931 'add': ([], _('adding %s\n')),
2931 'remove': ([], _('removing %s\n')),
2932 'remove': ([], _('removing %s\n')),
2932 'drop': ([], _('removing %s\n')),
2933 'drop': ([], _('removing %s\n')),
2933 'forget': ([], _('forgetting %s\n')),
2934 'forget': ([], _('forgetting %s\n')),
2934 'undelete': ([], _('undeleting %s\n')),
2935 'undelete': ([], _('undeleting %s\n')),
2935 'noop': (None, _('no changes needed to %s\n')),
2936 'noop': (None, _('no changes needed to %s\n')),
2936 'unknown': (None, _('file not managed: %s\n')),
2937 'unknown': (None, _('file not managed: %s\n')),
2937 }
2938 }
2938
2939
2939 # "constant" that convey the backup strategy.
2940 # "constant" that convey the backup strategy.
2940 # All set to `discard` if `no-backup` is set do avoid checking
2941 # All set to `discard` if `no-backup` is set do avoid checking
2941 # no_backup lower in the code.
2942 # no_backup lower in the code.
2942 # These values are ordered for comparison purposes
2943 # These values are ordered for comparison purposes
2943 backupinteractive = 3 # do backup if interactively modified
2944 backupinteractive = 3 # do backup if interactively modified
2944 backup = 2 # unconditionally do backup
2945 backup = 2 # unconditionally do backup
2945 check = 1 # check if the existing file differs from target
2946 check = 1 # check if the existing file differs from target
2946 discard = 0 # never do backup
2947 discard = 0 # never do backup
2947 if opts.get('no_backup'):
2948 if opts.get('no_backup'):
2948 backupinteractive = backup = check = discard
2949 backupinteractive = backup = check = discard
2949 if interactive:
2950 if interactive:
2950 dsmodifiedbackup = backupinteractive
2951 dsmodifiedbackup = backupinteractive
2951 else:
2952 else:
2952 dsmodifiedbackup = backup
2953 dsmodifiedbackup = backup
2953 tobackup = set()
2954 tobackup = set()
2954
2955
2955 backupanddel = actions['remove']
2956 backupanddel = actions['remove']
2956 if not opts.get('no_backup'):
2957 if not opts.get('no_backup'):
2957 backupanddel = actions['drop']
2958 backupanddel = actions['drop']
2958
2959
2959 disptable = (
2960 disptable = (
2960 # dispatch table:
2961 # dispatch table:
2961 # file state
2962 # file state
2962 # action
2963 # action
2963 # make backup
2964 # make backup
2964
2965
2965 ## Sets that results that will change file on disk
2966 ## Sets that results that will change file on disk
2966 # Modified compared to target, no local change
2967 # Modified compared to target, no local change
2967 (modified, actions['revert'], discard),
2968 (modified, actions['revert'], discard),
2968 # Modified compared to target, but local file is deleted
2969 # Modified compared to target, but local file is deleted
2969 (deleted, actions['revert'], discard),
2970 (deleted, actions['revert'], discard),
2970 # Modified compared to target, local change
2971 # Modified compared to target, local change
2971 (dsmodified, actions['revert'], dsmodifiedbackup),
2972 (dsmodified, actions['revert'], dsmodifiedbackup),
2972 # Added since target
2973 # Added since target
2973 (added, actions['remove'], discard),
2974 (added, actions['remove'], discard),
2974 # Added in working directory
2975 # Added in working directory
2975 (dsadded, actions['forget'], discard),
2976 (dsadded, actions['forget'], discard),
2976 # Added since target, have local modification
2977 # Added since target, have local modification
2977 (modadded, backupanddel, backup),
2978 (modadded, backupanddel, backup),
2978 # Added since target but file is missing in working directory
2979 # Added since target but file is missing in working directory
2979 (deladded, actions['drop'], discard),
2980 (deladded, actions['drop'], discard),
2980 # Removed since target, before working copy parent
2981 # Removed since target, before working copy parent
2981 (removed, actions['add'], discard),
2982 (removed, actions['add'], discard),
2982 # Same as `removed` but an unknown file exists at the same path
2983 # Same as `removed` but an unknown file exists at the same path
2983 (removunk, actions['add'], check),
2984 (removunk, actions['add'], check),
2984 # Removed since targe, marked as such in working copy parent
2985 # Removed since targe, marked as such in working copy parent
2985 (dsremoved, actions['undelete'], discard),
2986 (dsremoved, actions['undelete'], discard),
2986 # Same as `dsremoved` but an unknown file exists at the same path
2987 # Same as `dsremoved` but an unknown file exists at the same path
2987 (dsremovunk, actions['undelete'], check),
2988 (dsremovunk, actions['undelete'], check),
2988 ## the following sets does not result in any file changes
2989 ## the following sets does not result in any file changes
2989 # File with no modification
2990 # File with no modification
2990 (clean, actions['noop'], discard),
2991 (clean, actions['noop'], discard),
2991 # Existing file, not tracked anywhere
2992 # Existing file, not tracked anywhere
2992 (unknown, actions['unknown'], discard),
2993 (unknown, actions['unknown'], discard),
2993 )
2994 )
2994
2995
2995 for abs, (rel, exact) in sorted(names.items()):
2996 for abs, (rel, exact) in sorted(names.items()):
2996 # target file to be touch on disk (relative to cwd)
2997 # target file to be touch on disk (relative to cwd)
2997 target = repo.wjoin(abs)
2998 target = repo.wjoin(abs)
2998 # search the entry in the dispatch table.
2999 # search the entry in the dispatch table.
2999 # if the file is in any of these sets, it was touched in the working
3000 # if the file is in any of these sets, it was touched in the working
3000 # directory parent and we are sure it needs to be reverted.
3001 # directory parent and we are sure it needs to be reverted.
3001 for table, (xlist, msg), dobackup in disptable:
3002 for table, (xlist, msg), dobackup in disptable:
3002 if abs not in table:
3003 if abs not in table:
3003 continue
3004 continue
3004 if xlist is not None:
3005 if xlist is not None:
3005 xlist.append(abs)
3006 xlist.append(abs)
3006 if dobackup:
3007 if dobackup:
3007 # If in interactive mode, don't automatically create
3008 # If in interactive mode, don't automatically create
3008 # .orig files (issue4793)
3009 # .orig files (issue4793)
3009 if dobackup == backupinteractive:
3010 if dobackup == backupinteractive:
3010 tobackup.add(abs)
3011 tobackup.add(abs)
3011 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3012 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3012 bakname = scmutil.origpath(ui, repo, rel)
3013 bakname = scmutil.origpath(ui, repo, rel)
3013 ui.note(_('saving current version of %s as %s\n') %
3014 ui.note(_('saving current version of %s as %s\n') %
3014 (rel, bakname))
3015 (rel, bakname))
3015 if not opts.get('dry_run'):
3016 if not opts.get('dry_run'):
3016 if interactive:
3017 if interactive:
3017 util.copyfile(target, bakname)
3018 util.copyfile(target, bakname)
3018 else:
3019 else:
3019 util.rename(target, bakname)
3020 util.rename(target, bakname)
3020 if opts.get('dry_run'):
3021 if opts.get('dry_run'):
3021 if ui.verbose or not exact:
3022 if ui.verbose or not exact:
3022 ui.status(msg % rel)
3023 ui.status(msg % rel)
3023 elif exact:
3024 elif exact:
3024 ui.warn(msg % rel)
3025 ui.warn(msg % rel)
3025 break
3026 break
3026
3027
3027 if not opts.get('dry_run'):
3028 if not opts.get('dry_run'):
3028 needdata = ('revert', 'add', 'undelete')
3029 needdata = ('revert', 'add', 'undelete')
3029 oplist = [actions[name][0] for name in needdata]
3030 oplist = [actions[name][0] for name in needdata]
3030 prefetch = scmutil.prefetchfiles
3031 prefetch = scmutil.prefetchfiles
3031 matchfiles = scmutil.matchfiles
3032 matchfiles = scmutil.matchfiles
3032 prefetch(repo, [ctx.rev()],
3033 prefetch(repo, [ctx.rev()],
3033 matchfiles(repo,
3034 matchfiles(repo,
3034 [f for sublist in oplist for f in sublist]))
3035 [f for sublist in oplist for f in sublist]))
3035 _performrevert(repo, parents, ctx, names, actions, interactive,
3036 _performrevert(repo, parents, ctx, names, actions, interactive,
3036 tobackup)
3037 tobackup)
3037
3038
3038 if targetsubs:
3039 if targetsubs:
3039 # Revert the subrepos on the revert list
3040 # Revert the subrepos on the revert list
3040 for sub in targetsubs:
3041 for sub in targetsubs:
3041 try:
3042 try:
3042 wctx.sub(sub).revert(ctx.substate[sub], *pats,
3043 wctx.sub(sub).revert(ctx.substate[sub], *pats,
3043 **pycompat.strkwargs(opts))
3044 **pycompat.strkwargs(opts))
3044 except KeyError:
3045 except KeyError:
3045 raise error.Abort("subrepository '%s' does not exist in %s!"
3046 raise error.Abort("subrepository '%s' does not exist in %s!"
3046 % (sub, short(ctx.node())))
3047 % (sub, short(ctx.node())))
3047
3048
3048 def _performrevert(repo, parents, ctx, names, actions, interactive=False,
3049 def _performrevert(repo, parents, ctx, names, actions, interactive=False,
3049 tobackup=None):
3050 tobackup=None):
3050 """function that actually perform all the actions computed for revert
3051 """function that actually perform all the actions computed for revert
3051
3052
3052 This is an independent function to let extension to plug in and react to
3053 This is an independent function to let extension to plug in and react to
3053 the imminent revert.
3054 the imminent revert.
3054
3055
3055 Make sure you have the working directory locked when calling this function.
3056 Make sure you have the working directory locked when calling this function.
3056 """
3057 """
3057 parent, p2 = parents
3058 parent, p2 = parents
3058 node = ctx.node()
3059 node = ctx.node()
3059 excluded_files = []
3060 excluded_files = []
3060
3061
3061 def checkout(f):
3062 def checkout(f):
3062 fc = ctx[f]
3063 fc = ctx[f]
3063 repo.wwrite(f, fc.data(), fc.flags())
3064 repo.wwrite(f, fc.data(), fc.flags())
3064
3065
3065 def doremove(f):
3066 def doremove(f):
3066 try:
3067 try:
3067 rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
3068 rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
3068 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3069 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3069 except OSError:
3070 except OSError:
3070 pass
3071 pass
3071 repo.dirstate.remove(f)
3072 repo.dirstate.remove(f)
3072
3073
3073 def prntstatusmsg(action, f):
3074 def prntstatusmsg(action, f):
3074 rel, exact = names[f]
3075 rel, exact = names[f]
3075 if repo.ui.verbose or not exact:
3076 if repo.ui.verbose or not exact:
3076 repo.ui.status(actions[action][1] % rel)
3077 repo.ui.status(actions[action][1] % rel)
3077
3078
3078 audit_path = pathutil.pathauditor(repo.root, cached=True)
3079 audit_path = pathutil.pathauditor(repo.root, cached=True)
3079 for f in actions['forget'][0]:
3080 for f in actions['forget'][0]:
3080 if interactive:
3081 if interactive:
3081 choice = repo.ui.promptchoice(
3082 choice = repo.ui.promptchoice(
3082 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3083 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3083 if choice == 0:
3084 if choice == 0:
3084 prntstatusmsg('forget', f)
3085 prntstatusmsg('forget', f)
3085 repo.dirstate.drop(f)
3086 repo.dirstate.drop(f)
3086 else:
3087 else:
3087 excluded_files.append(f)
3088 excluded_files.append(f)
3088 else:
3089 else:
3089 prntstatusmsg('forget', f)
3090 prntstatusmsg('forget', f)
3090 repo.dirstate.drop(f)
3091 repo.dirstate.drop(f)
3091 for f in actions['remove'][0]:
3092 for f in actions['remove'][0]:
3092 audit_path(f)
3093 audit_path(f)
3093 if interactive:
3094 if interactive:
3094 choice = repo.ui.promptchoice(
3095 choice = repo.ui.promptchoice(
3095 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3096 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3096 if choice == 0:
3097 if choice == 0:
3097 prntstatusmsg('remove', f)
3098 prntstatusmsg('remove', f)
3098 doremove(f)
3099 doremove(f)
3099 else:
3100 else:
3100 excluded_files.append(f)
3101 excluded_files.append(f)
3101 else:
3102 else:
3102 prntstatusmsg('remove', f)
3103 prntstatusmsg('remove', f)
3103 doremove(f)
3104 doremove(f)
3104 for f in actions['drop'][0]:
3105 for f in actions['drop'][0]:
3105 audit_path(f)
3106 audit_path(f)
3106 prntstatusmsg('drop', f)
3107 prntstatusmsg('drop', f)
3107 repo.dirstate.remove(f)
3108 repo.dirstate.remove(f)
3108
3109
3109 normal = None
3110 normal = None
3110 if node == parent:
3111 if node == parent:
3111 # We're reverting to our parent. If possible, we'd like status
3112 # We're reverting to our parent. If possible, we'd like status
3112 # to report the file as clean. We have to use normallookup for
3113 # to report the file as clean. We have to use normallookup for
3113 # merges to avoid losing information about merged/dirty files.
3114 # merges to avoid losing information about merged/dirty files.
3114 if p2 != nullid:
3115 if p2 != nullid:
3115 normal = repo.dirstate.normallookup
3116 normal = repo.dirstate.normallookup
3116 else:
3117 else:
3117 normal = repo.dirstate.normal
3118 normal = repo.dirstate.normal
3118
3119
3119 newlyaddedandmodifiedfiles = set()
3120 newlyaddedandmodifiedfiles = set()
3120 if interactive:
3121 if interactive:
3121 # Prompt the user for changes to revert
3122 # Prompt the user for changes to revert
3122 torevert = [f for f in actions['revert'][0] if f not in excluded_files]
3123 torevert = [f for f in actions['revert'][0] if f not in excluded_files]
3123 m = scmutil.matchfiles(repo, torevert)
3124 m = scmutil.matchfiles(repo, torevert)
3124 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3125 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3125 diffopts.nodates = True
3126 diffopts.nodates = True
3126 diffopts.git = True
3127 diffopts.git = True
3127 operation = 'discard'
3128 operation = 'discard'
3128 reversehunks = True
3129 reversehunks = True
3129 if node != parent:
3130 if node != parent:
3130 operation = 'apply'
3131 operation = 'apply'
3131 reversehunks = False
3132 reversehunks = False
3132 if reversehunks:
3133 if reversehunks:
3133 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3134 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3134 else:
3135 else:
3135 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3136 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3136 originalchunks = patch.parsepatch(diff)
3137 originalchunks = patch.parsepatch(diff)
3137
3138
3138 try:
3139 try:
3139
3140
3140 chunks, opts = recordfilter(repo.ui, originalchunks,
3141 chunks, opts = recordfilter(repo.ui, originalchunks,
3141 operation=operation)
3142 operation=operation)
3142 if reversehunks:
3143 if reversehunks:
3143 chunks = patch.reversehunks(chunks)
3144 chunks = patch.reversehunks(chunks)
3144
3145
3145 except error.PatchError as err:
3146 except error.PatchError as err:
3146 raise error.Abort(_('error parsing patch: %s') % err)
3147 raise error.Abort(_('error parsing patch: %s') % err)
3147
3148
3148 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3149 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3149 if tobackup is None:
3150 if tobackup is None:
3150 tobackup = set()
3151 tobackup = set()
3151 # Apply changes
3152 # Apply changes
3152 fp = stringio()
3153 fp = stringio()
3153 # chunks are serialized per file, but files aren't sorted
3154 # chunks are serialized per file, but files aren't sorted
3154 for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
3155 for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
3155 prntstatusmsg('revert', f)
3156 prntstatusmsg('revert', f)
3156 for c in chunks:
3157 for c in chunks:
3157 if ishunk(c):
3158 if ishunk(c):
3158 abs = c.header.filename()
3159 abs = c.header.filename()
3159 # Create a backup file only if this hunk should be backed up
3160 # Create a backup file only if this hunk should be backed up
3160 if c.header.filename() in tobackup:
3161 if c.header.filename() in tobackup:
3161 target = repo.wjoin(abs)
3162 target = repo.wjoin(abs)
3162 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3163 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3163 util.copyfile(target, bakname)
3164 util.copyfile(target, bakname)
3164 tobackup.remove(abs)
3165 tobackup.remove(abs)
3165 c.write(fp)
3166 c.write(fp)
3166 dopatch = fp.tell()
3167 dopatch = fp.tell()
3167 fp.seek(0)
3168 fp.seek(0)
3168 if dopatch:
3169 if dopatch:
3169 try:
3170 try:
3170 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3171 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3171 except error.PatchError as err:
3172 except error.PatchError as err:
3172 raise error.Abort(pycompat.bytestr(err))
3173 raise error.Abort(pycompat.bytestr(err))
3173 del fp
3174 del fp
3174 else:
3175 else:
3175 for f in actions['revert'][0]:
3176 for f in actions['revert'][0]:
3176 prntstatusmsg('revert', f)
3177 prntstatusmsg('revert', f)
3177 checkout(f)
3178 checkout(f)
3178 if normal:
3179 if normal:
3179 normal(f)
3180 normal(f)
3180
3181
3181 for f in actions['add'][0]:
3182 for f in actions['add'][0]:
3182 # Don't checkout modified files, they are already created by the diff
3183 # Don't checkout modified files, they are already created by the diff
3183 if f not in newlyaddedandmodifiedfiles:
3184 if f not in newlyaddedandmodifiedfiles:
3184 prntstatusmsg('add', f)
3185 prntstatusmsg('add', f)
3185 checkout(f)
3186 checkout(f)
3186 repo.dirstate.add(f)
3187 repo.dirstate.add(f)
3187
3188
3188 normal = repo.dirstate.normallookup
3189 normal = repo.dirstate.normallookup
3189 if node == parent and p2 == nullid:
3190 if node == parent and p2 == nullid:
3190 normal = repo.dirstate.normal
3191 normal = repo.dirstate.normal
3191 for f in actions['undelete'][0]:
3192 for f in actions['undelete'][0]:
3192 prntstatusmsg('undelete', f)
3193 prntstatusmsg('undelete', f)
3193 checkout(f)
3194 checkout(f)
3194 normal(f)
3195 normal(f)
3195
3196
3196 copied = copies.pathcopies(repo[parent], ctx)
3197 copied = copies.pathcopies(repo[parent], ctx)
3197
3198
3198 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3199 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3199 if f in copied:
3200 if f in copied:
3200 repo.dirstate.copy(copied[f], f)
3201 repo.dirstate.copy(copied[f], f)
3201
3202
3202 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3203 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3203 # commands.outgoing. "missing" is "missing" of the result of
3204 # commands.outgoing. "missing" is "missing" of the result of
3204 # "findcommonoutgoing()"
3205 # "findcommonoutgoing()"
3205 outgoinghooks = util.hooks()
3206 outgoinghooks = util.hooks()
3206
3207
3207 # a list of (ui, repo) functions called by commands.summary
3208 # a list of (ui, repo) functions called by commands.summary
3208 summaryhooks = util.hooks()
3209 summaryhooks = util.hooks()
3209
3210
3210 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3211 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3211 #
3212 #
3212 # functions should return tuple of booleans below, if 'changes' is None:
3213 # functions should return tuple of booleans below, if 'changes' is None:
3213 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3214 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3214 #
3215 #
3215 # otherwise, 'changes' is a tuple of tuples below:
3216 # otherwise, 'changes' is a tuple of tuples below:
3216 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3217 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3217 # - (desturl, destbranch, destpeer, outgoing)
3218 # - (desturl, destbranch, destpeer, outgoing)
3218 summaryremotehooks = util.hooks()
3219 summaryremotehooks = util.hooks()
3219
3220
3220 # A list of state files kept by multistep operations like graft.
3221 # A list of state files kept by multistep operations like graft.
3221 # Since graft cannot be aborted, it is considered 'clearable' by update.
3222 # Since graft cannot be aborted, it is considered 'clearable' by update.
3222 # note: bisect is intentionally excluded
3223 # note: bisect is intentionally excluded
3223 # (state file, clearable, allowcommit, error, hint)
3224 # (state file, clearable, allowcommit, error, hint)
3224 unfinishedstates = [
3225 unfinishedstates = [
3225 ('graftstate', True, False, _('graft in progress'),
3226 ('graftstate', True, False, _('graft in progress'),
3226 _("use 'hg graft --continue' or 'hg graft --stop' to stop")),
3227 _("use 'hg graft --continue' or 'hg graft --stop' to stop")),
3227 ('updatestate', True, False, _('last update was interrupted'),
3228 ('updatestate', True, False, _('last update was interrupted'),
3228 _("use 'hg update' to get a consistent checkout"))
3229 _("use 'hg update' to get a consistent checkout"))
3229 ]
3230 ]
3230
3231
3231 def checkunfinished(repo, commit=False):
3232 def checkunfinished(repo, commit=False):
3232 '''Look for an unfinished multistep operation, like graft, and abort
3233 '''Look for an unfinished multistep operation, like graft, and abort
3233 if found. It's probably good to check this right before
3234 if found. It's probably good to check this right before
3234 bailifchanged().
3235 bailifchanged().
3235 '''
3236 '''
3236 # Check for non-clearable states first, so things like rebase will take
3237 # Check for non-clearable states first, so things like rebase will take
3237 # precedence over update.
3238 # precedence over update.
3238 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3239 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3239 if clearable or (commit and allowcommit):
3240 if clearable or (commit and allowcommit):
3240 continue
3241 continue
3241 if repo.vfs.exists(f):
3242 if repo.vfs.exists(f):
3242 raise error.Abort(msg, hint=hint)
3243 raise error.Abort(msg, hint=hint)
3243
3244
3244 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3245 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3245 if not clearable or (commit and allowcommit):
3246 if not clearable or (commit and allowcommit):
3246 continue
3247 continue
3247 if repo.vfs.exists(f):
3248 if repo.vfs.exists(f):
3248 raise error.Abort(msg, hint=hint)
3249 raise error.Abort(msg, hint=hint)
3249
3250
3250 def clearunfinished(repo):
3251 def clearunfinished(repo):
3251 '''Check for unfinished operations (as above), and clear the ones
3252 '''Check for unfinished operations (as above), and clear the ones
3252 that are clearable.
3253 that are clearable.
3253 '''
3254 '''
3254 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3255 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3255 if not clearable and repo.vfs.exists(f):
3256 if not clearable and repo.vfs.exists(f):
3256 raise error.Abort(msg, hint=hint)
3257 raise error.Abort(msg, hint=hint)
3257 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3258 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3258 if clearable and repo.vfs.exists(f):
3259 if clearable and repo.vfs.exists(f):
3259 util.unlink(repo.vfs.join(f))
3260 util.unlink(repo.vfs.join(f))
3260
3261
3261 afterresolvedstates = [
3262 afterresolvedstates = [
3262 ('graftstate',
3263 ('graftstate',
3263 _('hg graft --continue')),
3264 _('hg graft --continue')),
3264 ]
3265 ]
3265
3266
3266 def howtocontinue(repo):
3267 def howtocontinue(repo):
3267 '''Check for an unfinished operation and return the command to finish
3268 '''Check for an unfinished operation and return the command to finish
3268 it.
3269 it.
3269
3270
3270 afterresolvedstates tuples define a .hg/{file} and the corresponding
3271 afterresolvedstates tuples define a .hg/{file} and the corresponding
3271 command needed to finish it.
3272 command needed to finish it.
3272
3273
3273 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3274 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3274 a boolean.
3275 a boolean.
3275 '''
3276 '''
3276 contmsg = _("continue: %s")
3277 contmsg = _("continue: %s")
3277 for f, msg in afterresolvedstates:
3278 for f, msg in afterresolvedstates:
3278 if repo.vfs.exists(f):
3279 if repo.vfs.exists(f):
3279 return contmsg % msg, True
3280 return contmsg % msg, True
3280 if repo[None].dirty(missing=True, merge=False, branch=False):
3281 if repo[None].dirty(missing=True, merge=False, branch=False):
3281 return contmsg % _("hg commit"), False
3282 return contmsg % _("hg commit"), False
3282 return None, None
3283 return None, None
3283
3284
3284 def checkafterresolved(repo):
3285 def checkafterresolved(repo):
3285 '''Inform the user about the next action after completing hg resolve
3286 '''Inform the user about the next action after completing hg resolve
3286
3287
3287 If there's a matching afterresolvedstates, howtocontinue will yield
3288 If there's a matching afterresolvedstates, howtocontinue will yield
3288 repo.ui.warn as the reporter.
3289 repo.ui.warn as the reporter.
3289
3290
3290 Otherwise, it will yield repo.ui.note.
3291 Otherwise, it will yield repo.ui.note.
3291 '''
3292 '''
3292 msg, warning = howtocontinue(repo)
3293 msg, warning = howtocontinue(repo)
3293 if msg is not None:
3294 if msg is not None:
3294 if warning:
3295 if warning:
3295 repo.ui.warn("%s\n" % msg)
3296 repo.ui.warn("%s\n" % msg)
3296 else:
3297 else:
3297 repo.ui.note("%s\n" % msg)
3298 repo.ui.note("%s\n" % msg)
3298
3299
3299 def wrongtooltocontinue(repo, task):
3300 def wrongtooltocontinue(repo, task):
3300 '''Raise an abort suggesting how to properly continue if there is an
3301 '''Raise an abort suggesting how to properly continue if there is an
3301 active task.
3302 active task.
3302
3303
3303 Uses howtocontinue() to find the active task.
3304 Uses howtocontinue() to find the active task.
3304
3305
3305 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3306 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3306 a hint.
3307 a hint.
3307 '''
3308 '''
3308 after = howtocontinue(repo)
3309 after = howtocontinue(repo)
3309 hint = None
3310 hint = None
3310 if after[1]:
3311 if after[1]:
3311 hint = after[0]
3312 hint = after[0]
3312 raise error.Abort(_('no %s in progress') % task, hint=hint)
3313 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,1801 +1,1802 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16 import subprocess
16 import subprocess
17 import weakref
17 import weakref
18
18
19 from .i18n import _
19 from .i18n import _
20 from .node import (
20 from .node import (
21 bin,
21 bin,
22 hex,
22 hex,
23 nullid,
23 nullid,
24 nullrev,
24 nullrev,
25 short,
25 short,
26 wdirid,
26 wdirid,
27 wdirrev,
27 wdirrev,
28 )
28 )
29
29
30 from . import (
30 from . import (
31 encoding,
31 encoding,
32 error,
32 error,
33 match as matchmod,
33 match as matchmod,
34 obsolete,
34 obsolete,
35 obsutil,
35 obsutil,
36 pathutil,
36 pathutil,
37 phases,
37 phases,
38 policy,
38 policy,
39 pycompat,
39 pycompat,
40 revsetlang,
40 revsetlang,
41 similar,
41 similar,
42 smartset,
42 smartset,
43 url,
43 url,
44 util,
44 util,
45 vfs,
45 vfs,
46 )
46 )
47
47
48 from .utils import (
48 from .utils import (
49 procutil,
49 procutil,
50 stringutil,
50 stringutil,
51 )
51 )
52
52
53 if pycompat.iswindows:
53 if pycompat.iswindows:
54 from . import scmwindows as scmplatform
54 from . import scmwindows as scmplatform
55 else:
55 else:
56 from . import scmposix as scmplatform
56 from . import scmposix as scmplatform
57
57
58 parsers = policy.importmod(r'parsers')
58 parsers = policy.importmod(r'parsers')
59
59
60 termsize = scmplatform.termsize
60 termsize = scmplatform.termsize
61
61
class status(tuple):
    '''Named tuple with a list of files per status.

    The 'deleted', 'unknown' and 'ignored' properties are only relevant to
    the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        entries = (modified, added, removed, deleted, unknown, ignored,
                   clean)
        return tuple.__new__(cls, entries)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
115
115
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a (subpath, ctx) mapping preferring subpaths from ctx1. The
    # subpaths from ctx2 matter when the .hgsub file has been modified (in
    # ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # subpaths present only in ctx2 are dropped from the mapping and handled
    # separately below
    missing = set(ctx2.substate) - set(ctx1.substate)
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That
    # way, status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
140
140
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in excluded or []:
        ctx = repo[n]
        # count excluded changesets that are secret and still alive
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
157
157
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # dump a traceback (if enabled) before the handlers below
            # translate the exception into a user-facing message
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % inst.locker
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
    except error.InterventionRequired as inst:
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        # InterventionRequired is a "success with follow-up" state, not -1
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except IOError as inst:
        if util.safehasattr(inst, "code"):
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"):
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # broken pipe on output is an expected way for a pager/reader
            # to go away; stay quiet
            pass
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.error(_("abort: %s: %s\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else:
            raise
    except OSError as inst:
        if getattr(inst, "filename", None) is not None:
            ui.error(_("abort: %s: '%s'\n") % (
                encoding.strtolocal(inst.strerror),
                stringutil.forcebytestr(inst.filename)))
        else:
            ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code
    except socket.error as inst:
        ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst.args[-1]))

    return -1
274
274
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl is not acceptable as a new label name.'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(c))
    try:
        int(lbl)
        raise error.Abort(_("cannot use an integer as a name"))
    except ValueError:
        # not an integer: that's the acceptable case
        pass
    if lbl.strip() != lbl:
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
291
291
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # newline characters would corrupt the dirstate/manifest line format
    if '\r' in f or '\n' in f:
        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                          % pycompat.bytestr(f))
297
297
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        # config says "ignore": nothing further to check
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
309
309
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    raw = ui.config('ui', 'portablefilenames')
    lowered = raw.lower()
    asbool = stringutil.parsebool(raw)
    # Windows always aborts on unportable names
    abort = pycompat.iswindows or lowered == 'abort'
    warn = asbool or lowered == 'warn'
    if asbool is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % raw)
    return abort, warn
322
322
class casecollisionauditor(object):
    '''Warn or abort about filenames that case-fold onto an existing
    tracked filename.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # lowering one big '\0'-joined string means a single encoding.lower()
        # call instead of one per tracked file
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
346
346
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    key = None
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%d;' % rev)
        key = s.digest()
    return key
370
370
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only propagate walk errors for the top-level path itself
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat; return False when an equivalent dir was
            # already seen (symlink cycle protection)
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # without samestat we cannot detect cycles, so don't follow symlinks
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
414
414
def binnode(ctx):
    """Return binary node id for a given basectx"""
    node = ctx.node()
    # the working directory has no real node; substitute the magic wdir id
    return wdirid if node is None else node
421
421
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    rev = ctx.rev()
    # the working directory has no revision number; substitute wdirrev
    return wdirrev if rev is None else rev
429
429
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    return formatrevnode(ctx.repo().ui, intrev(ctx), binnode(ctx))
435
435
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # full hash in debug mode, abbreviated hash otherwise
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
443
443
def resolvehexnodeidprefix(repo, prefix):
    '''Resolve a (possibly 'x'-prefixed) hex nodeid prefix to a full node.

    Returns None when nothing matches; re-raises on ambiguity unless the
    experimental disambiguation revset narrows it to a single match.
    '''
    if (prefix.startswith('x') and
        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous/
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
                matches = [n for n in (repo.changelog.node(r) for r in revs)
                           if hex(n).startswith(prefix)]
                if len(matches) == 1:
                    return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node)  # make sure node isn't filtered
    return node
472
472
def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        i = int(prefix)
    except ValueError:
        return False
    # a leading zero is never parsed as a revnum, nor is anything at or
    # beyond the number of revisions in the repo
    if prefix[0:1] == b'0' or i >= len(repo):
        return False
    return True
485
485
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            # 'x' is not a hex digit, so 'x' + prefix can never be read as
            # a revision number
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        # otherwise keep extending the prefix until it can no longer be
        # mistaken for a revision number
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        # disambiguate only against the configured subset of revisions
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                # native fast path: the nodetree knows the shortest
                # unambiguous length directly
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            # pure-Python fallback: lengthen the prefix until it matches
            # exactly one node within the revset
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
554
554
def isrevsymbol(repo, symbol):
    """Return True if ``symbol`` resolves to a revision in ``repo``.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
566
566
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".

    Resolution is attempted in this order: special names ('.', 'tip',
    'null'), integer revision numbers, full 40-digit hex nodeids,
    registered names (via repo.names), and finally unambiguous hex nodeid
    prefixes.  Raises error.RepoLookupError if nothing matches (or a
    filtered variant built by _filterederror() when the revision exists
    but is filtered out).
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        try:
            r = int(symbol)
            # reject forms int() accepts but we don't, e.g. '01' or '+5'
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            # negative numbers count backwards from tip
            if r < 0:
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        # maybe a full 40-digit hex nodeid
        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # last resort: treat the symbol as a hex nodeid prefix
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)
627
627
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = revsymbol(unfilteredrepo, changeid)

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    # other filters (e.g. 'served') get a generic message naming the subset
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)
652
652
def revsingle(repo, revspec, default='.', localalias=None):
    """Resolve ``revspec`` to a single changectx.

    Falls back to ``default`` when the spec is empty (but not when it is
    the integer 0, which is a valid revision number).  Aborts when the
    revset matches nothing; otherwise returns the last matched revision.
    """
    if not revspec and revspec != 0:
        return repo[default]
    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
661
661
def _pairspec(revspec):
    """Return whether the top-level revset operator is a range expression.

    A range spec denotes a pair of revisions even when both endpoints
    resolve to the same revision.
    """
    parsed = revsetlang.parse(revspec)
    if not parsed:
        return parsed
    return parsed[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
665
665
def revpair(repo, revs):
    """Resolve ``revs`` (a list of user revset strings) to a pair of contexts.

    With no input, returns (working copy parent, working copy).  Aborts on
    an empty resolved range or when one side of a multi-spec input is
    empty.  A single non-range spec pairs the revision with the working
    copy context.
    """
    if not revs:
        return repo['.'], repo[None]

    rset = revrange(repo, revs)
    if not rset:
        first = None
        second = None
    elif rset.isascending():
        first, second = rset.min(), rset.max()
    elif rset.isdescending():
        first, second = rset.max(), rset.min()
    else:
        first, second = rset.first(), rset.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    if first == second:
        if (len(revs) >= 2
            and not all(revrange(repo, [r]) for r in revs)):
            raise error.Abort(_('empty revision on one side of range'))
        # if top-level is range expression, the result must always be a pair
        if len(revs) == 1 and not _pairspec(revs[0]):
            return repo[first], repo[None]

    return repo[first], repo[second]
695
695
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using
    user-specified config options, such as revset aliases.

    The revsets in ``specs`` are OR'ed together; an empty ``specs`` yields
    an empty result.  Integer entries are interpreted as revision numbers.
    Each entry is assumed to be an already-formatted revset; run arguments
    through ``revsetlang.formatspec()`` first if they need expanding.

    Returns a ``revset.abstractsmartset``, a list-like interface over
    integer revisions.
    """
    def _asrevset(spec):
        # bare integers are shorthand for revision numbers
        if isinstance(spec, int):
            return revsetlang.formatspec('rev(%d)', spec)
        return spec

    allspecs = [_asrevset(spec) for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
723
723
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    Merges (two non-nullrev parents) always report both parents.  In debug
    mode a linear changeset reports its first parent alongside the null
    revision.  Otherwise the sole parent is reported only when it is not
    the immediately preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo[nullrev]]
    onlyparent = parents[0]
    if onlyparent.rev() >= intrev(ctx) - 1:
        return []
    return parents
739
739
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume sh has already done the expansion.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # an explicit pattern kind ('re:', 'glob:', ...) is kept as-is
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            # not a valid glob; keep the pattern verbatim
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            expanded.append(kindpat)
    return expanded
758
758
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # a single empty pattern means "no patterns at all"
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # on windows, expand bare globs that the shell did not expand
        pats = expandpats(pats or [])

    def bad(f, msg):
        # 'm' is the matcher created below; the closure is only invoked
        # after it has been bound
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # a match-all matcher needs no pattern list
        pats = []
    return m, pats
783
783
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    matcher, _unusedpats = matchandpats(ctx, pats, opts, globbed, default,
                                        badfn=badfn)
    return matcher
788
788
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
792
792
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
796
796
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.

    A plain path is canonicalized directly; an actual pattern must match
    exactly one file in revision ``rev``, otherwise ParseError(msg) is
    raised.
    """
    if not matchmod.patkind(pat):
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)
    ctx = repo[rev]
    m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    matched = [f for f in ctx if m(f)]
    if len(matched) == 1:
        return matched[0]
    raise error.ParseError(msg)
810
810
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified
    '''
    origbackuppath = ui.config('ui', 'origbackuppath')
    if not origbackuppath:
        return filepath + ".orig"

    # Convert filepath from an absolute path into a path inside the repo.
    filepathfromroot = util.normpath(os.path.relpath(filepath,
                                                     start=repo.root))

    # the backup lives under <repo working dir>/<origbackuppath>
    origvfs = vfs.vfs(repo.wjoin(origbackuppath))
    origbackupdir = origvfs.dirname(filepathfromroot)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepathfromroot))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                # only the deepest existing conflict needs removing; its
                # ancestors are directories (or absent)
                break

        origvfs.makedirs(origbackupdir)

    # a directory (but not a symlink) at the backup path itself also
    # conflicts and is removed wholesale
    if origvfs.isdir(filepathfromroot) and not origvfs.islink(filepathfromroot):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepathfromroot))
        origvfs.rmtree(filepathfromroot, forcibly=True)

    return origvfs.join(filepathfromroot)
846
846
847 class _containsnode(object):
847 class _containsnode(object):
848 """proxy __contains__(node) to container.__contains__ which accepts revs"""
848 """proxy __contains__(node) to container.__contains__ which accepts revs"""
849
849
850 def __init__(self, repo, revcontainer):
850 def __init__(self, repo, revcontainer):
851 self._torev = repo.changelog.rev
851 self._torev = repo.changelog.rev
852 self._revcontains = revcontainer.__contains__
852 self._revcontains = revcontainer.__contains__
853
853
854 def __contains__(self, node):
854 def __contains__(self, node):
855 return self._revcontains(self._torev(node))
855 return self._revcontains(self._torev(node))
856
856
def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
                 fixphase=False, targetphase=None, backup=True):
    """do common cleanups when old nodes are replaced by new nodes

    That includes writing obsmarkers or stripping nodes, and moving bookmarks.
    (we might also want to move working directory parent in the future)

    By default, bookmark moves are calculated automatically from 'replacements',
    but 'moves' can be used to override that. Also, 'moves' may include
    additional bookmark moves that should not have associated obsmarkers.

    replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
    have replacements. operation is a string, like "rebase".

    metadata is dictionary containing metadata to be stored in obsmarker if
    obsolescence is enabled.
    """
    # targetphase is only meaningful when phases are being fixed up
    assert fixphase or targetphase is None
    if not replacements and not moves:
        return

    # translate mapping's other forms
    if not util.safehasattr(replacements, 'items'):
        # a plain iterable of nodes: they have no successors
        replacements = {(n,): () for n in replacements}
    else:
        # upgrading non tuple "source" to tuple ones for BC
        repls = {}
        for key, value in replacements.items():
            if not isinstance(key, tuple):
                key = (key,)
            repls[key] = value
        replacements = repls

    # Calculate bookmark movements
    if moves is None:
        moves = {}
    # Unfiltered repo is needed since nodes in replacements might be hidden.
    unfi = repo.unfiltered()
    for oldnodes, newnodes in replacements.items():
        for oldnode in oldnodes:
            # caller-provided moves take precedence
            if oldnode in moves:
                continue
            if len(newnodes) > 1:
                # usually a split, take the one with biggest rev number
                newnode = next(unfi.set('max(%ln)', newnodes)).node()
            elif len(newnodes) == 0:
                # move bookmark backwards
                allreplaced = []
                for rep in replacements:
                    allreplaced.extend(rep)
                roots = list(unfi.set('max((::%n) - %ln)', oldnode,
                                      allreplaced))
                if roots:
                    newnode = roots[0].node()
                else:
                    newnode = nullid
            else:
                newnode = newnodes[0]
            moves[oldnode] = newnode

    allnewnodes = [n for ns in replacements.values() for n in ns]
    toretract = {}
    toadvance = {}
    if fixphase:
        # map each new node to the old nodes it replaces
        precursors = {}
        for oldnodes, newnodes in replacements.items():
            for oldnode in oldnodes:
                for newnode in newnodes:
                    precursors.setdefault(newnode, []).append(oldnode)

        # process new nodes in revision order so parent phases computed
        # below are already final
        allnewnodes.sort(key=lambda n: unfi[n].rev())
        newphases = {}
        def phase(ctx):
            return newphases.get(ctx.node(), ctx.phase())
        for newnode in allnewnodes:
            ctx = unfi[newnode]
            # a node can never be in an earlier phase than its parents
            parentphase = max(phase(p) for p in ctx.parents())
            if targetphase is None:
                oldphase = max(unfi[oldnode].phase()
                               for oldnode in precursors[newnode])
                newphase = max(oldphase, parentphase)
            else:
                newphase = max(targetphase, parentphase)
            newphases[newnode] = newphase
            if newphase > ctx.phase():
                toretract.setdefault(newphase, []).append(newnode)
            elif newphase < ctx.phase():
                toadvance.setdefault(newphase, []).append(newnode)

    with repo.transaction('cleanup') as tr:
        # Move bookmarks
        bmarks = repo._bookmarks
        bmarkchanges = []
        for oldnode, newnode in moves.items():
            oldbmarks = repo.nodebookmarks(oldnode)
            if not oldbmarks:
                continue
            from . import bookmarks # avoid import cycle
            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
                           hex(oldnode), hex(newnode)))
            # Delete divergent bookmarks being parents of related newnodes
            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
                                   allnewnodes, newnode, oldnode)
            deletenodes = _containsnode(repo, deleterevs)
            for name in oldbmarks:
                bmarkchanges.append((name, newnode))
                for b in bookmarks.divergent2delete(repo, deletenodes, name):
                    bmarkchanges.append((b, None))

        if bmarkchanges:
            bmarks.applychanges(repo, tr, bmarkchanges)

        # apply the phase movements computed above (if fixphase was set)
        for phase, nodes in toretract.items():
            phases.retractboundary(repo, tr, phase, nodes)
        for phase, nodes in toadvance.items():
            phases.advanceboundary(repo, tr, phase, nodes)

        # Obsolete or strip nodes
        if obsolete.isenabled(repo, obsolete.createmarkersopt):
            # If a node is already obsoleted, and we want to obsolete it
            # without a successor, skip that obssolete request since it's
            # unnecessary. That's the "if s or not isobs(n)" check below.
            # Also sort the node in topology order, that might be useful for
            # some obsstore logic.
            # NOTE: the sorting might belong to createmarkers.
            torev = unfi.changelog.rev
            sortfunc = lambda ns: torev(ns[0][0])
            rels = []
            for ns, s in sorted(replacements.items(), key=sortfunc):
                rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                rels.append(rel)
            if rels:
                obsolete.createmarkers(repo, rels, operation=operation,
                                       metadata=metadata)
        else:
            # obsolescence markers are disabled: strip the old nodes instead
            from . import repair # avoid import cycle
            tostrip = list(n for ns in replacements for n in ns)
            if tostrip:
                repair.delayedstrip(repo.ui, repo, tostrip, operation,
                                    backup=backup)
998
998
999 def addremove(repo, matcher, prefix, opts=None):
999 def addremove(repo, matcher, prefix, opts=None):
1000 if opts is None:
1000 if opts is None:
1001 opts = {}
1001 opts = {}
1002 m = matcher
1002 m = matcher
1003 dry_run = opts.get('dry_run')
1003 dry_run = opts.get('dry_run')
1004 try:
1004 try:
1005 similarity = float(opts.get('similarity') or 0)
1005 similarity = float(opts.get('similarity') or 0)
1006 except ValueError:
1006 except ValueError:
1007 raise error.Abort(_('similarity must be a number'))
1007 raise error.Abort(_('similarity must be a number'))
1008 if similarity < 0 or similarity > 100:
1008 if similarity < 0 or similarity > 100:
1009 raise error.Abort(_('similarity must be between 0 and 100'))
1009 raise error.Abort(_('similarity must be between 0 and 100'))
1010 similarity /= 100.0
1010 similarity /= 100.0
1011
1011
1012 ret = 0
1012 ret = 0
1013 join = lambda f: os.path.join(prefix, f)
1013 join = lambda f: os.path.join(prefix, f)
1014
1014
1015 wctx = repo[None]
1015 wctx = repo[None]
1016 for subpath in sorted(wctx.substate):
1016 for subpath in sorted(wctx.substate):
1017 submatch = matchmod.subdirmatcher(subpath, m)
1017 submatch = matchmod.subdirmatcher(subpath, m)
1018 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1018 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
1019 sub = wctx.sub(subpath)
1019 sub = wctx.sub(subpath)
1020 try:
1020 try:
1021 if sub.addremove(submatch, prefix, opts):
1021 if sub.addremove(submatch, prefix, opts):
1022 ret = 1
1022 ret = 1
1023 except error.LookupError:
1023 except error.LookupError:
1024 repo.ui.status(_("skipping missing subrepository: %s\n")
1024 repo.ui.status(_("skipping missing subrepository: %s\n")
1025 % join(subpath))
1025 % join(subpath))
1026
1026
1027 rejected = []
1027 rejected = []
1028 def badfn(f, msg):
1028 def badfn(f, msg):
1029 if f in m.files():
1029 if f in m.files():
1030 m.bad(f, msg)
1030 m.bad(f, msg)
1031 rejected.append(f)
1031 rejected.append(f)
1032
1032
1033 badmatch = matchmod.badmatch(m, badfn)
1033 badmatch = matchmod.badmatch(m, badfn)
1034 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1034 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
1035 badmatch)
1035 badmatch)
1036
1036
1037 unknownset = set(unknown + forgotten)
1037 unknownset = set(unknown + forgotten)
1038 toprint = unknownset.copy()
1038 toprint = unknownset.copy()
1039 toprint.update(deleted)
1039 toprint.update(deleted)
1040 for abs in sorted(toprint):
1040 for abs in sorted(toprint):
1041 if repo.ui.verbose or not m.exact(abs):
1041 if repo.ui.verbose or not m.exact(abs):
1042 if abs in unknownset:
1042 if abs in unknownset:
1043 status = _('adding %s\n') % m.uipath(abs)
1043 status = _('adding %s\n') % m.uipath(abs)
1044 label = 'addremove.added'
1044 label = 'addremove.added'
1045 else:
1045 else:
1046 status = _('removing %s\n') % m.uipath(abs)
1046 status = _('removing %s\n') % m.uipath(abs)
1047 label = 'addremove.removed'
1047 label = 'addremove.removed'
1048 repo.ui.status(status, label=label)
1048 repo.ui.status(status, label=label)
1049
1049
1050 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1050 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1051 similarity)
1051 similarity)
1052
1052
1053 if not dry_run:
1053 if not dry_run:
1054 _markchanges(repo, unknown + forgotten, deleted, renames)
1054 _markchanges(repo, unknown + forgotten, deleted, renames)
1055
1055
1056 for f in rejected:
1056 for f in rejected:
1057 if f in m.files():
1057 if f in m.files():
1058 return 1
1058 return 1
1059 return ret
1059 return ret
1060
1060
1061 def marktouched(repo, files, similarity=0.0):
1061 def marktouched(repo, files, similarity=0.0):
1062 '''Assert that files have somehow been operated upon. files are relative to
1062 '''Assert that files have somehow been operated upon. files are relative to
1063 the repo root.'''
1063 the repo root.'''
1064 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1064 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
1065 rejected = []
1065 rejected = []
1066
1066
1067 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1067 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
1068
1068
1069 if repo.ui.verbose:
1069 if repo.ui.verbose:
1070 unknownset = set(unknown + forgotten)
1070 unknownset = set(unknown + forgotten)
1071 toprint = unknownset.copy()
1071 toprint = unknownset.copy()
1072 toprint.update(deleted)
1072 toprint.update(deleted)
1073 for abs in sorted(toprint):
1073 for abs in sorted(toprint):
1074 if abs in unknownset:
1074 if abs in unknownset:
1075 status = _('adding %s\n') % abs
1075 status = _('adding %s\n') % abs
1076 else:
1076 else:
1077 status = _('removing %s\n') % abs
1077 status = _('removing %s\n') % abs
1078 repo.ui.status(status)
1078 repo.ui.status(status)
1079
1079
1080 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1080 renames = _findrenames(repo, m, added + unknown, removed + deleted,
1081 similarity)
1081 similarity)
1082
1082
1083 _markchanges(repo, unknown + forgotten, deleted, renames)
1083 _markchanges(repo, unknown + forgotten, deleted, renames)
1084
1084
1085 for f in rejected:
1085 for f in rejected:
1086 if f in m.files():
1086 if f in m.files():
1087 return 1
1087 return 1
1088 return 0
1088 return 0
1089
1089
1090 def _interestingfiles(repo, matcher):
1090 def _interestingfiles(repo, matcher):
1091 '''Walk dirstate with matcher, looking for files that addremove would care
1091 '''Walk dirstate with matcher, looking for files that addremove would care
1092 about.
1092 about.
1093
1093
1094 This is different from dirstate.status because it doesn't care about
1094 This is different from dirstate.status because it doesn't care about
1095 whether files are modified or clean.'''
1095 whether files are modified or clean.'''
1096 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1096 added, unknown, deleted, removed, forgotten = [], [], [], [], []
1097 audit_path = pathutil.pathauditor(repo.root, cached=True)
1097 audit_path = pathutil.pathauditor(repo.root, cached=True)
1098
1098
1099 ctx = repo[None]
1099 ctx = repo[None]
1100 dirstate = repo.dirstate
1100 dirstate = repo.dirstate
1101 matcher = repo.narrowmatch(matcher, includeexact=True)
1101 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1102 walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
1102 unknown=True, ignored=False, full=False)
1103 unknown=True, ignored=False, full=False)
1103 for abs, st in walkresults.iteritems():
1104 for abs, st in walkresults.iteritems():
1104 dstate = dirstate[abs]
1105 dstate = dirstate[abs]
1105 if dstate == '?' and audit_path.check(abs):
1106 if dstate == '?' and audit_path.check(abs):
1106 unknown.append(abs)
1107 unknown.append(abs)
1107 elif dstate != 'r' and not st:
1108 elif dstate != 'r' and not st:
1108 deleted.append(abs)
1109 deleted.append(abs)
1109 elif dstate == 'r' and st:
1110 elif dstate == 'r' and st:
1110 forgotten.append(abs)
1111 forgotten.append(abs)
1111 # for finding renames
1112 # for finding renames
1112 elif dstate == 'r' and not st:
1113 elif dstate == 'r' and not st:
1113 removed.append(abs)
1114 removed.append(abs)
1114 elif dstate == 'a':
1115 elif dstate == 'a':
1115 added.append(abs)
1116 added.append(abs)
1116
1117
1117 return added, unknown, deleted, removed, forgotten
1118 return added, unknown, deleted, removed, forgotten
1118
1119
1119 def _findrenames(repo, matcher, added, removed, similarity):
1120 def _findrenames(repo, matcher, added, removed, similarity):
1120 '''Find renames from removed files to added ones.'''
1121 '''Find renames from removed files to added ones.'''
1121 renames = {}
1122 renames = {}
1122 if similarity > 0:
1123 if similarity > 0:
1123 for old, new, score in similar.findrenames(repo, added, removed,
1124 for old, new, score in similar.findrenames(repo, added, removed,
1124 similarity):
1125 similarity):
1125 if (repo.ui.verbose or not matcher.exact(old)
1126 if (repo.ui.verbose or not matcher.exact(old)
1126 or not matcher.exact(new)):
1127 or not matcher.exact(new)):
1127 repo.ui.status(_('recording removal of %s as rename to %s '
1128 repo.ui.status(_('recording removal of %s as rename to %s '
1128 '(%d%% similar)\n') %
1129 '(%d%% similar)\n') %
1129 (matcher.rel(old), matcher.rel(new),
1130 (matcher.rel(old), matcher.rel(new),
1130 score * 100))
1131 score * 100))
1131 renames[new] = old
1132 renames[new] = old
1132 return renames
1133 return renames
1133
1134
1134 def _markchanges(repo, unknown, deleted, renames):
1135 def _markchanges(repo, unknown, deleted, renames):
1135 '''Marks the files in unknown as added, the files in deleted as removed,
1136 '''Marks the files in unknown as added, the files in deleted as removed,
1136 and the files in renames as copied.'''
1137 and the files in renames as copied.'''
1137 wctx = repo[None]
1138 wctx = repo[None]
1138 with repo.wlock():
1139 with repo.wlock():
1139 wctx.forget(deleted)
1140 wctx.forget(deleted)
1140 wctx.add(unknown)
1141 wctx.add(unknown)
1141 for new, old in renames.iteritems():
1142 for new, old in renames.iteritems():
1142 wctx.copy(old, new)
1143 wctx.copy(old, new)
1143
1144
1144 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1145 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
1145 """Update the dirstate to reflect the intent of copying src to dst. For
1146 """Update the dirstate to reflect the intent of copying src to dst. For
1146 different reasons it might not end with dst being marked as copied from src.
1147 different reasons it might not end with dst being marked as copied from src.
1147 """
1148 """
1148 origsrc = repo.dirstate.copied(src) or src
1149 origsrc = repo.dirstate.copied(src) or src
1149 if dst == origsrc: # copying back a copy?
1150 if dst == origsrc: # copying back a copy?
1150 if repo.dirstate[dst] not in 'mn' and not dryrun:
1151 if repo.dirstate[dst] not in 'mn' and not dryrun:
1151 repo.dirstate.normallookup(dst)
1152 repo.dirstate.normallookup(dst)
1152 else:
1153 else:
1153 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1154 if repo.dirstate[origsrc] == 'a' and origsrc == src:
1154 if not ui.quiet:
1155 if not ui.quiet:
1155 ui.warn(_("%s has not been committed yet, so no copy "
1156 ui.warn(_("%s has not been committed yet, so no copy "
1156 "data will be stored for %s.\n")
1157 "data will be stored for %s.\n")
1157 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1158 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
1158 if repo.dirstate[dst] in '?r' and not dryrun:
1159 if repo.dirstate[dst] in '?r' and not dryrun:
1159 wctx.add([dst])
1160 wctx.add([dst])
1160 elif not dryrun:
1161 elif not dryrun:
1161 wctx.copy(origsrc, dst)
1162 wctx.copy(origsrc, dst)
1162
1163
1163 def writerequires(opener, requirements):
1164 def writerequires(opener, requirements):
1164 with opener('requires', 'w') as fp:
1165 with opener('requires', 'w') as fp:
1165 for r in sorted(requirements):
1166 for r in sorted(requirements):
1166 fp.write("%s\n" % r)
1167 fp.write("%s\n" % r)
1167
1168
1168 class filecachesubentry(object):
1169 class filecachesubentry(object):
1169 def __init__(self, path, stat):
1170 def __init__(self, path, stat):
1170 self.path = path
1171 self.path = path
1171 self.cachestat = None
1172 self.cachestat = None
1172 self._cacheable = None
1173 self._cacheable = None
1173
1174
1174 if stat:
1175 if stat:
1175 self.cachestat = filecachesubentry.stat(self.path)
1176 self.cachestat = filecachesubentry.stat(self.path)
1176
1177
1177 if self.cachestat:
1178 if self.cachestat:
1178 self._cacheable = self.cachestat.cacheable()
1179 self._cacheable = self.cachestat.cacheable()
1179 else:
1180 else:
1180 # None means we don't know yet
1181 # None means we don't know yet
1181 self._cacheable = None
1182 self._cacheable = None
1182
1183
1183 def refresh(self):
1184 def refresh(self):
1184 if self.cacheable():
1185 if self.cacheable():
1185 self.cachestat = filecachesubentry.stat(self.path)
1186 self.cachestat = filecachesubentry.stat(self.path)
1186
1187
1187 def cacheable(self):
1188 def cacheable(self):
1188 if self._cacheable is not None:
1189 if self._cacheable is not None:
1189 return self._cacheable
1190 return self._cacheable
1190
1191
1191 # we don't know yet, assume it is for now
1192 # we don't know yet, assume it is for now
1192 return True
1193 return True
1193
1194
1194 def changed(self):
1195 def changed(self):
1195 # no point in going further if we can't cache it
1196 # no point in going further if we can't cache it
1196 if not self.cacheable():
1197 if not self.cacheable():
1197 return True
1198 return True
1198
1199
1199 newstat = filecachesubentry.stat(self.path)
1200 newstat = filecachesubentry.stat(self.path)
1200
1201
1201 # we may not know if it's cacheable yet, check again now
1202 # we may not know if it's cacheable yet, check again now
1202 if newstat and self._cacheable is None:
1203 if newstat and self._cacheable is None:
1203 self._cacheable = newstat.cacheable()
1204 self._cacheable = newstat.cacheable()
1204
1205
1205 # check again
1206 # check again
1206 if not self._cacheable:
1207 if not self._cacheable:
1207 return True
1208 return True
1208
1209
1209 if self.cachestat != newstat:
1210 if self.cachestat != newstat:
1210 self.cachestat = newstat
1211 self.cachestat = newstat
1211 return True
1212 return True
1212 else:
1213 else:
1213 return False
1214 return False
1214
1215
1215 @staticmethod
1216 @staticmethod
1216 def stat(path):
1217 def stat(path):
1217 try:
1218 try:
1218 return util.cachestat(path)
1219 return util.cachestat(path)
1219 except OSError as e:
1220 except OSError as e:
1220 if e.errno != errno.ENOENT:
1221 if e.errno != errno.ENOENT:
1221 raise
1222 raise
1222
1223
1223 class filecacheentry(object):
1224 class filecacheentry(object):
1224 def __init__(self, paths, stat=True):
1225 def __init__(self, paths, stat=True):
1225 self._entries = []
1226 self._entries = []
1226 for path in paths:
1227 for path in paths:
1227 self._entries.append(filecachesubentry(path, stat))
1228 self._entries.append(filecachesubentry(path, stat))
1228
1229
1229 def changed(self):
1230 def changed(self):
1230 '''true if any entry has changed'''
1231 '''true if any entry has changed'''
1231 for entry in self._entries:
1232 for entry in self._entries:
1232 if entry.changed():
1233 if entry.changed():
1233 return True
1234 return True
1234 return False
1235 return False
1235
1236
1236 def refresh(self):
1237 def refresh(self):
1237 for entry in self._entries:
1238 for entry in self._entries:
1238 entry.refresh()
1239 entry.refresh()
1239
1240
1240 class filecache(object):
1241 class filecache(object):
1241 """A property like decorator that tracks files under .hg/ for updates.
1242 """A property like decorator that tracks files under .hg/ for updates.
1242
1243
1243 On first access, the files defined as arguments are stat()ed and the
1244 On first access, the files defined as arguments are stat()ed and the
1244 results cached. The decorated function is called. The results are stashed
1245 results cached. The decorated function is called. The results are stashed
1245 away in a ``_filecache`` dict on the object whose method is decorated.
1246 away in a ``_filecache`` dict on the object whose method is decorated.
1246
1247
1247 On subsequent access, the cached result is returned.
1248 On subsequent access, the cached result is returned.
1248
1249
1249 On external property set operations, stat() calls are performed and the new
1250 On external property set operations, stat() calls are performed and the new
1250 value is cached.
1251 value is cached.
1251
1252
1252 On property delete operations, cached data is removed.
1253 On property delete operations, cached data is removed.
1253
1254
1254 When using the property API, cached data is always returned, if available:
1255 When using the property API, cached data is always returned, if available:
1255 no stat() is performed to check if the file has changed and if the function
1256 no stat() is performed to check if the file has changed and if the function
1256 needs to be called to reflect file changes.
1257 needs to be called to reflect file changes.
1257
1258
1258 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1259 Others can muck about with the state of the ``_filecache`` dict. e.g. they
1259 can populate an entry before the property's getter is called. In this case,
1260 can populate an entry before the property's getter is called. In this case,
1260 entries in ``_filecache`` will be used during property operations,
1261 entries in ``_filecache`` will be used during property operations,
1261 if available. If the underlying file changes, it is up to external callers
1262 if available. If the underlying file changes, it is up to external callers
1262 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1263 to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
1263 method result as well as possibly calling ``del obj._filecache[attr]`` to
1264 method result as well as possibly calling ``del obj._filecache[attr]`` to
1264 remove the ``filecacheentry``.
1265 remove the ``filecacheentry``.
1265 """
1266 """
1266
1267
1267 def __init__(self, *paths):
1268 def __init__(self, *paths):
1268 self.paths = paths
1269 self.paths = paths
1269
1270
1270 def join(self, obj, fname):
1271 def join(self, obj, fname):
1271 """Used to compute the runtime path of a cached file.
1272 """Used to compute the runtime path of a cached file.
1272
1273
1273 Users should subclass filecache and provide their own version of this
1274 Users should subclass filecache and provide their own version of this
1274 function to call the appropriate join function on 'obj' (an instance
1275 function to call the appropriate join function on 'obj' (an instance
1275 of the class that its member function was decorated).
1276 of the class that its member function was decorated).
1276 """
1277 """
1277 raise NotImplementedError
1278 raise NotImplementedError
1278
1279
1279 def __call__(self, func):
1280 def __call__(self, func):
1280 self.func = func
1281 self.func = func
1281 self.sname = func.__name__
1282 self.sname = func.__name__
1282 self.name = pycompat.sysbytes(self.sname)
1283 self.name = pycompat.sysbytes(self.sname)
1283 return self
1284 return self
1284
1285
1285 def __get__(self, obj, type=None):
1286 def __get__(self, obj, type=None):
1286 # if accessed on the class, return the descriptor itself.
1287 # if accessed on the class, return the descriptor itself.
1287 if obj is None:
1288 if obj is None:
1288 return self
1289 return self
1289 # do we need to check if the file changed?
1290 # do we need to check if the file changed?
1290 if self.sname in obj.__dict__:
1291 if self.sname in obj.__dict__:
1291 assert self.name in obj._filecache, self.name
1292 assert self.name in obj._filecache, self.name
1292 return obj.__dict__[self.sname]
1293 return obj.__dict__[self.sname]
1293
1294
1294 entry = obj._filecache.get(self.name)
1295 entry = obj._filecache.get(self.name)
1295
1296
1296 if entry:
1297 if entry:
1297 if entry.changed():
1298 if entry.changed():
1298 entry.obj = self.func(obj)
1299 entry.obj = self.func(obj)
1299 else:
1300 else:
1300 paths = [self.join(obj, path) for path in self.paths]
1301 paths = [self.join(obj, path) for path in self.paths]
1301
1302
1302 # We stat -before- creating the object so our cache doesn't lie if
1303 # We stat -before- creating the object so our cache doesn't lie if
1303 # a writer modified between the time we read and stat
1304 # a writer modified between the time we read and stat
1304 entry = filecacheentry(paths, True)
1305 entry = filecacheentry(paths, True)
1305 entry.obj = self.func(obj)
1306 entry.obj = self.func(obj)
1306
1307
1307 obj._filecache[self.name] = entry
1308 obj._filecache[self.name] = entry
1308
1309
1309 obj.__dict__[self.sname] = entry.obj
1310 obj.__dict__[self.sname] = entry.obj
1310 return entry.obj
1311 return entry.obj
1311
1312
1312 def __set__(self, obj, value):
1313 def __set__(self, obj, value):
1313 if self.name not in obj._filecache:
1314 if self.name not in obj._filecache:
1314 # we add an entry for the missing value because X in __dict__
1315 # we add an entry for the missing value because X in __dict__
1315 # implies X in _filecache
1316 # implies X in _filecache
1316 paths = [self.join(obj, path) for path in self.paths]
1317 paths = [self.join(obj, path) for path in self.paths]
1317 ce = filecacheentry(paths, False)
1318 ce = filecacheentry(paths, False)
1318 obj._filecache[self.name] = ce
1319 obj._filecache[self.name] = ce
1319 else:
1320 else:
1320 ce = obj._filecache[self.name]
1321 ce = obj._filecache[self.name]
1321
1322
1322 ce.obj = value # update cached copy
1323 ce.obj = value # update cached copy
1323 obj.__dict__[self.sname] = value # update copy returned by obj.x
1324 obj.__dict__[self.sname] = value # update copy returned by obj.x
1324
1325
1325 def __delete__(self, obj):
1326 def __delete__(self, obj):
1326 try:
1327 try:
1327 del obj.__dict__[self.sname]
1328 del obj.__dict__[self.sname]
1328 except KeyError:
1329 except KeyError:
1329 raise AttributeError(self.sname)
1330 raise AttributeError(self.sname)
1330
1331
1331 def extdatasource(repo, source):
1332 def extdatasource(repo, source):
1332 """Gather a map of rev -> value dict from the specified source
1333 """Gather a map of rev -> value dict from the specified source
1333
1334
1334 A source spec is treated as a URL, with a special case shell: type
1335 A source spec is treated as a URL, with a special case shell: type
1335 for parsing the output from a shell command.
1336 for parsing the output from a shell command.
1336
1337
1337 The data is parsed as a series of newline-separated records where
1338 The data is parsed as a series of newline-separated records where
1338 each record is a revision specifier optionally followed by a space
1339 each record is a revision specifier optionally followed by a space
1339 and a freeform string value. If the revision is known locally, it
1340 and a freeform string value. If the revision is known locally, it
1340 is converted to a rev, otherwise the record is skipped.
1341 is converted to a rev, otherwise the record is skipped.
1341
1342
1342 Note that both key and value are treated as UTF-8 and converted to
1343 Note that both key and value are treated as UTF-8 and converted to
1343 the local encoding. This allows uniformity between local and
1344 the local encoding. This allows uniformity between local and
1344 remote data sources.
1345 remote data sources.
1345 """
1346 """
1346
1347
1347 spec = repo.ui.config("extdata", source)
1348 spec = repo.ui.config("extdata", source)
1348 if not spec:
1349 if not spec:
1349 raise error.Abort(_("unknown extdata source '%s'") % source)
1350 raise error.Abort(_("unknown extdata source '%s'") % source)
1350
1351
1351 data = {}
1352 data = {}
1352 src = proc = None
1353 src = proc = None
1353 try:
1354 try:
1354 if spec.startswith("shell:"):
1355 if spec.startswith("shell:"):
1355 # external commands should be run relative to the repo root
1356 # external commands should be run relative to the repo root
1356 cmd = spec[6:]
1357 cmd = spec[6:]
1357 proc = subprocess.Popen(procutil.tonativestr(cmd),
1358 proc = subprocess.Popen(procutil.tonativestr(cmd),
1358 shell=True, bufsize=-1,
1359 shell=True, bufsize=-1,
1359 close_fds=procutil.closefds,
1360 close_fds=procutil.closefds,
1360 stdout=subprocess.PIPE,
1361 stdout=subprocess.PIPE,
1361 cwd=procutil.tonativestr(repo.root))
1362 cwd=procutil.tonativestr(repo.root))
1362 src = proc.stdout
1363 src = proc.stdout
1363 else:
1364 else:
1364 # treat as a URL or file
1365 # treat as a URL or file
1365 src = url.open(repo.ui, spec)
1366 src = url.open(repo.ui, spec)
1366 for l in src:
1367 for l in src:
1367 if " " in l:
1368 if " " in l:
1368 k, v = l.strip().split(" ", 1)
1369 k, v = l.strip().split(" ", 1)
1369 else:
1370 else:
1370 k, v = l.strip(), ""
1371 k, v = l.strip(), ""
1371
1372
1372 k = encoding.tolocal(k)
1373 k = encoding.tolocal(k)
1373 try:
1374 try:
1374 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1375 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
1375 except (error.LookupError, error.RepoLookupError):
1376 except (error.LookupError, error.RepoLookupError):
1376 pass # we ignore data for nodes that don't exist locally
1377 pass # we ignore data for nodes that don't exist locally
1377 finally:
1378 finally:
1378 if proc:
1379 if proc:
1379 proc.communicate()
1380 proc.communicate()
1380 if src:
1381 if src:
1381 src.close()
1382 src.close()
1382 if proc and proc.returncode != 0:
1383 if proc and proc.returncode != 0:
1383 raise error.Abort(_("extdata command '%s' failed: %s")
1384 raise error.Abort(_("extdata command '%s' failed: %s")
1384 % (cmd, procutil.explainexit(proc.returncode)))
1385 % (cmd, procutil.explainexit(proc.returncode)))
1385
1386
1386 return data
1387 return data
1387
1388
1388 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1389 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1389 if lock is None:
1390 if lock is None:
1390 raise error.LockInheritanceContractViolation(
1391 raise error.LockInheritanceContractViolation(
1391 'lock can only be inherited while held')
1392 'lock can only be inherited while held')
1392 if environ is None:
1393 if environ is None:
1393 environ = {}
1394 environ = {}
1394 with lock.inherit() as locker:
1395 with lock.inherit() as locker:
1395 environ[envvar] = locker
1396 environ[envvar] = locker
1396 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1397 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1397
1398
1398 def wlocksub(repo, cmd, *args, **kwargs):
1399 def wlocksub(repo, cmd, *args, **kwargs):
1399 """run cmd as a subprocess that allows inheriting repo's wlock
1400 """run cmd as a subprocess that allows inheriting repo's wlock
1400
1401
1401 This can only be called while the wlock is held. This takes all the
1402 This can only be called while the wlock is held. This takes all the
1402 arguments that ui.system does, and returns the exit code of the
1403 arguments that ui.system does, and returns the exit code of the
1403 subprocess."""
1404 subprocess."""
1404 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1405 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1405 **kwargs)
1406 **kwargs)
1406
1407
1407 class progress(object):
1408 class progress(object):
1408 def __init__(self, ui, topic, unit="", total=None):
1409 def __init__(self, ui, topic, unit="", total=None):
1409 self.ui = ui
1410 self.ui = ui
1410 self.pos = 0
1411 self.pos = 0
1411 self.topic = topic
1412 self.topic = topic
1412 self.unit = unit
1413 self.unit = unit
1413 self.total = total
1414 self.total = total
1414
1415
1415 def __enter__(self):
1416 def __enter__(self):
1416 return self
1417 return self
1417
1418
1418 def __exit__(self, exc_type, exc_value, exc_tb):
1419 def __exit__(self, exc_type, exc_value, exc_tb):
1419 self.complete()
1420 self.complete()
1420
1421
1421 def update(self, pos, item="", total=None):
1422 def update(self, pos, item="", total=None):
1422 assert pos is not None
1423 assert pos is not None
1423 if total:
1424 if total:
1424 self.total = total
1425 self.total = total
1425 self.pos = pos
1426 self.pos = pos
1426 self._print(item)
1427 self._print(item)
1427
1428
1428 def increment(self, step=1, item="", total=None):
1429 def increment(self, step=1, item="", total=None):
1429 self.update(self.pos + step, item, total)
1430 self.update(self.pos + step, item, total)
1430
1431
1431 def complete(self):
1432 def complete(self):
1432 self.ui.progress(self.topic, None)
1433 self.ui.progress(self.topic, None)
1433
1434
1434 def _print(self, item):
1435 def _print(self, item):
1435 self.ui.progress(self.topic, self.pos, item, self.unit,
1436 self.ui.progress(self.topic, self.pos, item, self.unit,
1436 self.total)
1437 self.total)
1437
1438
1438 def gdinitconfig(ui):
1439 def gdinitconfig(ui):
1439 """helper function to know if a repo should be created as general delta
1440 """helper function to know if a repo should be created as general delta
1440 """
1441 """
1441 # experimental config: format.generaldelta
1442 # experimental config: format.generaldelta
1442 return (ui.configbool('format', 'generaldelta')
1443 return (ui.configbool('format', 'generaldelta')
1443 or ui.configbool('format', 'usegeneraldelta')
1444 or ui.configbool('format', 'usegeneraldelta')
1444 or ui.configbool('format', 'sparse-revlog'))
1445 or ui.configbool('format', 'sparse-revlog'))
1445
1446
1446 def gddeltaconfig(ui):
1447 def gddeltaconfig(ui):
1447 """helper function to know if incoming delta should be optimised
1448 """helper function to know if incoming delta should be optimised
1448 """
1449 """
1449 # experimental config: format.generaldelta
1450 # experimental config: format.generaldelta
1450 return ui.configbool('format', 'generaldelta')
1451 return ui.configbool('format', 'generaldelta')
1451
1452
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""

    # reserved pseudo-key under which read() reports a non-key-value first
    # line when 'firstlinenonkeyval' is requested
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # 'keys' is accepted for interface compatibility but is unused here
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        result = {}
        if firstlinenonkeyval:
            if not lines:
                raise error.CorruptedState(_("empty simplekeyvalue file"))
            # we don't want to include '\n' in the __firstline
            result[self.firstlinekey] = lines[0][:-1]
            lines = lines[1:]
        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            pairs = [line[:-1].split('=', 1) for line in lines
                     if line.strip()]
            parsed = dict(pairs)
            if self.firstlinekey in parsed:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            result.update(parsed)
        except ValueError as e:
            # a malformed line (no '=') surfaces here via dict()
            raise error.CorruptedState(str(e))
        return result

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for key, value in data.items():
            # validate each pair before emitting it
            if key == self.firstlinekey:
                raise error.ProgrammingError(
                    "key name '%s' is reserved" % self.firstlinekey)
            if not key[0:1].isalpha():
                raise error.ProgrammingError(
                    "keys must start with a letter in a key-value file")
            if not key.isalnum():
                raise error.ProgrammingError(
                    "invalid key name in a simple key-value file")
            if '\n' in value:
                raise error.ProgrammingError(
                    "invalid value in a simple key-value file")
            lines.append("%s=%s\n" % (key, value))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1520
1521
# transaction-name prefixes after which the number of changesets made
# obsolete by the transaction is reported (see registersummarycallback)
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]
1528
1529
# transaction-name prefixes after which the range of new changesets and the
# phase changes are reported (see registersummarycallback)
_reportnewcssource = [
    'pull',
    'unbundle',
]
1533
1534
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if not match:
        # no matcher supplied: prefetch everything
        match = matchall(repo)
    else:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)

    fileprefetchhooks(repo, revs, match)
1546
1547
# a list of (repo, revs, match) prefetch functions; invoked by
# prefetchfiles() above so extensions can make file contents available
fileprefetchhooks = util.hooks()
1549
1550
# A marker that tells the evolve extension to suppress its own reporting
# (nothing in this module reads it; presumably checked by the extension)
_reportstroubledchangesets = True
1552
1553
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed

    'otr' is the transaction to attach the post-close report callbacks to.
    'txnname' is matched by prefix against the _reportobsoletedsource and
    _reportnewcssource lists to decide which reports are relevant.
    """
    def txmatch(sources):
        # a report source matches when the transaction name starts with it
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # NOTE(review): the numeric prefix presumably makes the callbacks
        # fire in registration order -- confirm against the semantics of
        # transaction.addpostclose.
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            # report how many changesets this transaction turned obsolete
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        # pairs of (instability name shown to users, revset name)
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count visible unstable revisions of each type
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot taken before the transaction runs, compared against the
        # post-transaction counts in reportnewinstabilities below
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                # the transaction added no revision at all
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    # the four branches above are exhaustive
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible
                # we call them "extinct" internally but the terms have not been
                # exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            # changesets that already existed before the transaction and
            # were moved to public by it
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))
1681
1682
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extension can wrap to show more
    information like how to fix instabilities"""
    if delta <= 0:
        # no new instabilities of this kind: nothing to warn about
        return None
    return _('%i new %s changesets\n') % (delta, instability)
1689
1690
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Build a one-line listing of *nodes*.

    All nodes are shown when there are at most *maxnumnodes* of them or
    the ui is verbose; otherwise only the first *maxnumnodes* appear,
    followed by a count of the remainder."""
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1695
1696
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads"""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for branch, heads in visible.branchmap().iteritems():
        if len(heads) <= 1:
            continue
        msg = _('rejecting multiple heads on branch "%s"') % branch
        hint = _('%d heads: %s') % (len(heads), nodesummaries(repo, heads))
        raise error.Abort(msg, hint=hint)
1710
1711
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # the default implementation is the identity; extensions wrap this
    # function to substitute or decorate the sink
    return sink
1716
1717
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access only applies to filtered repos with the feature enabled
    if not repo.filtername:
        return repo
    if not repo.ui.configbool('experimental', 'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    hashlike = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError:  # will be reported by scmutil.revrange()
            continue
        hashlike.update(revsetlang.gethashlikesymbols(tree))

    if not hashlike:
        return repo

    hiddenrevs = _getrevsfromsymbols(repo, hashlike)

    if not hiddenrevs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in hiddenrevs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', hiddenrevs)
1759
1760
1760 def _getrevsfromsymbols(repo, symbols):
1761 def _getrevsfromsymbols(repo, symbols):
1761 """parse the list of symbols and returns a set of revision numbers of hidden
1762 """parse the list of symbols and returns a set of revision numbers of hidden
1762 changesets present in symbols"""
1763 changesets present in symbols"""
1763 revs = set()
1764 revs = set()
1764 unfi = repo.unfiltered()
1765 unfi = repo.unfiltered()
1765 unficl = unfi.changelog
1766 unficl = unfi.changelog
1766 cl = repo.changelog
1767 cl = repo.changelog
1767 tiprev = len(unficl)
1768 tiprev = len(unficl)
1768 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1769 allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
1769 for s in symbols:
1770 for s in symbols:
1770 try:
1771 try:
1771 n = int(s)
1772 n = int(s)
1772 if n <= tiprev:
1773 if n <= tiprev:
1773 if not allowrevnums:
1774 if not allowrevnums:
1774 continue
1775 continue
1775 else:
1776 else:
1776 if n not in cl:
1777 if n not in cl:
1777 revs.add(n)
1778 revs.add(n)
1778 continue
1779 continue
1779 except ValueError:
1780 except ValueError:
1780 pass
1781 pass
1781
1782
1782 try:
1783 try:
1783 s = resolvehexnodeidprefix(unfi, s)
1784 s = resolvehexnodeidprefix(unfi, s)
1784 except (error.LookupError, error.WdirUnsupported):
1785 except (error.LookupError, error.WdirUnsupported):
1785 s = None
1786 s = None
1786
1787
1787 if s is not None:
1788 if s is not None:
1788 rev = unficl.rev(s)
1789 rev = unficl.rev(s)
1789 if rev not in cl:
1790 if rev not in cl:
1790 revs.add(rev)
1791 revs.add(rev)
1791
1792
1792 return revs
1793 return revs
1793
1794
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark
    """
    # ancestors of the bookmark, minus ancestors of other heads and of
    # other bookmarks, i.e. the changesets "owned" by this bookmark
    query = ("ancestors(bookmark(%s)) - "
             "ancestors(head() and not bookmark(%s)) - "
             "ancestors(bookmark() and not bookmark(%s))")
    return repo.revs(query, mark, mark, mark)
General Comments 0
You need to be logged in to leave comments. Login now