scmutil: pass ctx object to intrev()... (Yuya Nishihara, r32654:4bec8cce, default branch)
# cmdutil.py - help for command processing in mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import itertools
import os
import re
import tempfile

from .i18n import _
from .node import (
    bin,
    hex,
    nullid,
    nullrev,
    short,
)

from . import (
    bookmarks,
    changelog,
    copies,
    crecord as crecordmod,
    encoding,
    error,
    formatter,
    graphmod,
    lock as lockmod,
    match as matchmod,
    obsolete,
    patch,
    pathutil,
    phases,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    scmutil,
    smartset,
    templatekw,
    templater,
    util,
    vfs as vfsmod,
)
stringio = util.stringio

# templates of common command options

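# Each entry below is a plain option tuple as consumed by Mercurial's option
# parser: (short name, long name, default value, help text), with an optional
# fifth element naming the value placeholder shown in help output, e.g.
# _('CMD') or _('PATTERN').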
dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
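# Anything the user leaves below this scissor line in the commit editor is
# dropped from the resulting commit message, so helper text (or a reference
# diff) can be placed there without ending up in history.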

def ishunk(x):
    hunkclasses = (crecordmod.uihunk, patch.recordhunk)
    return isinstance(x, hunkclasses)

def newandmodified(chunks, originalchunks):
    newlyaddedandmodifiedfiles = set()
    for chunk in chunks:
        if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
            originalchunks:
            newlyaddedandmodifiedfiles.add(chunk.header.filename())
    return newlyaddedandmodifiedfiles

def parsealiases(cmd):
    return cmd.lstrip("^").split("|")
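    # For example, the command table key "^log|history" yields
    # ['log', 'history']; the leading "^" only marks a command for the
    # short help listing and is not part of any alias.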

def setupwrapcolorwrite(ui):
    # wrap ui.write so diff output can be labeled/colorized
    def wrapwrite(orig, *args, **kw):
        label = kw.pop('label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    setattr(ui, 'write', wrap)
    return oldwrite
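    # Typical usage (recordfilter() below follows this pattern):
    #   oldwrite = setupwrapcolorwrite(ui)
    #   try:
    #       ... produce diff output ...
    #   finally:
    #       ui.write = oldwrite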

def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    if usecurses:
        if testfile:
            recordfn = crecordmod.testdecorator(testfile,
                                                crecordmod.testchunkselector)
        else:
            recordfn = crecordmod.chunkselector

        return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)

    else:
        return patch.filterpatch(ui, originalhunks, operation)

def recordfilter(ui, originalhunks, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used to build ui messages to indicate to the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest', None)
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, operation)
    finally:
        ui.write = oldwrite
    return newchunks, newopts

def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    from . import merge as mergemod
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is the generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare the working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by the non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply a subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                        newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        extra={"suffix": ".diff"},
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open into thinking that we haven't
                    # modified them.
                    #
                    # Also note that this is racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                pass

    def recordinwlock(ui, repo, message, match, opts):
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)

def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for e in keys:
        aliases = parsealiases(e)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
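    # For instance, findpossible('st', commands.table) resolves the
    # abbreviation to the 'status' entry, while debug commands are only
    # offered once no regular command matches the given prefix.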

def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = sorted(choice)
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        return choice.values()[0]

    raise error.UnknownCommand(cmd, allcmds)

def findrepo(p):
    while not os.path.isdir(os.path.join(p, ".hg")):
        oldp, p = p, os.path.dirname(p)
        if p == oldp:
            return None

    return p

def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that the working directory must be clean.

    'merge' can be set to False if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to the Abort exception.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise error.Abort(_('uncommitted changes'), hint=hint)
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)

def logmessage(ui, opts):
    """ get the log message according to the -m and -l options """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if not message and logfile:
        try:
            if isstdiofilename(logfile):
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, inst.strerror))
    return message

def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    a merge is being committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        if ctxorbool:
            return baseformname + ".merge"
    elif 1 < len(ctxorbool.parents()):
        return baseformname + ".merge"

    return baseformname + ".normal"
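    # e.g. mergeeditform(ctx, 'commit.normal') gives 'commit.normal.merge'
    # for a merge changeset and 'commit.normal.normal' otherwise.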

def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with the edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking emptiness. It should return the actual text to be
    stored into history. This allows the description to be changed before
    storing.

    'extramsg' is an extra message to be shown in the editor instead of the
    'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
    are automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific to usage in MQ.
    """
    if edit or finishdesc or extramsg:
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor
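    # A typical caller passes the result on to the commit machinery, e.g.
    #   editor = getcommiteditor(editform='commit.normal.normal', **opts)
    #   repo.commit(message, user, date, match, editor=editor)
    # where the returned callable is later invoked as editor(repo, ctx, subs).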

def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise error.Abort(_('limit must be a positive integer'))
        if limit <= 0:
            raise error.Abort(_('limit must be positive'))
    else:
        limit = None
    return limit

def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', str(desc))
    }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
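    # For example, a pattern of 'export-%n-of-%N-%h.patch' with seqno=2,
    # total=10 and a node expands to something like
    # 'export-02-of-10-1234567890ab.patch'; an unknown %-spec triggers the
    # Abort above.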

def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    return not pat or pat == '-'

class _unclosablefile(object):
    def __init__(self, fp):
        self._fp = fp

    def close(self):
        pass

    def __iter__(self):
        return iter(self._fp)

    def __getattr__(self, attr):
        return getattr(self._fp, attr)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        pass
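    # close() and __exit__() are deliberate no-ops: makefileobj() below wraps
    # ui.fout/ui.fin in this class so callers can use the usual file/context
    # manager protocol without actually closing the process's stdio streams.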

def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):

    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        return _unclosablefile(fp)
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            modemap[fn] = 'ab'
    return open(fn, mode)

def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r

def copy(ui, repo, pats, opts, rename=False):
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                repo.wvfs.unlinkpath(abssrc)
            wctx.forget([abssrc])

    # pat: ossep
    # dest: ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest: ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
945
945
946 ## facility to let extension process additional data into an import patch
946 ## facility to let extension process additional data into an import patch
947 # list of identifier to be executed in order
947 # list of identifier to be executed in order
948 extrapreimport = [] # run before commit
948 extrapreimport = [] # run before commit
949 extrapostimport = [] # run after commit
949 extrapostimport = [] # run after commit
950 # mapping from identifier to actual import function
950 # mapping from identifier to actual import function
951 #
951 #
952 # 'preimport' hooks are run before the commit is made and are provided the following
952 # 'preimport' hooks are run before the commit is made and are provided the following
953 # arguments:
953 # arguments:
954 # - repo: the localrepository instance,
954 # - repo: the localrepository instance,
955 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
955 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
956 # - extra: the future extra dictionary of the changeset, please mutate it,
956 # - extra: the future extra dictionary of the changeset, please mutate it,
957 # - opts: the import options.
957 # - opts: the import options.
958 # XXX ideally, we would just pass a ctx ready to be computed, that would allow
958 # XXX ideally, we would just pass a ctx ready to be computed, that would allow
959 # mutation of the in-memory commit and more. Feel free to rework the code to get
959 # mutation of the in-memory commit and more. Feel free to rework the code to get
960 # there.
960 # there.
961 extrapreimportmap = {}
961 extrapreimportmap = {}
962 # 'postimport' hooks are run after the commit is made and are provided the following
962 # 'postimport' hooks are run after the commit is made and are provided the following
963 # argument:
963 # argument:
964 # - ctx: the changectx created by import.
964 # - ctx: the changectx created by import.
965 extrapostimportmap = {}
965 extrapostimportmap = {}
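
As a sketch of how these hook points can be used, a hypothetical extension (the identifier 'myext', the helper names and the 'myext_source' extra key below are all made up for illustration) might register itself roughly like this; only the registry names and the call signatures documented above are taken from the code:

    from mercurial import cmdutil

    def _preimport(repo, patchdata, extra, opts):
        # runs before the commit is created; mutate 'extra' to annotate the changeset
        nodeid = patchdata.get('nodeid')
        if nodeid:
            extra['myext_source'] = nodeid

    def _postimport(ctx):
        # runs after the commit exists; ctx is the newly created changectx
        ctx.repo().ui.note('myext: imported %s\n' % ctx.hex()[:12])

    def uisetup(ui):
        # identifiers are executed in list order; the maps hold the callables
        cmdutil.extrapreimport.append('myext')
        cmdutil.extrapreimportmap['myext'] = _preimport
        cmdutil.extrapostimport.append('myext')
        cmdutil.extrapostimportmap['myext'] = _postimport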
966
966
967 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
967 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
968 """Utility function used by commands.import to import a single patch
968 """Utility function used by commands.import to import a single patch
969
969
970 This function is explicitly defined here to help the evolve extension to
970 This function is explicitly defined here to help the evolve extension to
971 wrap this part of the import logic.
971 wrap this part of the import logic.
972
972
973 The API is currently a bit ugly because it is a simple code translation from
973 The API is currently a bit ugly because it is a simple code translation from
974 the import command. Feel free to make it better.
974 the import command. Feel free to make it better.
975
975
976 :hunk: a patch (as a binary string)
976 :hunk: a patch (as a binary string)
977 :parents: nodes that will be parent of the created commit
977 :parents: nodes that will be parent of the created commit
978 :opts: the full dict of options passed to the import command
978 :opts: the full dict of options passed to the import command
979 :msgs: list to save commit message to.
979 :msgs: list to save commit message to.
980 (used in case we need to save it when failing)
980 (used in case we need to save it when failing)
981 :updatefunc: a function that updates a repo to a given node
981 :updatefunc: a function that updates a repo to a given node
982 updatefunc(<repo>, <node>)
982 updatefunc(<repo>, <node>)
983 """
983 """
984 # avoid cycle context -> subrepo -> cmdutil
984 # avoid cycle context -> subrepo -> cmdutil
985 from . import context
985 from . import context
986 extractdata = patch.extract(ui, hunk)
986 extractdata = patch.extract(ui, hunk)
987 tmpname = extractdata.get('filename')
987 tmpname = extractdata.get('filename')
988 message = extractdata.get('message')
988 message = extractdata.get('message')
989 user = opts.get('user') or extractdata.get('user')
989 user = opts.get('user') or extractdata.get('user')
990 date = opts.get('date') or extractdata.get('date')
990 date = opts.get('date') or extractdata.get('date')
991 branch = extractdata.get('branch')
991 branch = extractdata.get('branch')
992 nodeid = extractdata.get('nodeid')
992 nodeid = extractdata.get('nodeid')
993 p1 = extractdata.get('p1')
993 p1 = extractdata.get('p1')
994 p2 = extractdata.get('p2')
994 p2 = extractdata.get('p2')
995
995
996 nocommit = opts.get('no_commit')
996 nocommit = opts.get('no_commit')
997 importbranch = opts.get('import_branch')
997 importbranch = opts.get('import_branch')
998 update = not opts.get('bypass')
998 update = not opts.get('bypass')
999 strip = opts["strip"]
999 strip = opts["strip"]
1000 prefix = opts["prefix"]
1000 prefix = opts["prefix"]
1001 sim = float(opts.get('similarity') or 0)
1001 sim = float(opts.get('similarity') or 0)
1002 if not tmpname:
1002 if not tmpname:
1003 return (None, None, False)
1003 return (None, None, False)
1004
1004
1005 rejects = False
1005 rejects = False
1006
1006
1007 try:
1007 try:
1008 cmdline_message = logmessage(ui, opts)
1008 cmdline_message = logmessage(ui, opts)
1009 if cmdline_message:
1009 if cmdline_message:
1010 # pickup the cmdline msg
1010 # pickup the cmdline msg
1011 message = cmdline_message
1011 message = cmdline_message
1012 elif message:
1012 elif message:
1013 # pickup the patch msg
1013 # pickup the patch msg
1014 message = message.strip()
1014 message = message.strip()
1015 else:
1015 else:
1016 # launch the editor
1016 # launch the editor
1017 message = None
1017 message = None
1018 ui.debug('message:\n%s\n' % message)
1018 ui.debug('message:\n%s\n' % message)
1019
1019
1020 if len(parents) == 1:
1020 if len(parents) == 1:
1021 parents.append(repo[nullid])
1021 parents.append(repo[nullid])
1022 if opts.get('exact'):
1022 if opts.get('exact'):
1023 if not nodeid or not p1:
1023 if not nodeid or not p1:
1024 raise error.Abort(_('not a Mercurial patch'))
1024 raise error.Abort(_('not a Mercurial patch'))
1025 p1 = repo[p1]
1025 p1 = repo[p1]
1026 p2 = repo[p2 or nullid]
1026 p2 = repo[p2 or nullid]
1027 elif p2:
1027 elif p2:
1028 try:
1028 try:
1029 p1 = repo[p1]
1029 p1 = repo[p1]
1030 p2 = repo[p2]
1030 p2 = repo[p2]
1031 # Without any options, consider p2 only if the
1031 # Without any options, consider p2 only if the
1032 # patch is being applied on top of the recorded
1032 # patch is being applied on top of the recorded
1033 # first parent.
1033 # first parent.
1034 if p1 != parents[0]:
1034 if p1 != parents[0]:
1035 p1 = parents[0]
1035 p1 = parents[0]
1036 p2 = repo[nullid]
1036 p2 = repo[nullid]
1037 except error.RepoError:
1037 except error.RepoError:
1038 p1, p2 = parents
1038 p1, p2 = parents
1039 if p2.node() == nullid:
1039 if p2.node() == nullid:
1040 ui.warn(_("warning: import the patch as a normal revision\n"
1040 ui.warn(_("warning: import the patch as a normal revision\n"
1041 "(use --exact to import the patch as a merge)\n"))
1041 "(use --exact to import the patch as a merge)\n"))
1042 else:
1042 else:
1043 p1, p2 = parents
1043 p1, p2 = parents
1044
1044
1045 n = None
1045 n = None
1046 if update:
1046 if update:
1047 if p1 != parents[0]:
1047 if p1 != parents[0]:
1048 updatefunc(repo, p1.node())
1048 updatefunc(repo, p1.node())
1049 if p2 != parents[1]:
1049 if p2 != parents[1]:
1050 repo.setparents(p1.node(), p2.node())
1050 repo.setparents(p1.node(), p2.node())
1051
1051
1052 if opts.get('exact') or importbranch:
1052 if opts.get('exact') or importbranch:
1053 repo.dirstate.setbranch(branch or 'default')
1053 repo.dirstate.setbranch(branch or 'default')
1054
1054
1055 partial = opts.get('partial', False)
1055 partial = opts.get('partial', False)
1056 files = set()
1056 files = set()
1057 try:
1057 try:
1058 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1058 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1059 files=files, eolmode=None, similarity=sim / 100.0)
1059 files=files, eolmode=None, similarity=sim / 100.0)
1060 except patch.PatchError as e:
1060 except patch.PatchError as e:
1061 if not partial:
1061 if not partial:
1062 raise error.Abort(str(e))
1062 raise error.Abort(str(e))
1063 if partial:
1063 if partial:
1064 rejects = True
1064 rejects = True
1065
1065
1066 files = list(files)
1066 files = list(files)
1067 if nocommit:
1067 if nocommit:
1068 if message:
1068 if message:
1069 msgs.append(message)
1069 msgs.append(message)
1070 else:
1070 else:
1071 if opts.get('exact') or p2:
1071 if opts.get('exact') or p2:
1072 # If you got here, you either used --force and know what
1072 # If you got here, you either used --force and know what
1073 # you are doing or used --exact or a merge patch while
1073 # you are doing or used --exact or a merge patch while
1074 # being updated to its first parent.
1074 # being updated to its first parent.
1075 m = None
1075 m = None
1076 else:
1076 else:
1077 m = scmutil.matchfiles(repo, files or [])
1077 m = scmutil.matchfiles(repo, files or [])
1078 editform = mergeeditform(repo[None], 'import.normal')
1078 editform = mergeeditform(repo[None], 'import.normal')
1079 if opts.get('exact'):
1079 if opts.get('exact'):
1080 editor = None
1080 editor = None
1081 else:
1081 else:
1082 editor = getcommiteditor(editform=editform, **opts)
1082 editor = getcommiteditor(editform=editform, **opts)
1083 extra = {}
1083 extra = {}
1084 for idfunc in extrapreimport:
1084 for idfunc in extrapreimport:
1085 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1085 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1086 overrides = {}
1086 overrides = {}
1087 if partial:
1087 if partial:
1088 overrides[('ui', 'allowemptycommit')] = True
1088 overrides[('ui', 'allowemptycommit')] = True
1089 with repo.ui.configoverride(overrides, 'import'):
1089 with repo.ui.configoverride(overrides, 'import'):
1090 n = repo.commit(message, user,
1090 n = repo.commit(message, user,
1091 date, match=m,
1091 date, match=m,
1092 editor=editor, extra=extra)
1092 editor=editor, extra=extra)
1093 for idfunc in extrapostimport:
1093 for idfunc in extrapostimport:
1094 extrapostimportmap[idfunc](repo[n])
1094 extrapostimportmap[idfunc](repo[n])
1095 else:
1095 else:
1096 if opts.get('exact') or importbranch:
1096 if opts.get('exact') or importbranch:
1097 branch = branch or 'default'
1097 branch = branch or 'default'
1098 else:
1098 else:
1099 branch = p1.branch()
1099 branch = p1.branch()
1100 store = patch.filestore()
1100 store = patch.filestore()
1101 try:
1101 try:
1102 files = set()
1102 files = set()
1103 try:
1103 try:
1104 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1104 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1105 files, eolmode=None)
1105 files, eolmode=None)
1106 except patch.PatchError as e:
1106 except patch.PatchError as e:
1107 raise error.Abort(str(e))
1107 raise error.Abort(str(e))
1108 if opts.get('exact'):
1108 if opts.get('exact'):
1109 editor = None
1109 editor = None
1110 else:
1110 else:
1111 editor = getcommiteditor(editform='import.bypass')
1111 editor = getcommiteditor(editform='import.bypass')
1112 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1112 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1113 message,
1113 message,
1114 user,
1114 user,
1115 date,
1115 date,
1116 branch, files, store,
1116 branch, files, store,
1117 editor=editor)
1117 editor=editor)
1118 n = memctx.commit()
1118 n = memctx.commit()
1119 finally:
1119 finally:
1120 store.close()
1120 store.close()
1121 if opts.get('exact') and nocommit:
1121 if opts.get('exact') and nocommit:
1122 # --exact with --no-commit is still useful in that it does merge
1122 # --exact with --no-commit is still useful in that it does merge
1123 # and branch bits
1123 # and branch bits
1124 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1124 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1125 elif opts.get('exact') and hex(n) != nodeid:
1125 elif opts.get('exact') and hex(n) != nodeid:
1126 raise error.Abort(_('patch is damaged or loses information'))
1126 raise error.Abort(_('patch is damaged or loses information'))
1127 msg = _('applied to working directory')
1127 msg = _('applied to working directory')
1128 if n:
1128 if n:
1129 # i18n: refers to a short changeset id
1129 # i18n: refers to a short changeset id
1130 msg = _('created %s') % short(n)
1130 msg = _('created %s') % short(n)
1131 return (msg, n, rejects)
1131 return (msg, n, rejects)
1132 finally:
1132 finally:
1133 os.unlink(tmpname)
1133 os.unlink(tmpname)
1134
1134
1135 # facility to let extensions include additional data in an exported patch
1135 # facility to let extensions include additional data in an exported patch
1136 # list of identifiers to be executed in order
1136 # list of identifiers to be executed in order
1137 extraexport = []
1137 extraexport = []
1138 # mapping from identifier to actual export function
1138 # mapping from identifier to actual export function
1139 # function has to return a string to be added to the header or None
1139 # function has to return a string to be added to the header or None
1140 # it is given two arguments (sequencenumber, changectx)
1140 # it is given two arguments (sequencenumber, changectx)
1141 extraexportmap = {}
1141 extraexportmap = {}
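
For illustration, a hypothetical extension could add its own line to the exported patch header by registering a function that takes (seqno, ctx) and returns either a string (emitted as '# <string>') or None; the identifier 'myext' below is made up:

    from mercurial import cmdutil

    def _exportheader(seqno, ctx):
        # emit the changeset's bookmarks as an extra patch header, if any
        bookmarks = ctx.bookmarks()
        if not bookmarks:
            return None  # no extra header line for this changeset
        return 'Bookmark %s' % ' '.join(bookmarks)

    def uisetup(ui):
        cmdutil.extraexport.append('myext')
        cmdutil.extraexportmap['myext'] = _exportheader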
1142
1142
1143 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1143 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1144 node = ctx.node()
1144 node = ctx.node()
1145 parents = [p.node() for p in ctx.parents() if p]
1145 parents = [p.node() for p in ctx.parents() if p]
1146 branch = ctx.branch()
1146 branch = ctx.branch()
1147 if switch_parent:
1147 if switch_parent:
1148 parents.reverse()
1148 parents.reverse()
1149
1149
1150 if parents:
1150 if parents:
1151 prev = parents[0]
1151 prev = parents[0]
1152 else:
1152 else:
1153 prev = nullid
1153 prev = nullid
1154
1154
1155 write("# HG changeset patch\n")
1155 write("# HG changeset patch\n")
1156 write("# User %s\n" % ctx.user())
1156 write("# User %s\n" % ctx.user())
1157 write("# Date %d %d\n" % ctx.date())
1157 write("# Date %d %d\n" % ctx.date())
1158 write("# %s\n" % util.datestr(ctx.date()))
1158 write("# %s\n" % util.datestr(ctx.date()))
1159 if branch and branch != 'default':
1159 if branch and branch != 'default':
1160 write("# Branch %s\n" % branch)
1160 write("# Branch %s\n" % branch)
1161 write("# Node ID %s\n" % hex(node))
1161 write("# Node ID %s\n" % hex(node))
1162 write("# Parent %s\n" % hex(prev))
1162 write("# Parent %s\n" % hex(prev))
1163 if len(parents) > 1:
1163 if len(parents) > 1:
1164 write("# Parent %s\n" % hex(parents[1]))
1164 write("# Parent %s\n" % hex(parents[1]))
1165
1165
1166 for headerid in extraexport:
1166 for headerid in extraexport:
1167 header = extraexportmap[headerid](seqno, ctx)
1167 header = extraexportmap[headerid](seqno, ctx)
1168 if header is not None:
1168 if header is not None:
1169 write('# %s\n' % header)
1169 write('# %s\n' % header)
1170 write(ctx.description().rstrip())
1170 write(ctx.description().rstrip())
1171 write("\n\n")
1171 write("\n\n")
1172
1172
1173 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1173 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1174 write(chunk, label=label)
1174 write(chunk, label=label)
1175
1175
1176 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1176 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1177 opts=None, match=None):
1177 opts=None, match=None):
1178 '''export changesets as hg patches
1178 '''export changesets as hg patches
1179
1179
1180 Args:
1180 Args:
1181 repo: The repository from which we're exporting revisions.
1181 repo: The repository from which we're exporting revisions.
1182 revs: A list of revisions to export as revision numbers.
1182 revs: A list of revisions to export as revision numbers.
1183 fntemplate: An optional string to use for generating patch file names.
1183 fntemplate: An optional string to use for generating patch file names.
1184 fp: An optional file-like object to which patches should be written.
1184 fp: An optional file-like object to which patches should be written.
1185 switch_parent: If True, show diffs against second parent when not nullid.
1185 switch_parent: If True, show diffs against second parent when not nullid.
1186 Default is false, which always shows diff against p1.
1186 Default is false, which always shows diff against p1.
1187 opts: diff options to use for generating the patch.
1187 opts: diff options to use for generating the patch.
1188 match: If specified, only export changes to files matching this matcher.
1188 match: If specified, only export changes to files matching this matcher.
1189
1189
1190 Returns:
1190 Returns:
1191 Nothing.
1191 Nothing.
1192
1192
1193 Side Effect:
1193 Side Effect:
1194 "HG Changeset Patch" data is emitted to one of the following
1194 "HG Changeset Patch" data is emitted to one of the following
1195 destinations:
1195 destinations:
1196 fp is specified: All revs are written to the specified
1196 fp is specified: All revs are written to the specified
1197 file-like object.
1197 file-like object.
1198 fntemplate specified: Each rev is written to a unique file named using
1198 fntemplate specified: Each rev is written to a unique file named using
1199 the given template.
1199 the given template.
1200 Neither fp nor template specified: All revs written to repo.ui.write()
1200 Neither fp nor template specified: All revs written to repo.ui.write()
1201 '''
1201 '''
1202
1202
1203 total = len(revs)
1203 total = len(revs)
1204 revwidth = max(len(str(rev)) for rev in revs)
1204 revwidth = max(len(str(rev)) for rev in revs)
1205 filemode = {}
1205 filemode = {}
1206
1206
1207 write = None
1207 write = None
1208 dest = '<unnamed>'
1208 dest = '<unnamed>'
1209 if fp:
1209 if fp:
1210 dest = getattr(fp, 'name', dest)
1210 dest = getattr(fp, 'name', dest)
1211 def write(s, **kw):
1211 def write(s, **kw):
1212 fp.write(s)
1212 fp.write(s)
1213 elif not fntemplate:
1213 elif not fntemplate:
1214 write = repo.ui.write
1214 write = repo.ui.write
1215
1215
1216 for seqno, rev in enumerate(revs, 1):
1216 for seqno, rev in enumerate(revs, 1):
1217 ctx = repo[rev]
1217 ctx = repo[rev]
1218 fo = None
1218 fo = None
1219 if not fp and fntemplate:
1219 if not fp and fntemplate:
1220 desc_lines = ctx.description().rstrip().split('\n')
1220 desc_lines = ctx.description().rstrip().split('\n')
1221 desc = desc_lines[0] # Commit always has a first line.
1221 desc = desc_lines[0] # Commit always has a first line.
1222 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1222 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1223 total=total, seqno=seqno, revwidth=revwidth,
1223 total=total, seqno=seqno, revwidth=revwidth,
1224 mode='wb', modemap=filemode)
1224 mode='wb', modemap=filemode)
1225 dest = fo.name
1225 dest = fo.name
1226 def write(s, **kw):
1226 def write(s, **kw):
1227 fo.write(s)
1227 fo.write(s)
1228 if not dest.startswith('<'):
1228 if not dest.startswith('<'):
1229 repo.ui.note("%s\n" % dest)
1229 repo.ui.note("%s\n" % dest)
1230 _exportsingle(
1230 _exportsingle(
1231 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1231 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1232 if fo is not None:
1232 if fo is not None:
1233 fo.close()
1233 fo.close()
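
As a usage sketch (assuming an existing ui/repo pair, for example inside a command or extension; 'exporttip' is a hypothetical helper name), exporting the tip revision with the default file name template could look like:

    from mercurial import cmdutil, patch, scmutil

    def exporttip(ui, repo):
        revs = scmutil.revrange(repo, ['tip'])
        diffopts = patch.diffallopts(ui)
        # writes hg-<shorthash>.patch in the current directory
        cmdutil.export(repo, revs, fntemplate='hg-%h.patch', opts=diffopts)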
1234
1234
1235 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1235 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1236 changes=None, stat=False, fp=None, prefix='',
1236 changes=None, stat=False, fp=None, prefix='',
1237 root='', listsubrepos=False):
1237 root='', listsubrepos=False):
1238 '''show diff or diffstat.'''
1238 '''show diff or diffstat.'''
1239 if fp is None:
1239 if fp is None:
1240 write = ui.write
1240 write = ui.write
1241 else:
1241 else:
1242 def write(s, **kw):
1242 def write(s, **kw):
1243 fp.write(s)
1243 fp.write(s)
1244
1244
1245 if root:
1245 if root:
1246 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1246 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1247 else:
1247 else:
1248 relroot = ''
1248 relroot = ''
1249 if relroot != '':
1249 if relroot != '':
1250 # XXX relative roots currently don't work if the root is within a
1250 # XXX relative roots currently don't work if the root is within a
1251 # subrepo
1251 # subrepo
1252 uirelroot = match.uipath(relroot)
1252 uirelroot = match.uipath(relroot)
1253 relroot += '/'
1253 relroot += '/'
1254 for matchroot in match.files():
1254 for matchroot in match.files():
1255 if not matchroot.startswith(relroot):
1255 if not matchroot.startswith(relroot):
1256 ui.warn(_('warning: %s not inside relative root %s\n') % (
1256 ui.warn(_('warning: %s not inside relative root %s\n') % (
1257 match.uipath(matchroot), uirelroot))
1257 match.uipath(matchroot), uirelroot))
1258
1258
1259 if stat:
1259 if stat:
1260 diffopts = diffopts.copy(context=0)
1260 diffopts = diffopts.copy(context=0)
1261 width = 80
1261 width = 80
1262 if not ui.plain():
1262 if not ui.plain():
1263 width = ui.termwidth()
1263 width = ui.termwidth()
1264 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1264 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1265 prefix=prefix, relroot=relroot)
1265 prefix=prefix, relroot=relroot)
1266 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1266 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1267 width=width):
1267 width=width):
1268 write(chunk, label=label)
1268 write(chunk, label=label)
1269 else:
1269 else:
1270 for chunk, label in patch.diffui(repo, node1, node2, match,
1270 for chunk, label in patch.diffui(repo, node1, node2, match,
1271 changes, diffopts, prefix=prefix,
1271 changes, diffopts, prefix=prefix,
1272 relroot=relroot):
1272 relroot=relroot):
1273 write(chunk, label=label)
1273 write(chunk, label=label)
1274
1274
1275 if listsubrepos:
1275 if listsubrepos:
1276 ctx1 = repo[node1]
1276 ctx1 = repo[node1]
1277 ctx2 = repo[node2]
1277 ctx2 = repo[node2]
1278 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1278 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1279 tempnode2 = node2
1279 tempnode2 = node2
1280 try:
1280 try:
1281 if node2 is not None:
1281 if node2 is not None:
1282 tempnode2 = ctx2.substate[subpath][1]
1282 tempnode2 = ctx2.substate[subpath][1]
1283 except KeyError:
1283 except KeyError:
1284 # A subrepo that existed in node1 was deleted between node1 and
1284 # A subrepo that existed in node1 was deleted between node1 and
1285 # node2 (inclusive). Thus, ctx2's substate won't contain that
1285 # node2 (inclusive). Thus, ctx2's substate won't contain that
1286 # subpath. The best we can do is to ignore it.
1286 # subpath. The best we can do is to ignore it.
1287 tempnode2 = None
1287 tempnode2 = None
1288 submatch = matchmod.subdirmatcher(subpath, match)
1288 submatch = matchmod.subdirmatcher(subpath, match)
1289 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1289 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1290 stat=stat, fp=fp, prefix=prefix)
1290 stat=stat, fp=fp, prefix=prefix)
1291
1291
1292 def _changesetlabels(ctx):
1292 def _changesetlabels(ctx):
1293 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1293 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1294 if ctx.obsolete():
1294 if ctx.obsolete():
1295 labels.append('changeset.obsolete')
1295 labels.append('changeset.obsolete')
1296 if ctx.troubled():
1296 if ctx.troubled():
1297 labels.append('changeset.troubled')
1297 labels.append('changeset.troubled')
1298 for trouble in ctx.troubles():
1298 for trouble in ctx.troubles():
1299 labels.append('trouble.%s' % trouble)
1299 labels.append('trouble.%s' % trouble)
1300 return ' '.join(labels)
1300 return ' '.join(labels)
1301
1301
1302 class changeset_printer(object):
1302 class changeset_printer(object):
1303 '''show changeset information when templating not requested.'''
1303 '''show changeset information when templating not requested.'''
1304
1304
1305 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1305 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1306 self.ui = ui
1306 self.ui = ui
1307 self.repo = repo
1307 self.repo = repo
1308 self.buffered = buffered
1308 self.buffered = buffered
1309 self.matchfn = matchfn
1309 self.matchfn = matchfn
1310 self.diffopts = diffopts
1310 self.diffopts = diffopts
1311 self.header = {}
1311 self.header = {}
1312 self.hunk = {}
1312 self.hunk = {}
1313 self.lastheader = None
1313 self.lastheader = None
1314 self.footer = None
1314 self.footer = None
1315
1315
1316 def flush(self, ctx):
1316 def flush(self, ctx):
1317 rev = ctx.rev()
1317 rev = ctx.rev()
1318 if rev in self.header:
1318 if rev in self.header:
1319 h = self.header[rev]
1319 h = self.header[rev]
1320 if h != self.lastheader:
1320 if h != self.lastheader:
1321 self.lastheader = h
1321 self.lastheader = h
1322 self.ui.write(h)
1322 self.ui.write(h)
1323 del self.header[rev]
1323 del self.header[rev]
1324 if rev in self.hunk:
1324 if rev in self.hunk:
1325 self.ui.write(self.hunk[rev])
1325 self.ui.write(self.hunk[rev])
1326 del self.hunk[rev]
1326 del self.hunk[rev]
1327 return 1
1327 return 1
1328 return 0
1328 return 0
1329
1329
1330 def close(self):
1330 def close(self):
1331 if self.footer:
1331 if self.footer:
1332 self.ui.write(self.footer)
1332 self.ui.write(self.footer)
1333
1333
1334 def show(self, ctx, copies=None, matchfn=None, **props):
1334 def show(self, ctx, copies=None, matchfn=None, **props):
1335 if self.buffered:
1335 if self.buffered:
1336 self.ui.pushbuffer(labeled=True)
1336 self.ui.pushbuffer(labeled=True)
1337 self._show(ctx, copies, matchfn, props)
1337 self._show(ctx, copies, matchfn, props)
1338 self.hunk[ctx.rev()] = self.ui.popbuffer()
1338 self.hunk[ctx.rev()] = self.ui.popbuffer()
1339 else:
1339 else:
1340 self._show(ctx, copies, matchfn, props)
1340 self._show(ctx, copies, matchfn, props)
1341
1341
1342 def _show(self, ctx, copies, matchfn, props):
1342 def _show(self, ctx, copies, matchfn, props):
1343 '''show a single changeset or file revision'''
1343 '''show a single changeset or file revision'''
1344 changenode = ctx.node()
1344 changenode = ctx.node()
1345 rev = ctx.rev()
1345 rev = ctx.rev()
1346 if self.ui.debugflag:
1346 if self.ui.debugflag:
1347 hexfunc = hex
1347 hexfunc = hex
1348 else:
1348 else:
1349 hexfunc = short
1349 hexfunc = short
1350 # as of now, wctx.node() and wctx.rev() return None, but we want to
1350 # as of now, wctx.node() and wctx.rev() return None, but we want to
1351 # show the same values as {node} and {rev} templatekw
1351 # show the same values as {node} and {rev} templatekw
1352 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1352 revnode = (scmutil.intrev(ctx), hexfunc(bin(ctx.hex())))
1353
1353
1354 if self.ui.quiet:
1354 if self.ui.quiet:
1355 self.ui.write("%d:%s\n" % revnode, label='log.node')
1355 self.ui.write("%d:%s\n" % revnode, label='log.node')
1356 return
1356 return
1357
1357
1358 date = util.datestr(ctx.date())
1358 date = util.datestr(ctx.date())
1359
1359
1360 # i18n: column positioning for "hg log"
1360 # i18n: column positioning for "hg log"
1361 self.ui.write(_("changeset: %d:%s\n") % revnode,
1361 self.ui.write(_("changeset: %d:%s\n") % revnode,
1362 label=_changesetlabels(ctx))
1362 label=_changesetlabels(ctx))
1363
1363
1364 # branches are shown first before any other names due to backwards
1364 # branches are shown first before any other names due to backwards
1365 # compatibility
1365 # compatibility
1366 branch = ctx.branch()
1366 branch = ctx.branch()
1367 # don't show the default branch name
1367 # don't show the default branch name
1368 if branch != 'default':
1368 if branch != 'default':
1369 # i18n: column positioning for "hg log"
1369 # i18n: column positioning for "hg log"
1370 self.ui.write(_("branch: %s\n") % branch,
1370 self.ui.write(_("branch: %s\n") % branch,
1371 label='log.branch')
1371 label='log.branch')
1372
1372
1373 for nsname, ns in self.repo.names.iteritems():
1373 for nsname, ns in self.repo.names.iteritems():
1374 # the 'branches' namespace has special logic already handled above, so here we just
1374 # the 'branches' namespace has special logic already handled above, so here we just
1375 # skip it
1375 # skip it
1376 if nsname == 'branches':
1376 if nsname == 'branches':
1377 continue
1377 continue
1378 # we will use the templatename as the color name since those two
1378 # we will use the templatename as the color name since those two
1379 # should be the same
1379 # should be the same
1380 for name in ns.names(self.repo, changenode):
1380 for name in ns.names(self.repo, changenode):
1381 self.ui.write(ns.logfmt % name,
1381 self.ui.write(ns.logfmt % name,
1382 label='log.%s' % ns.colorname)
1382 label='log.%s' % ns.colorname)
1383 if self.ui.debugflag:
1383 if self.ui.debugflag:
1384 # i18n: column positioning for "hg log"
1384 # i18n: column positioning for "hg log"
1385 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1385 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1386 label='log.phase')
1386 label='log.phase')
1387 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1387 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1388 label = 'log.parent changeset.%s' % pctx.phasestr()
1388 label = 'log.parent changeset.%s' % pctx.phasestr()
1389 # i18n: column positioning for "hg log"
1389 # i18n: column positioning for "hg log"
1390 self.ui.write(_("parent: %d:%s\n")
1390 self.ui.write(_("parent: %d:%s\n")
1391 % (pctx.rev(), hexfunc(pctx.node())),
1391 % (pctx.rev(), hexfunc(pctx.node())),
1392 label=label)
1392 label=label)
1393
1393
1394 if self.ui.debugflag and rev is not None:
1394 if self.ui.debugflag and rev is not None:
1395 mnode = ctx.manifestnode()
1395 mnode = ctx.manifestnode()
1396 # i18n: column positioning for "hg log"
1396 # i18n: column positioning for "hg log"
1397 self.ui.write(_("manifest: %d:%s\n") %
1397 self.ui.write(_("manifest: %d:%s\n") %
1398 (self.repo.manifestlog._revlog.rev(mnode),
1398 (self.repo.manifestlog._revlog.rev(mnode),
1399 hex(mnode)),
1399 hex(mnode)),
1400 label='ui.debug log.manifest')
1400 label='ui.debug log.manifest')
1401 # i18n: column positioning for "hg log"
1401 # i18n: column positioning for "hg log"
1402 self.ui.write(_("user: %s\n") % ctx.user(),
1402 self.ui.write(_("user: %s\n") % ctx.user(),
1403 label='log.user')
1403 label='log.user')
1404 # i18n: column positioning for "hg log"
1404 # i18n: column positioning for "hg log"
1405 self.ui.write(_("date: %s\n") % date,
1405 self.ui.write(_("date: %s\n") % date,
1406 label='log.date')
1406 label='log.date')
1407
1407
1408 if ctx.troubled():
1408 if ctx.troubled():
1409 # i18n: column positioning for "hg log"
1409 # i18n: column positioning for "hg log"
1410 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1410 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1411 label='log.trouble')
1411 label='log.trouble')
1412
1412
1413 if self.ui.debugflag:
1413 if self.ui.debugflag:
1414 files = ctx.p1().status(ctx)[:3]
1414 files = ctx.p1().status(ctx)[:3]
1415 for key, value in zip([# i18n: column positioning for "hg log"
1415 for key, value in zip([# i18n: column positioning for "hg log"
1416 _("files:"),
1416 _("files:"),
1417 # i18n: column positioning for "hg log"
1417 # i18n: column positioning for "hg log"
1418 _("files+:"),
1418 _("files+:"),
1419 # i18n: column positioning for "hg log"
1419 # i18n: column positioning for "hg log"
1420 _("files-:")], files):
1420 _("files-:")], files):
1421 if value:
1421 if value:
1422 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1422 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1423 label='ui.debug log.files')
1423 label='ui.debug log.files')
1424 elif ctx.files() and self.ui.verbose:
1424 elif ctx.files() and self.ui.verbose:
1425 # i18n: column positioning for "hg log"
1425 # i18n: column positioning for "hg log"
1426 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1426 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1427 label='ui.note log.files')
1427 label='ui.note log.files')
1428 if copies and self.ui.verbose:
1428 if copies and self.ui.verbose:
1429 copies = ['%s (%s)' % c for c in copies]
1429 copies = ['%s (%s)' % c for c in copies]
1430 # i18n: column positioning for "hg log"
1430 # i18n: column positioning for "hg log"
1431 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1431 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1432 label='ui.note log.copies')
1432 label='ui.note log.copies')
1433
1433
1434 extra = ctx.extra()
1434 extra = ctx.extra()
1435 if extra and self.ui.debugflag:
1435 if extra and self.ui.debugflag:
1436 for key, value in sorted(extra.items()):
1436 for key, value in sorted(extra.items()):
1437 # i18n: column positioning for "hg log"
1437 # i18n: column positioning for "hg log"
1438 self.ui.write(_("extra: %s=%s\n")
1438 self.ui.write(_("extra: %s=%s\n")
1439 % (key, util.escapestr(value)),
1439 % (key, util.escapestr(value)),
1440 label='ui.debug log.extra')
1440 label='ui.debug log.extra')
1441
1441
1442 description = ctx.description().strip()
1442 description = ctx.description().strip()
1443 if description:
1443 if description:
1444 if self.ui.verbose:
1444 if self.ui.verbose:
1445 self.ui.write(_("description:\n"),
1445 self.ui.write(_("description:\n"),
1446 label='ui.note log.description')
1446 label='ui.note log.description')
1447 self.ui.write(description,
1447 self.ui.write(description,
1448 label='ui.note log.description')
1448 label='ui.note log.description')
1449 self.ui.write("\n\n")
1449 self.ui.write("\n\n")
1450 else:
1450 else:
1451 # i18n: column positioning for "hg log"
1451 # i18n: column positioning for "hg log"
1452 self.ui.write(_("summary: %s\n") %
1452 self.ui.write(_("summary: %s\n") %
1453 description.splitlines()[0],
1453 description.splitlines()[0],
1454 label='log.summary')
1454 label='log.summary')
1455 self.ui.write("\n")
1455 self.ui.write("\n")
1456
1456
1457 self.showpatch(ctx, matchfn)
1457 self.showpatch(ctx, matchfn)
1458
1458
1459 def showpatch(self, ctx, matchfn):
1459 def showpatch(self, ctx, matchfn):
1460 if not matchfn:
1460 if not matchfn:
1461 matchfn = self.matchfn
1461 matchfn = self.matchfn
1462 if matchfn:
1462 if matchfn:
1463 stat = self.diffopts.get('stat')
1463 stat = self.diffopts.get('stat')
1464 diff = self.diffopts.get('patch')
1464 diff = self.diffopts.get('patch')
1465 diffopts = patch.diffallopts(self.ui, self.diffopts)
1465 diffopts = patch.diffallopts(self.ui, self.diffopts)
1466 node = ctx.node()
1466 node = ctx.node()
1467 prev = ctx.p1().node()
1467 prev = ctx.p1().node()
1468 if stat:
1468 if stat:
1469 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1469 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1470 match=matchfn, stat=True)
1470 match=matchfn, stat=True)
1471 if diff:
1471 if diff:
1472 if stat:
1472 if stat:
1473 self.ui.write("\n")
1473 self.ui.write("\n")
1474 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1474 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1475 match=matchfn, stat=False)
1475 match=matchfn, stat=False)
1476 self.ui.write("\n")
1476 self.ui.write("\n")
1477
1477
1478 class jsonchangeset(changeset_printer):
1478 class jsonchangeset(changeset_printer):
1479 '''format changeset information.'''
1479 '''format changeset information.'''
1480
1480
1481 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1481 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1482 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1482 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1483 self.cache = {}
1483 self.cache = {}
1484 self._first = True
1484 self._first = True
1485
1485
1486 def close(self):
1486 def close(self):
1487 if not self._first:
1487 if not self._first:
1488 self.ui.write("\n]\n")
1488 self.ui.write("\n]\n")
1489 else:
1489 else:
1490 self.ui.write("[]\n")
1490 self.ui.write("[]\n")
1491
1491
1492 def _show(self, ctx, copies, matchfn, props):
1492 def _show(self, ctx, copies, matchfn, props):
1493 '''show a single changeset or file revision'''
1493 '''show a single changeset or file revision'''
1494 rev = ctx.rev()
1494 rev = ctx.rev()
1495 if rev is None:
1495 if rev is None:
1496 jrev = jnode = 'null'
1496 jrev = jnode = 'null'
1497 else:
1497 else:
1498 jrev = '%d' % rev
1498 jrev = '%d' % rev
1499 jnode = '"%s"' % hex(ctx.node())
1499 jnode = '"%s"' % hex(ctx.node())
1500 j = encoding.jsonescape
1500 j = encoding.jsonescape
1501
1501
1502 if self._first:
1502 if self._first:
1503 self.ui.write("[\n {")
1503 self.ui.write("[\n {")
1504 self._first = False
1504 self._first = False
1505 else:
1505 else:
1506 self.ui.write(",\n {")
1506 self.ui.write(",\n {")
1507
1507
1508 if self.ui.quiet:
1508 if self.ui.quiet:
1509 self.ui.write(('\n "rev": %s') % jrev)
1509 self.ui.write(('\n "rev": %s') % jrev)
1510 self.ui.write((',\n "node": %s') % jnode)
1510 self.ui.write((',\n "node": %s') % jnode)
1511 self.ui.write('\n }')
1511 self.ui.write('\n }')
1512 return
1512 return
1513
1513
1514 self.ui.write(('\n "rev": %s') % jrev)
1514 self.ui.write(('\n "rev": %s') % jrev)
1515 self.ui.write((',\n "node": %s') % jnode)
1515 self.ui.write((',\n "node": %s') % jnode)
1516 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1516 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1517 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1517 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1518 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1518 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1519 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1519 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1520 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1520 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1521
1521
1522 self.ui.write((',\n "bookmarks": [%s]') %
1522 self.ui.write((',\n "bookmarks": [%s]') %
1523 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1523 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1524 self.ui.write((',\n "tags": [%s]') %
1524 self.ui.write((',\n "tags": [%s]') %
1525 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1525 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1526 self.ui.write((',\n "parents": [%s]') %
1526 self.ui.write((',\n "parents": [%s]') %
1527 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1527 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1528
1528
1529 if self.ui.debugflag:
1529 if self.ui.debugflag:
1530 if rev is None:
1530 if rev is None:
1531 jmanifestnode = 'null'
1531 jmanifestnode = 'null'
1532 else:
1532 else:
1533 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1533 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1534 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1534 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1535
1535
1536 self.ui.write((',\n "extra": {%s}') %
1536 self.ui.write((',\n "extra": {%s}') %
1537 ", ".join('"%s": "%s"' % (j(k), j(v))
1537 ", ".join('"%s": "%s"' % (j(k), j(v))
1538 for k, v in ctx.extra().items()))
1538 for k, v in ctx.extra().items()))
1539
1539
1540 files = ctx.p1().status(ctx)
1540 files = ctx.p1().status(ctx)
1541 self.ui.write((',\n "modified": [%s]') %
1541 self.ui.write((',\n "modified": [%s]') %
1542 ", ".join('"%s"' % j(f) for f in files[0]))
1542 ", ".join('"%s"' % j(f) for f in files[0]))
1543 self.ui.write((',\n "added": [%s]') %
1543 self.ui.write((',\n "added": [%s]') %
1544 ", ".join('"%s"' % j(f) for f in files[1]))
1544 ", ".join('"%s"' % j(f) for f in files[1]))
1545 self.ui.write((',\n "removed": [%s]') %
1545 self.ui.write((',\n "removed": [%s]') %
1546 ", ".join('"%s"' % j(f) for f in files[2]))
1546 ", ".join('"%s"' % j(f) for f in files[2]))
1547
1547
1548 elif self.ui.verbose:
1548 elif self.ui.verbose:
1549 self.ui.write((',\n "files": [%s]') %
1549 self.ui.write((',\n "files": [%s]') %
1550 ", ".join('"%s"' % j(f) for f in ctx.files()))
1550 ", ".join('"%s"' % j(f) for f in ctx.files()))
1551
1551
1552 if copies:
1552 if copies:
1553 self.ui.write((',\n "copies": {%s}') %
1553 self.ui.write((',\n "copies": {%s}') %
1554 ", ".join('"%s": "%s"' % (j(k), j(v))
1554 ", ".join('"%s": "%s"' % (j(k), j(v))
1555 for k, v in copies))
1555 for k, v in copies))
1556
1556
1557 matchfn = self.matchfn
1557 matchfn = self.matchfn
1558 if matchfn:
1558 if matchfn:
1559 stat = self.diffopts.get('stat')
1559 stat = self.diffopts.get('stat')
1560 diff = self.diffopts.get('patch')
1560 diff = self.diffopts.get('patch')
1561 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1561 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1562 node, prev = ctx.node(), ctx.p1().node()
1562 node, prev = ctx.node(), ctx.p1().node()
1563 if stat:
1563 if stat:
1564 self.ui.pushbuffer()
1564 self.ui.pushbuffer()
1565 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1565 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1566 match=matchfn, stat=True)
1566 match=matchfn, stat=True)
1567 self.ui.write((',\n "diffstat": "%s"')
1567 self.ui.write((',\n "diffstat": "%s"')
1568 % j(self.ui.popbuffer()))
1568 % j(self.ui.popbuffer()))
1569 if diff:
1569 if diff:
1570 self.ui.pushbuffer()
1570 self.ui.pushbuffer()
1571 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1571 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1572 match=matchfn, stat=False)
1572 match=matchfn, stat=False)
1573 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1573 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1574
1574
1575 self.ui.write("\n }")
1575 self.ui.write("\n }")
1576
1576
1577 class changeset_templater(changeset_printer):
1577 class changeset_templater(changeset_printer):
1578 '''format changeset information.'''
1578 '''format changeset information.'''
1579
1579
1580 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1580 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1581 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1581 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1582 assert not (tmpl and mapfile)
1582 assert not (tmpl and mapfile)
1583 defaulttempl = templatekw.defaulttempl
1583 defaulttempl = templatekw.defaulttempl
1584 if mapfile:
1584 if mapfile:
1585 self.t = templater.templater.frommapfile(mapfile,
1585 self.t = templater.templater.frommapfile(mapfile,
1586 cache=defaulttempl)
1586 cache=defaulttempl)
1587 else:
1587 else:
1588 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1588 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1589 cache=defaulttempl)
1589 cache=defaulttempl)
1590
1590
1591 self._counter = itertools.count()
1591 self._counter = itertools.count()
1592 self.cache = {}
1592 self.cache = {}
1593
1593
1594 # find correct templates for current mode
1594 # find correct templates for current mode
1595 tmplmodes = [
1595 tmplmodes = [
1596 (True, None),
1596 (True, None),
1597 (self.ui.verbose, 'verbose'),
1597 (self.ui.verbose, 'verbose'),
1598 (self.ui.quiet, 'quiet'),
1598 (self.ui.quiet, 'quiet'),
1599 (self.ui.debugflag, 'debug'),
1599 (self.ui.debugflag, 'debug'),
1600 ]
1600 ]
1601
1601
1602 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1602 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1603 'docheader': '', 'docfooter': ''}
1603 'docheader': '', 'docfooter': ''}
1604 for mode, postfix in tmplmodes:
1604 for mode, postfix in tmplmodes:
1605 for t in self._parts:
1605 for t in self._parts:
1606 cur = t
1606 cur = t
1607 if postfix:
1607 if postfix:
1608 cur += "_" + postfix
1608 cur += "_" + postfix
1609 if mode and cur in self.t:
1609 if mode and cur in self.t:
1610 self._parts[t] = cur
1610 self._parts[t] = cur
1611
1611
1612 if self._parts['docheader']:
1612 if self._parts['docheader']:
1613 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1613 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1614
1614
1615 def close(self):
1615 def close(self):
1616 if self._parts['docfooter']:
1616 if self._parts['docfooter']:
1617 if not self.footer:
1617 if not self.footer:
1618 self.footer = ""
1618 self.footer = ""
1619 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1619 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1620 return super(changeset_templater, self).close()
1620 return super(changeset_templater, self).close()
1621
1621
1622 def _show(self, ctx, copies, matchfn, props):
1622 def _show(self, ctx, copies, matchfn, props):
1623 '''show a single changeset or file revision'''
1623 '''show a single changeset or file revision'''
1624 props = props.copy()
1624 props = props.copy()
1625 props.update(templatekw.keywords)
1625 props.update(templatekw.keywords)
1626 props['templ'] = self.t
1626 props['templ'] = self.t
1627 props['ctx'] = ctx
1627 props['ctx'] = ctx
1628 props['repo'] = self.repo
1628 props['repo'] = self.repo
1629 props['ui'] = self.repo.ui
1629 props['ui'] = self.repo.ui
1630 props['index'] = next(self._counter)
1630 props['index'] = next(self._counter)
1631 props['revcache'] = {'copies': copies}
1631 props['revcache'] = {'copies': copies}
1632 props['cache'] = self.cache
1632 props['cache'] = self.cache
1633 props = pycompat.strkwargs(props)
1633 props = pycompat.strkwargs(props)
1634
1634
1635 # write header
1635 # write header
1636 if self._parts['header']:
1636 if self._parts['header']:
1637 h = templater.stringify(self.t(self._parts['header'], **props))
1637 h = templater.stringify(self.t(self._parts['header'], **props))
1638 if self.buffered:
1638 if self.buffered:
1639 self.header[ctx.rev()] = h
1639 self.header[ctx.rev()] = h
1640 else:
1640 else:
1641 if self.lastheader != h:
1641 if self.lastheader != h:
1642 self.lastheader = h
1642 self.lastheader = h
1643 self.ui.write(h)
1643 self.ui.write(h)
1644
1644
1645 # write changeset metadata, then patch if requested
1645 # write changeset metadata, then patch if requested
1646 key = self._parts['changeset']
1646 key = self._parts['changeset']
1647 self.ui.write(templater.stringify(self.t(key, **props)))
1647 self.ui.write(templater.stringify(self.t(key, **props)))
1648 self.showpatch(ctx, matchfn)
1648 self.showpatch(ctx, matchfn)
1649
1649
1650 if self._parts['footer']:
1650 if self._parts['footer']:
1651 if not self.footer:
1651 if not self.footer:
1652 self.footer = templater.stringify(
1652 self.footer = templater.stringify(
1653 self.t(self._parts['footer'], **props))
1653 self.t(self._parts['footer'], **props))
1654
1654
1655 def gettemplate(ui, tmpl, style):
1655 def gettemplate(ui, tmpl, style):
1656 """
1656 """
1657 Find the template matching the given template spec or style.
1657 Find the template matching the given template spec or style.
1658 """
1658 """
1659
1659
1660 # ui settings
1660 # ui settings
1661 if not tmpl and not style: # templates are stronger than styles
1661 if not tmpl and not style: # templates are stronger than styles
1662 tmpl = ui.config('ui', 'logtemplate')
1662 tmpl = ui.config('ui', 'logtemplate')
1663 if tmpl:
1663 if tmpl:
1664 return templater.unquotestring(tmpl), None
1664 return templater.unquotestring(tmpl), None
1665 else:
1665 else:
1666 style = util.expandpath(ui.config('ui', 'style', ''))
1666 style = util.expandpath(ui.config('ui', 'style', ''))
1667
1667
1668 if not tmpl and style:
1668 if not tmpl and style:
1669 mapfile = style
1669 mapfile = style
1670 if not os.path.split(mapfile)[0]:
1670 if not os.path.split(mapfile)[0]:
1671 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1671 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1672 or templater.templatepath(mapfile))
1672 or templater.templatepath(mapfile))
1673 if mapname:
1673 if mapname:
1674 mapfile = mapname
1674 mapfile = mapname
1675 return None, mapfile
1675 return None, mapfile
1676
1676
1677 if not tmpl:
1677 if not tmpl:
1678 return None, None
1678 return None, None
1679
1679
1680 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1680 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1681
1681
1682 def show_changeset(ui, repo, opts, buffered=False):
1682 def show_changeset(ui, repo, opts, buffered=False):
1683 """show one changeset using template or regular display.
1683 """show one changeset using template or regular display.
1684
1684
1685 Display format will be the first non-empty hit of:
1685 Display format will be the first non-empty hit of:
1686 1. option 'template'
1686 1. option 'template'
1687 2. option 'style'
1687 2. option 'style'
1688 3. [ui] setting 'logtemplate'
1688 3. [ui] setting 'logtemplate'
1689 4. [ui] setting 'style'
1689 4. [ui] setting 'style'
1690 If all of these values are either unset or the empty string,
1690 If all of these values are either unset or the empty string,
1691 regular display via changeset_printer() is done.
1691 regular display via changeset_printer() is done.
1692 """
1692 """
1693 # options
1693 # options
1694 matchfn = None
1694 matchfn = None
1695 if opts.get('patch') or opts.get('stat'):
1695 if opts.get('patch') or opts.get('stat'):
1696 matchfn = scmutil.matchall(repo)
1696 matchfn = scmutil.matchall(repo)
1697
1697
1698 if opts.get('template') == 'json':
1698 if opts.get('template') == 'json':
1699 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1699 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1700
1700
1701 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1701 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1702
1702
1703 if not tmpl and not mapfile:
1703 if not tmpl and not mapfile:
1704 return changeset_printer(ui, repo, matchfn, opts, buffered)
1704 return changeset_printer(ui, repo, matchfn, opts, buffered)
1705
1705
1706 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
1706 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
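
A minimal sketch of driving the returned displayer (the command name 'showheads' is hypothetical; the show/close protocol is the one defined by changeset_printer above):

    from mercurial import cmdutil

    def showheads(ui, repo, **opts):
        displayer = cmdutil.show_changeset(ui, repo, opts, buffered=False)
        for rev in repo.revs('head()'):
            displayer.show(repo[rev])
        displayer.close()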
1707
1707
1708 def showmarker(fm, marker, index=None):
1708 def showmarker(fm, marker, index=None):
1709 """utility function to display an obsolescence marker in a readable way
1709 """utility function to display an obsolescence marker in a readable way
1710
1710
1711 To be used by debug functions."""
1711 To be used by debug functions."""
1712 if index is not None:
1712 if index is not None:
1713 fm.write('index', '%i ', index)
1713 fm.write('index', '%i ', index)
1714 fm.write('precnode', '%s ', hex(marker.precnode()))
1714 fm.write('precnode', '%s ', hex(marker.precnode()))
1715 succs = marker.succnodes()
1715 succs = marker.succnodes()
1716 fm.condwrite(succs, 'succnodes', '%s ',
1716 fm.condwrite(succs, 'succnodes', '%s ',
1717 fm.formatlist(map(hex, succs), name='node'))
1717 fm.formatlist(map(hex, succs), name='node'))
1718 fm.write('flag', '%X ', marker.flags())
1718 fm.write('flag', '%X ', marker.flags())
1719 parents = marker.parentnodes()
1719 parents = marker.parentnodes()
1720 if parents is not None:
1720 if parents is not None:
1721 fm.write('parentnodes', '{%s} ',
1721 fm.write('parentnodes', '{%s} ',
1722 fm.formatlist(map(hex, parents), name='node', sep=', '))
1722 fm.formatlist(map(hex, parents), name='node', sep=', '))
1723 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1723 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1724 meta = marker.metadata().copy()
1724 meta = marker.metadata().copy()
1725 meta.pop('date', None)
1725 meta.pop('date', None)
1726 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1726 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1727 fm.plain('\n')
1727 fm.plain('\n')
1728
1728
1729 def finddate(ui, repo, date):
1729 def finddate(ui, repo, date):
1730 """Find the tipmost changeset that matches the given date spec"""
1730 """Find the tipmost changeset that matches the given date spec"""
1731
1731
1732 df = util.matchdate(date)
1732 df = util.matchdate(date)
1733 m = scmutil.matchall(repo)
1733 m = scmutil.matchall(repo)
1734 results = {}
1734 results = {}
1735
1735
1736 def prep(ctx, fns):
1736 def prep(ctx, fns):
1737 d = ctx.date()
1737 d = ctx.date()
1738 if df(d[0]):
1738 if df(d[0]):
1739 results[ctx.rev()] = d
1739 results[ctx.rev()] = d
1740
1740
1741 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1741 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1742 rev = ctx.rev()
1742 rev = ctx.rev()
1743 if rev in results:
1743 if rev in results:
1744 ui.status(_("found revision %s from %s\n") %
1744 ui.status(_("found revision %s from %s\n") %
1745 (rev, util.datestr(results[rev])))
1745 (rev, util.datestr(results[rev])))
1746 return '%d' % rev
1746 return '%d' % rev
1747
1747
1748 raise error.Abort(_("revision matching date not found"))
1748 raise error.Abort(_("revision matching date not found"))
1749
1749
1750 def increasingwindows(windowsize=8, sizelimit=512):
1750 def increasingwindows(windowsize=8, sizelimit=512):
1751 while True:
1751 while True:
1752 yield windowsize
1752 yield windowsize
1753 if windowsize < sizelimit:
1753 if windowsize < sizelimit:
1754 windowsize *= 2
1754 windowsize *= 2
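
The generator above grows the window geometrically and then keeps yielding the cap, so the first eight values are 8, 16, 32, 64, 128, 256, 512, 512; a quick way to check (illustrative helper only):

    import itertools

    def _samplewindows(n=8):
        # first n window sizes produced by increasingwindows()
        return list(itertools.islice(increasingwindows(), n))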
1755
1755
1756 class FileWalkError(Exception):
1756 class FileWalkError(Exception):
1757 pass
1757 pass
1758
1758
1759 def walkfilerevs(repo, match, follow, revs, fncache):
1759 def walkfilerevs(repo, match, follow, revs, fncache):
1760 '''Walks the file history for the matched files.
1760 '''Walks the file history for the matched files.
1761
1761
1762 Returns the changeset revs that are involved in the file history.
1762 Returns the changeset revs that are involved in the file history.
1763
1763
1764 Throws FileWalkError if the file history can't be walked using
1764 Throws FileWalkError if the file history can't be walked using
1765 filelogs alone.
1765 filelogs alone.
1766 '''
1766 '''
1767 wanted = set()
1767 wanted = set()
1768 copies = []
1768 copies = []
1769 minrev, maxrev = min(revs), max(revs)
1769 minrev, maxrev = min(revs), max(revs)
1770 def filerevgen(filelog, last):
1770 def filerevgen(filelog, last):
1771 """
1771 """
1772 Only files, no patterns. Check the history of each file.
1772 Only files, no patterns. Check the history of each file.
1773
1773
1774 Examines filelog entries within minrev, maxrev linkrev range
1774 Examines filelog entries within minrev, maxrev linkrev range
1775 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1775 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1776 tuples in backwards order
1776 tuples in backwards order
1777 """
1777 """
1778 cl_count = len(repo)
1778 cl_count = len(repo)
1779 revs = []
1779 revs = []
1780 for j in xrange(0, last + 1):
1780 for j in xrange(0, last + 1):
1781 linkrev = filelog.linkrev(j)
1781 linkrev = filelog.linkrev(j)
1782 if linkrev < minrev:
1782 if linkrev < minrev:
1783 continue
1783 continue
1784 # only yield rev for which we have the changelog, it can
1784 # only yield rev for which we have the changelog, it can
1785 # happen while doing "hg log" during a pull or commit
1785 # happen while doing "hg log" during a pull or commit
1786 if linkrev >= cl_count:
1786 if linkrev >= cl_count:
1787 break
1787 break
1788
1788
1789 parentlinkrevs = []
1789 parentlinkrevs = []
1790 for p in filelog.parentrevs(j):
1790 for p in filelog.parentrevs(j):
1791 if p != nullrev:
1791 if p != nullrev:
1792 parentlinkrevs.append(filelog.linkrev(p))
1792 parentlinkrevs.append(filelog.linkrev(p))
1793 n = filelog.node(j)
1793 n = filelog.node(j)
1794 revs.append((linkrev, parentlinkrevs,
1794 revs.append((linkrev, parentlinkrevs,
1795 follow and filelog.renamed(n)))
1795 follow and filelog.renamed(n)))
1796
1796
1797 return reversed(revs)
1797 return reversed(revs)
1798 def iterfiles():
1798 def iterfiles():
1799 pctx = repo['.']
1799 pctx = repo['.']
1800 for filename in match.files():
1800 for filename in match.files():
1801 if follow:
1801 if follow:
1802 if filename not in pctx:
1802 if filename not in pctx:
1803 raise error.Abort(_('cannot follow file not in parent '
1803 raise error.Abort(_('cannot follow file not in parent '
1804 'revision: "%s"') % filename)
1804 'revision: "%s"') % filename)
1805 yield filename, pctx[filename].filenode()
1805 yield filename, pctx[filename].filenode()
1806 else:
1806 else:
1807 yield filename, None
1807 yield filename, None
1808 for filename_node in copies:
1808 for filename_node in copies:
1809 yield filename_node
1809 yield filename_node
1810
1810
1811 for file_, node in iterfiles():
1811 for file_, node in iterfiles():
1812 filelog = repo.file(file_)
1812 filelog = repo.file(file_)
1813 if not len(filelog):
1813 if not len(filelog):
1814 if node is None:
1814 if node is None:
1815 # A zero count may be a directory or deleted file, so
1815 # A zero count may be a directory or deleted file, so
1816 # try to find matching entries on the slow path.
1816 # try to find matching entries on the slow path.
1817 if follow:
1817 if follow:
1818 raise error.Abort(
1818 raise error.Abort(
1819 _('cannot follow nonexistent file: "%s"') % file_)
1819 _('cannot follow nonexistent file: "%s"') % file_)
1820 raise FileWalkError("Cannot walk via filelog")
1820 raise FileWalkError("Cannot walk via filelog")
1821 else:
1821 else:
1822 continue
1822 continue
1823
1823
1824 if node is None:
1824 if node is None:
1825 last = len(filelog) - 1
1825 last = len(filelog) - 1
1826 else:
1826 else:
1827 last = filelog.rev(node)
1827 last = filelog.rev(node)
1828
1828
1829 # keep track of all ancestors of the file
1829 # keep track of all ancestors of the file
1830 ancestors = {filelog.linkrev(last)}
1830 ancestors = {filelog.linkrev(last)}
1831
1831
1832 # iterate from latest to oldest revision
1832 # iterate from latest to oldest revision
1833 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1833 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1834 if not follow:
1834 if not follow:
1835 if rev > maxrev:
1835 if rev > maxrev:
1836 continue
1836 continue
1837 else:
1837 else:
1838 # Note that last might not be the first interesting
1838 # Note that last might not be the first interesting
1839 # rev to us:
1839 # rev to us:
1840 # if the file has been changed after maxrev, we'll
1840 # if the file has been changed after maxrev, we'll
1841 # have linkrev(last) > maxrev, and we still need
1841 # have linkrev(last) > maxrev, and we still need
1842 # to explore the file graph
1842 # to explore the file graph
1843 if rev not in ancestors:
1843 if rev not in ancestors:
1844 continue
1844 continue
1845 # XXX insert 1327 fix here
1845 # XXX insert 1327 fix here
1846 if flparentlinkrevs:
1846 if flparentlinkrevs:
1847 ancestors.update(flparentlinkrevs)
1847 ancestors.update(flparentlinkrevs)
1848
1848
1849 fncache.setdefault(rev, []).append(file_)
1849 fncache.setdefault(rev, []).append(file_)
1850 wanted.add(rev)
1850 wanted.add(rev)
1851 if copied:
1851 if copied:
1852 copies.append(copied)
1852 copies.append(copied)
1853
1853
1854 return wanted
1854 return wanted
1855
1855
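# Illustrative sketch, not part of Mercurial: how a caller might drive
# walkfilerevs() directly. The command wiring and the fallback behaviour are
# assumptions for the example; real callers normally go through
# walkchangerevs() below, which handles the FileWalkError fallback itself.
def _sketchwalkfilerevs(ui, repo, pats, opts):
    match = scmutil.match(repo[None], pats, opts)
    revs = repo.revs('all()')
    fncache = {}
    try:
        wanted = walkfilerevs(repo, match, False, revs, fncache)
    except FileWalkError:
        # a pattern or a deleted file: filelog linkrevs are not enough
        wanted = set(r for r in revs if any(match(f) for f in repo[r].files()))
    for rev in sorted(wanted):
        ui.write('%d: %s\n' % (rev, ' '.join(fncache.get(rev, []))))
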
class _followfilter(object):
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False

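# Illustrative sketch, not part of Mercurial: _followfilter.match() must be
# fed revisions moving steadily away from the first revision it sees (here:
# descending), and answers whether each one is related to that start point.
def _sketchancestorsof(repo, startrev):
    ff = _followfilter(repo, onlyfirst=False)
    # startrev itself plus every ancestor reached walking backwards to rev 0
    return [r for r in xrange(startrev, -1, -1) if ff.match(r)]
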
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()

def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating revs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    fcacheready = [False]
    pctx = repo['.']

    def populate():
        for fn in files:
            fctx = pctx[fn]
            fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
            for c in fctx.ancestors(followfirst=followfirst):
                fcache.setdefault(c.rev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher

def _makenofollowlogfilematcher(repo, pats, opts):
    '''hook for extensions to override the filematcher for non-follow cases'''
    return None

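# Illustrative sketch, not part of Mercurial: an extension could replace the
# hook above to decide which files 'log --patch' diffs in the non-follow
# case. The matcher choice is purely an example.
#
#   def extsetup(ui):
#       def nofollowmatcher(repo, pats, opts):
#           return scmutil.matchfiles(repo, pats)
#       cmdutil._makenofollowlogfilematcher = nofollowmatcher
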
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match object filtering
    the files to be detailed when displaying the revision.
    """
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher

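# Illustrative sketch, not part of Mercurial: roughly what the translation
# above produces for two common 'hg log' option sets (filematcher stays None
# because neither --patch nor --stat is passed; the dicts are assumptions
# mirroring what commands.py would hand in).
#
#   _makelogrevset(repo, [], {'user': ['alice'], 'no_merges': True}, revs)[0]
#       == "(not merge() and (user('alice')))"
#   _makelogrevset(repo, ['README'], {}, revs)[0]
#       == "((filelog('README')))"
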
def _logrevs(repo, opts):
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and repo.dirstate.p1() == nullid:
        revs = smartset.baseset()
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs

def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match object
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher

def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match object
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        limitedrevs = []
        for idx, r in enumerate(revs):
            if limit <= idx:
                break
            limitedrevs.append(r)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher

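# Illustrative sketch, not part of Mercurial: wiring getlogrevs() to a
# changeset displayer roughly the way the 'log' command does; the displayer
# options and the plain iteration are assumptions for the example.
def _sketchgetlogrevs(ui, repo, pats, opts):
    revs, expr, filematcher = getlogrevs(repo, pats, opts)
    displayer = show_changeset(ui, repo, opts, buffered=False)
    for rev in revs:
        ctx = repo[rev]
        revmatchfn = filematcher(rev) if filematcher else None
        displayer.show(ctx, matchfn=revmatchfn)
    displayer.close()
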
def _graphnodeformatter(ui, displayer):
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode  # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    templ = formatter.gettemplater(ui, 'graphnode', spec)
    cache = {}
    if isinstance(displayer, changeset_templater):
        cache = displayer.cache  # reuse cache of slow templates
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache
    def formatnode(repo, ctx):
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templater.stringify(templ('graphnode', **props))
    return formatnode

def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

        # experimental config: experimental.graphshorten
        state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, state, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()

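# Illustrative sketch, not part of Mercurial code: the configuration knobs
# read above can be exercised from an hgrc; the concrete values below are
# examples only.
#
#   [ui]
#   graphnodetemplate = {if(tags, "*", "o")}
#
#   [experimental]
#   graphshorten = True
#   graphstyle.grandparent = :
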
def graphlog(ui, repo, pats, opts):
    # Parameters are identical to log command ones
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)

def checkunsupportedgraphflags(pats, opts):
    for op in ["newest_first"]:
        if op in opts and opts[op]:
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % op.replace("_", "-"))

def graphrevs(repo, nodes, opts):
    limit = loglimit(opts)
    nodes.reverse()
    if limit is not None:
        nodes = nodes[:limit]
    return graphmod.nodes(repo, nodes)

def add(ui, repo, match, prefix, explicitonly, **opts):
    join = lambda f: os.path.join(prefix, f)
    bad = []

    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad

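# Illustrative sketch, not part of Mercurial: reusing add() from an extension
# command to stage whatever matches the given patterns; the empty prefix and
# the return convention are assumptions for the example.
def _sketchadd(ui, repo, pats, opts):
    m = scmutil.match(repo[None], pats, opts)
    rejected = add(ui, repo, m, '', False, **opts)
    return 1 if rejected else 0
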
def addwebdirpath(repo, serverpath, webconf):
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    for r in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[r]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)

def forget(ui, repo, match, prefix, explicitonly):
    join = lambda f: os.path.join(prefix, f)
    bad = []
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot

def files(ui, ctx, m, fm, fmt, subrepos):
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret

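# Illustrative sketch, not part of Mercurial: files() writes through a
# formatter, so the same code path serves plain, JSON and templated output;
# the formatter topic and the '%s\n' format are assumptions for the example.
def _sketchfiles(ui, repo, pats, opts):
    ctx = repo[None]
    m = scmutil.match(ctx, pats, opts)
    with ui.formatter('files', opts) as fm:
        return files(ui, ctx, m, fm, '%s\n', False)
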
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                                % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
            # missing files will generate a warning elsewhere
            ret = 1
    ui.progress(_('deleting'), None)

    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue  # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret

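# Illustrative sketch, not part of Mercurial: a top-level caller passes
# warnings=None so remove() prints its accumulated warnings itself, while
# subrepo recursion hands in a shared list; the empty prefix and flag values
# here are assumptions for the example.
def _sketchremove(ui, repo, pats, opts):
    m = scmutil.match(repo[None], pats, opts)
    return remove(ui, repo, m, '', after=False, force=False, subrepos=False)
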
2635 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2635 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2636 err = 1
2636 err = 1
2637
2637
2638 def write(path):
2638 def write(path):
2639 filename = None
2639 filename = None
2640 if fntemplate:
2640 if fntemplate:
2641 filename = makefilename(repo, fntemplate, ctx.node(),
2641 filename = makefilename(repo, fntemplate, ctx.node(),
2642 pathname=os.path.join(prefix, path))
2642 pathname=os.path.join(prefix, path))
2643 with formatter.maybereopen(basefm, filename, opts) as fm:
2643 with formatter.maybereopen(basefm, filename, opts) as fm:
2644 data = ctx[path].data()
2644 data = ctx[path].data()
2645 if opts.get('decode'):
2645 if opts.get('decode'):
2646 data = repo.wwritedata(path, data)
2646 data = repo.wwritedata(path, data)
2647 fm.startitem()
2647 fm.startitem()
2648 fm.write('data', '%s', data)
2648 fm.write('data', '%s', data)
2649 fm.data(abspath=path, path=matcher.rel(path))
2649 fm.data(abspath=path, path=matcher.rel(path))
2650
2650
2651 # Automation often uses hg cat on single files, so special case it
2651 # Automation often uses hg cat on single files, so special case it
2652 # for performance to avoid the cost of parsing the manifest.
2652 # for performance to avoid the cost of parsing the manifest.
2653 if len(matcher.files()) == 1 and not matcher.anypats():
2653 if len(matcher.files()) == 1 and not matcher.anypats():
2654 file = matcher.files()[0]
2654 file = matcher.files()[0]
2655 mfl = repo.manifestlog
2655 mfl = repo.manifestlog
2656 mfnode = ctx.manifestnode()
2656 mfnode = ctx.manifestnode()
2657 try:
2657 try:
2658 if mfnode and mfl[mfnode].find(file)[0]:
2658 if mfnode and mfl[mfnode].find(file)[0]:
2659 write(file)
2659 write(file)
2660 return 0
2660 return 0
2661 except KeyError:
2661 except KeyError:
2662 pass
2662 pass
2663
2663
2664 for abs in ctx.walk(matcher):
2664 for abs in ctx.walk(matcher):
2665 write(abs)
2665 write(abs)
2666 err = 0
2666 err = 0
2667
2667
2668 for subpath in sorted(ctx.substate):
2668 for subpath in sorted(ctx.substate):
2669 sub = ctx.sub(subpath)
2669 sub = ctx.sub(subpath)
2670 try:
2670 try:
2671 submatch = matchmod.subdirmatcher(subpath, matcher)
2671 submatch = matchmod.subdirmatcher(subpath, matcher)
2672
2672
2673 if not sub.cat(submatch, basefm, fntemplate,
2673 if not sub.cat(submatch, basefm, fntemplate,
2674 os.path.join(prefix, sub._path), **opts):
2674 os.path.join(prefix, sub._path), **opts):
2675 err = 0
2675 err = 0
2676 except error.RepoLookupError:
2676 except error.RepoLookupError:
2677 ui.status(_("skipping missing subrepository: %s\n")
2677 ui.status(_("skipping missing subrepository: %s\n")
2678 % os.path.join(prefix, subpath))
2678 % os.path.join(prefix, subpath))
2679
2679
2680 return err
2680 return err

def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        if scmutil.addremove(repo, matcher, "", opts) != 0:
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
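
# Note: commit() itself only normalizes the date, resolves the log message,
# builds the matcher and optionally runs addremove; the actual commit is
# delegated to the commitfunc callback supplied by the caller.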

def samefile(f, ctx1, ctx2):
    if f in ctx1.manifest():
        a = ctx1.filectx(f)
        if f in ctx2.manifest():
            b = ctx2.filectx(f)
            return (not a.cmp(b)
                    and a.flags() == b.flags())
        else:
            return False
    else:
        return f not in ctx2.manifest()
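
# samefile() is used by amend() below to prune files whose content and flags
# are identical between the intermediate commit and the amend base, so the
# final memctx only lists files that actually changed.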

def amend(ui, repo, commitfunc, old, extra, pats, opts):
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                if old.p2:
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())
                files = [f for f in files if not samefile(f, ctx, base)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

                user = opts.get('user') or old.user()
                date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This is not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
            # commit the whole amend process
            if createmarkers:
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs, operation='amend')
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, wlock)
    return newid
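
# Summary of amend(): a temporary commit captures any working directory
# changes, a memctx combining that commit (or the old files) with the amended
# changeset's metadata is committed with the original phase, bookmarks are
# moved to the new node, and the rewritten changesets are either obsoleted
# (when createmarkers is enabled) or stripped.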

def commiteditor(repo, ctx, subs, editform=''):
    if ctx.description():
        return ctx.description()
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)

def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            tmpl = templater.unquotestring(tmpl)
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
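
# commitforceeditor() runs the editor from the repository root and, when a
# transaction is open, writes the pending dirstate/transaction data
# (tr.writepending()) first, so tools spawned from the editor can see the
# in-progress state.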

def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)

    for k, v in repo.ui.configitems('committemplate'):
        if k != 'changeset':
            t.t.cache[k] = v

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
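
# The loop above copies every 'committemplate.*' configuration key other than
# 'changeset' into the templater's cache, presumably so those entries can be
# referenced by name from the main commit template.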

def hgprefix(msg):
    return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
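
# For example, hgprefix("user: alice\n") yields "HG: user: alice"; empty
# lines in the input are dropped rather than turned into bare "HG:" lines.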

def buildcommittext(repo, ctx, subs, extramsg):
    edittext = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        edittext.append(ctx.description())
    edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(hgprefix(_("Enter commit message."
                      " Lines beginning with 'HG:' are removed.")))
    edittext.append(hgprefix(extramsg))
    edittext.append("HG: --")
    edittext.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        edittext.append(hgprefix(_("branch merge")))
    if ctx.branch():
        edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
    edittext.extend([hgprefix(_("added %s") % f) for f in added])
    edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
    edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
    if not added and not modified and not removed:
        edittext.append(hgprefix(_("no files changed")))
    edittext.append("")

    return "\n".join(edittext)
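
# All of the "HG:" lines emitted above are stripped from the edited text again
# by the re.sub() in commitforceeditor(), so they never end up in the commit
# message.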

def commitstatus(repo, node, branch, bheads=None, opts=None):
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
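
# commitstatus() is where the "created new head" and "reopening closed branch
# head" notices are reported; the decision table in the comments above spells
# out which parent configurations trigger the new-head message.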

def postcommitstatus(repo, pats, opts):
    return repo.status(match=scmutil.match(repo[None], pats, opts))

def revert(ui, repo, ctx, parents, *pats, **opts):
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find the status of all files in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something other than the parent. This
        # will slightly alter the behavior of revert (doing back up or not,
        # delete or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinguish between dirstate removes and the others
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified files apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between files to forget and the others
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present
        # at the same path. If such a file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # actions to be actually performed by revert
        # (<list of files>, <message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                  }

        # "constants" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set to avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

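        # Reading the dispatch table below: for example, a path in
        # `dsmodified` (modified relative to the target with local changes)
        # is appended to actions['revert'][0] and backed up according to
        # `dsmodifiedbackup`, while a path in `unknown` only triggers the
        # "file not managed" message because its action list is None.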
        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that result in files changed on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since target, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets do not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touched on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))
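
# revert() only classifies files and fills the `actions` lists; the actual
# working directory and dirstate mutations happen in _performrevert() below,
# after _revertprefetch() has given extensions a chance to prefetch file
# contents.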

def _revertprefetch(repo, ctx, *files):
    """Let extensions that change the storage layer prefetch content"""
    pass

def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """function that actually performs all the actions computed for revert

    This is an independent function to let extensions plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    excluded_files = []
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        try:
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'revert'
            reversehunks = repo.ui.configbool('experimental',
                'revertalternateinteractivemode',
                True)
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't check out modified files; they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
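
# The command subclass below only tags functions registered through the
# legacy cmdutil.command (via func._deprecatedregistrar) so that
# extensions.py can emit a deprecation warning before delegating to
# registrar.command.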
3475
3475
3476 class command(registrar.command):
3476 class command(registrar.command):
3477 def _doregister(self, func, name, *args, **kwargs):
3477 def _doregister(self, func, name, *args, **kwargs):
3478 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3478 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3479 return super(command, self)._doregister(func, name, *args, **kwargs)
3479 return super(command, self)._doregister(func, name, *args, **kwargs)
3480
3480
3481 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3481 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3482 # commands.outgoing. "missing" is "missing" of the result of
3482 # commands.outgoing. "missing" is "missing" of the result of
3483 # "findcommonoutgoing()"
3483 # "findcommonoutgoing()"
3484 outgoinghooks = util.hooks()
3484 outgoinghooks = util.hooks()
3485
3485
3486 # a list of (ui, repo) functions called by commands.summary
3486 # a list of (ui, repo) functions called by commands.summary
3487 summaryhooks = util.hooks()
3487 summaryhooks = util.hooks()
3488
3488
3489 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3489 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3490 #
3490 #
3491 # functions should return tuple of booleans below, if 'changes' is None:
3491 # functions should return tuple of booleans below, if 'changes' is None:
3492 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3492 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3493 #
3493 #
3494 # otherwise, 'changes' is a tuple of tuples below:
3494 # otherwise, 'changes' is a tuple of tuples below:
3495 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3495 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3496 # - (desturl, destbranch, destpeer, outgoing)
3496 # - (desturl, destbranch, destpeer, outgoing)
3497 summaryremotehooks = util.hooks()
3497 summaryremotehooks = util.hooks()
3498
3498
3499 # A list of state files kept by multistep operations like graft.
3499 # A list of state files kept by multistep operations like graft.
3500 # Since graft cannot be aborted, it is considered 'clearable' by update.
3500 # Since graft cannot be aborted, it is considered 'clearable' by update.
3501 # note: bisect is intentionally excluded
3501 # note: bisect is intentionally excluded
3502 # (state file, clearable, allowcommit, error, hint)
3502 # (state file, clearable, allowcommit, error, hint)
3503 unfinishedstates = [
3503 unfinishedstates = [
3504 ('graftstate', True, False, _('graft in progress'),
3504 ('graftstate', True, False, _('graft in progress'),
3505 _("use 'hg graft --continue' or 'hg update' to abort")),
3505 _("use 'hg graft --continue' or 'hg update' to abort")),
3506 ('updatestate', True, False, _('last update was interrupted'),
3506 ('updatestate', True, False, _('last update was interrupted'),
3507 _("use 'hg update' to get a consistent checkout"))
3507 _("use 'hg update' to get a consistent checkout"))
3508 ]
3508 ]
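Extensions register their own entries in this list so that a new operation
refuses to start while theirs is unfinished. A hedged sketch of the pattern;
the 'fooext' names and messages are invented for illustration:

    # Hypothetical extension code; assumes the usual extension imports.
    from mercurial import cmdutil
    from mercurial.i18n import _

    cmdutil.unfinishedstates.append(
        ('fooextstate',   # state file kept under .hg/
         False,           # not clearable: 'hg update' must not discard it
         False,           # allowcommit: committing is refused while active
         _('fooext operation in progress'),
         _("use 'hg fooext --continue' or 'hg fooext --abort'")))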
3509
3509
3510 def checkunfinished(repo, commit=False):
3510 def checkunfinished(repo, commit=False):
3511 '''Look for an unfinished multistep operation, like graft, and abort
3511 '''Look for an unfinished multistep operation, like graft, and abort
3512 if found. It's probably good to check this right before
3512 if found. It's probably good to check this right before
3513 bailifchanged().
3513 bailifchanged().
3514 '''
3514 '''
3515 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3515 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3516 if commit and allowcommit:
3516 if commit and allowcommit:
3517 continue
3517 continue
3518 if repo.vfs.exists(f):
3518 if repo.vfs.exists(f):
3519 raise error.Abort(msg, hint=hint)
3519 raise error.Abort(msg, hint=hint)
3520
3520
3521 def clearunfinished(repo):
3521 def clearunfinished(repo):
3522 '''Check for unfinished operations (as above), and clear the ones
3522 '''Check for unfinished operations (as above), and clear the ones
3523 that are clearable.
3523 that are clearable.
3524 '''
3524 '''
3525 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3525 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3526 if not clearable and repo.vfs.exists(f):
3526 if not clearable and repo.vfs.exists(f):
3527 raise error.Abort(msg, hint=hint)
3527 raise error.Abort(msg, hint=hint)
3528 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3528 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3529 if clearable and repo.vfs.exists(f):
3529 if clearable and repo.vfs.exists(f):
3530 util.unlink(repo.vfs.join(f))
3530 util.unlink(repo.vfs.join(f))
3531
3531
3532 afterresolvedstates = [
3532 afterresolvedstates = [
3533 ('graftstate',
3533 ('graftstate',
3534 _('hg graft --continue')),
3534 _('hg graft --continue')),
3535 ]
3535 ]
3536
3536
3537 def howtocontinue(repo):
3537 def howtocontinue(repo):
3538 '''Check for an unfinished operation and return the command to finish
3538 '''Check for an unfinished operation and return the command to finish
3539 it.
3539 it.
3540
3540
3541 afterresolvedstates tuples define a .hg/{file} and the corresponding
3541 afterresolvedstates tuples define a .hg/{file} and the corresponding
3542 command needed to finish it.
3542 command needed to finish it.
3543
3543
3544 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3544 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3545 a boolean.
3545 a boolean.
3546 '''
3546 '''
3547 contmsg = _("continue: %s")
3547 contmsg = _("continue: %s")
3548 for f, msg in afterresolvedstates:
3548 for f, msg in afterresolvedstates:
3549 if repo.vfs.exists(f):
3549 if repo.vfs.exists(f):
3550 return contmsg % msg, True
3550 return contmsg % msg, True
3551 workingctx = repo[None]
3551 workingctx = repo[None]
3552 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3552 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3553 for s in workingctx.substate)
3553 for s in workingctx.substate)
3554 if dirty:
3554 if dirty:
3555 return contmsg % _("hg commit"), False
3555 return contmsg % _("hg commit"), False
3556 return None, None
3556 return None, None
3557
3557
3558 def checkafterresolved(repo):
3558 def checkafterresolved(repo):
3559 '''Inform the user about the next action after completing hg resolve
3559 '''Inform the user about the next action after completing hg resolve
3560
3560
3561 If there's a matching afterresolvedstates, howtocontinue will yield
3561 If there's a matching afterresolvedstates, howtocontinue will yield
3562 repo.ui.warn as the reporter.
3562 repo.ui.warn as the reporter.
3563
3563
3564 Otherwise, it will yield repo.ui.note.
3564 Otherwise, it will yield repo.ui.note.
3565 '''
3565 '''
3566 msg, warning = howtocontinue(repo)
3566 msg, warning = howtocontinue(repo)
3567 if msg is not None:
3567 if msg is not None:
3568 if warning:
3568 if warning:
3569 repo.ui.warn("%s\n" % msg)
3569 repo.ui.warn("%s\n" % msg)
3570 else:
3570 else:
3571 repo.ui.note("%s\n" % msg)
3571 repo.ui.note("%s\n" % msg)
3572
3572
3573 def wrongtooltocontinue(repo, task):
3573 def wrongtooltocontinue(repo, task):
3574 '''Raise an abort suggesting how to properly continue if there is an
3574 '''Raise an abort suggesting how to properly continue if there is an
3575 active task.
3575 active task.
3576
3576
3577 Uses howtocontinue() to find the active task.
3577 Uses howtocontinue() to find the active task.
3578
3578
3579 If there's no task, or the only suggestion is 'hg commit' (the
3579 If there's no task, or the only suggestion is 'hg commit' (the
3580 repo.ui.note case in howtocontinue), it does not offer a hint.
3580 repo.ui.note case in howtocontinue), it does not offer a hint.
3581 '''
3581 '''
3582 after = howtocontinue(repo)
3582 after = howtocontinue(repo)
3583 hint = None
3583 hint = None
3584 if after[1]:
3584 if after[1]:
3585 hint = after[0]
3585 hint = after[0]
3586 raise error.Abort(_('no %s in progress') % task, hint=hint)
3586 raise error.Abort(_('no %s in progress') % task, hint=hint)
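To show how these helpers fit together, here is a hedged, graft-shaped sketch
of a --continue path; it is illustrative only, not a copy of commands.py
(continuegraft and its body are invented):

    from mercurial import cmdutil
    from mercurial.i18n import _

    def continuegraft(ui, repo):
        # If our own state file is gone, wrongtooltocontinue() aborts with a
        # hint naming whatever operation really is in progress (if any).
        if not repo.vfs.exists('graftstate'):
            cmdutil.wrongtooltocontinue(repo, _('graft'))
        nodes = repo.vfs.read('graftstate').splitlines()
        # ... resume grafting the recorded nodes ...
        return nodes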
@@ -1,982 +1,983 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16
16
17 from .i18n import _
17 from .i18n import _
18 from .node import wdirrev
18 from .node import wdirrev
19 from . import (
19 from . import (
20 encoding,
20 encoding,
21 error,
21 error,
22 match as matchmod,
22 match as matchmod,
23 pathutil,
23 pathutil,
24 phases,
24 phases,
25 pycompat,
25 pycompat,
26 revsetlang,
26 revsetlang,
27 similar,
27 similar,
28 util,
28 util,
29 )
29 )
30
30
31 if pycompat.osname == 'nt':
31 if pycompat.osname == 'nt':
32 from . import scmwindows as scmplatform
32 from . import scmwindows as scmplatform
33 else:
33 else:
34 from . import scmposix as scmplatform
34 from . import scmposix as scmplatform
35
35
36 termsize = scmplatform.termsize
36 termsize = scmplatform.termsize
37
37
38 class status(tuple):
38 class status(tuple):
39 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
39 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
40 and 'ignored' properties are only relevant to the working copy.
40 and 'ignored' properties are only relevant to the working copy.
41 '''
41 '''
42
42
43 __slots__ = ()
43 __slots__ = ()
44
44
45 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
45 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
46 clean):
46 clean):
47 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
47 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
48 ignored, clean))
48 ignored, clean))
49
49
50 @property
50 @property
51 def modified(self):
51 def modified(self):
52 '''files that have been modified'''
52 '''files that have been modified'''
53 return self[0]
53 return self[0]
54
54
55 @property
55 @property
56 def added(self):
56 def added(self):
57 '''files that have been added'''
57 '''files that have been added'''
58 return self[1]
58 return self[1]
59
59
60 @property
60 @property
61 def removed(self):
61 def removed(self):
62 '''files that have been removed'''
62 '''files that have been removed'''
63 return self[2]
63 return self[2]
64
64
65 @property
65 @property
66 def deleted(self):
66 def deleted(self):
67 '''files that are in the dirstate, but have been deleted from the
67 '''files that are in the dirstate, but have been deleted from the
68 working copy (aka "missing")
68 working copy (aka "missing")
69 '''
69 '''
70 return self[3]
70 return self[3]
71
71
72 @property
72 @property
73 def unknown(self):
73 def unknown(self):
74 '''files not in the dirstate that are not ignored'''
74 '''files not in the dirstate that are not ignored'''
75 return self[4]
75 return self[4]
76
76
77 @property
77 @property
78 def ignored(self):
78 def ignored(self):
79 '''files not in the dirstate that are ignored (by _dirignore())'''
79 '''files not in the dirstate that are ignored (by _dirignore())'''
80 return self[5]
80 return self[5]
81
81
82 @property
82 @property
83 def clean(self):
83 def clean(self):
84 '''files that have not been modified'''
84 '''files that have not been modified'''
85 return self[6]
85 return self[6]
86
86
87 def __repr__(self, *args, **kwargs):
87 def __repr__(self, *args, **kwargs):
88 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
88 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
89 'unknown=%r, ignored=%r, clean=%r>') % self)
89 'unknown=%r, ignored=%r, clean=%r>') % self)
90
90
91 def itersubrepos(ctx1, ctx2):
91 def itersubrepos(ctx1, ctx2):
92 """find subrepos in ctx1 or ctx2"""
92 """find subrepos in ctx1 or ctx2"""
93 # Create a (subpath, ctx) mapping where we prefer subpaths from
93 # Create a (subpath, ctx) mapping where we prefer subpaths from
94 # ctx1. The subpaths from ctx2 are important when the .hgsub file
94 # ctx1. The subpaths from ctx2 are important when the .hgsub file
95 # has been modified (in ctx2) but not yet committed (in ctx1).
95 # has been modified (in ctx2) but not yet committed (in ctx1).
96 subpaths = dict.fromkeys(ctx2.substate, ctx2)
96 subpaths = dict.fromkeys(ctx2.substate, ctx2)
97 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
97 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
98
98
99 missing = set()
99 missing = set()
100
100
101 for subpath in ctx2.substate:
101 for subpath in ctx2.substate:
102 if subpath not in ctx1.substate:
102 if subpath not in ctx1.substate:
103 del subpaths[subpath]
103 del subpaths[subpath]
104 missing.add(subpath)
104 missing.add(subpath)
105
105
106 for subpath, ctx in sorted(subpaths.iteritems()):
106 for subpath, ctx in sorted(subpaths.iteritems()):
107 yield subpath, ctx.sub(subpath)
107 yield subpath, ctx.sub(subpath)
108
108
109 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
109 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
110 # status and diff will have an accurate result when it does
110 # status and diff will have an accurate result when it does
111 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
111 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
112 # against itself.
112 # against itself.
113 for subpath in missing:
113 for subpath in missing:
114 yield subpath, ctx2.nullsub(subpath, ctx1)
114 yield subpath, ctx2.nullsub(subpath, ctx1)
115
115
116 def nochangesfound(ui, repo, excluded=None):
116 def nochangesfound(ui, repo, excluded=None):
117 '''Report no changes for push/pull, excluded is None or a list of
117 '''Report no changes for push/pull; excluded is None or a list of
117 '''Report no changes for push/pull; excluded is None or a list of
118 nodes excluded from the push/pull.
119 '''
119 '''
120 secretlist = []
120 secretlist = []
121 if excluded:
121 if excluded:
122 for n in excluded:
122 for n in excluded:
123 ctx = repo[n]
123 ctx = repo[n]
124 if ctx.phase() >= phases.secret and not ctx.extinct():
124 if ctx.phase() >= phases.secret and not ctx.extinct():
125 secretlist.append(n)
125 secretlist.append(n)
126
126
127 if secretlist:
127 if secretlist:
128 ui.status(_("no changes found (ignored %d secret changesets)\n")
128 ui.status(_("no changes found (ignored %d secret changesets)\n")
129 % len(secretlist))
129 % len(secretlist))
130 else:
130 else:
131 ui.status(_("no changes found\n"))
131 ui.status(_("no changes found\n"))
132
132
133 def callcatch(ui, func):
133 def callcatch(ui, func):
134 """call func() with global exception handling
134 """call func() with global exception handling
135
135
136 return func() if no exception happens. otherwise do some error handling
136 return func() if no exception happens. otherwise do some error handling
137 and return an exit code accordingly. does not handle all exceptions.
137 and return an exit code accordingly. does not handle all exceptions.
138 """
138 """
139 try:
139 try:
140 try:
140 try:
141 return func()
141 return func()
142 except: # re-raises
142 except: # re-raises
143 ui.traceback()
143 ui.traceback()
144 raise
144 raise
145 # Global exception handling, alphabetically
145 # Global exception handling, alphabetically
146 # Mercurial-specific first, followed by built-in and library exceptions
146 # Mercurial-specific first, followed by built-in and library exceptions
147 except error.LockHeld as inst:
147 except error.LockHeld as inst:
148 if inst.errno == errno.ETIMEDOUT:
148 if inst.errno == errno.ETIMEDOUT:
149 reason = _('timed out waiting for lock held by %r') % inst.locker
149 reason = _('timed out waiting for lock held by %r') % inst.locker
150 else:
150 else:
151 reason = _('lock held by %r') % inst.locker
151 reason = _('lock held by %r') % inst.locker
152 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
152 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
153 if not inst.locker:
153 if not inst.locker:
154 ui.warn(_("(lock might be very busy)\n"))
154 ui.warn(_("(lock might be very busy)\n"))
155 except error.LockUnavailable as inst:
155 except error.LockUnavailable as inst:
156 ui.warn(_("abort: could not lock %s: %s\n") %
156 ui.warn(_("abort: could not lock %s: %s\n") %
157 (inst.desc or inst.filename, inst.strerror))
157 (inst.desc or inst.filename, inst.strerror))
158 except error.OutOfBandError as inst:
158 except error.OutOfBandError as inst:
159 if inst.args:
159 if inst.args:
160 msg = _("abort: remote error:\n")
160 msg = _("abort: remote error:\n")
161 else:
161 else:
162 msg = _("abort: remote error\n")
162 msg = _("abort: remote error\n")
163 ui.warn(msg)
163 ui.warn(msg)
164 if inst.args:
164 if inst.args:
165 ui.warn(''.join(inst.args))
165 ui.warn(''.join(inst.args))
166 if inst.hint:
166 if inst.hint:
167 ui.warn('(%s)\n' % inst.hint)
167 ui.warn('(%s)\n' % inst.hint)
168 except error.RepoError as inst:
168 except error.RepoError as inst:
169 ui.warn(_("abort: %s!\n") % inst)
169 ui.warn(_("abort: %s!\n") % inst)
170 if inst.hint:
170 if inst.hint:
171 ui.warn(_("(%s)\n") % inst.hint)
171 ui.warn(_("(%s)\n") % inst.hint)
172 except error.ResponseError as inst:
172 except error.ResponseError as inst:
173 ui.warn(_("abort: %s") % inst.args[0])
173 ui.warn(_("abort: %s") % inst.args[0])
174 if not isinstance(inst.args[1], basestring):
174 if not isinstance(inst.args[1], basestring):
175 ui.warn(" %r\n" % (inst.args[1],))
175 ui.warn(" %r\n" % (inst.args[1],))
176 elif not inst.args[1]:
176 elif not inst.args[1]:
177 ui.warn(_(" empty string\n"))
177 ui.warn(_(" empty string\n"))
178 else:
178 else:
179 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
179 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
180 except error.CensoredNodeError as inst:
180 except error.CensoredNodeError as inst:
181 ui.warn(_("abort: file censored %s!\n") % inst)
181 ui.warn(_("abort: file censored %s!\n") % inst)
182 except error.RevlogError as inst:
182 except error.RevlogError as inst:
183 ui.warn(_("abort: %s!\n") % inst)
183 ui.warn(_("abort: %s!\n") % inst)
184 except error.InterventionRequired as inst:
184 except error.InterventionRequired as inst:
185 ui.warn("%s\n" % inst)
185 ui.warn("%s\n" % inst)
186 if inst.hint:
186 if inst.hint:
187 ui.warn(_("(%s)\n") % inst.hint)
187 ui.warn(_("(%s)\n") % inst.hint)
188 return 1
188 return 1
189 except error.Abort as inst:
189 except error.Abort as inst:
190 ui.warn(_("abort: %s\n") % inst)
190 ui.warn(_("abort: %s\n") % inst)
191 if inst.hint:
191 if inst.hint:
192 ui.warn(_("(%s)\n") % inst.hint)
192 ui.warn(_("(%s)\n") % inst.hint)
193 except ImportError as inst:
193 except ImportError as inst:
194 ui.warn(_("abort: %s!\n") % inst)
194 ui.warn(_("abort: %s!\n") % inst)
195 m = str(inst).split()[-1]
195 m = str(inst).split()[-1]
196 if m in "mpatch bdiff".split():
196 if m in "mpatch bdiff".split():
197 ui.warn(_("(did you forget to compile extensions?)\n"))
197 ui.warn(_("(did you forget to compile extensions?)\n"))
198 elif m in "zlib".split():
198 elif m in "zlib".split():
199 ui.warn(_("(is your Python install correct?)\n"))
199 ui.warn(_("(is your Python install correct?)\n"))
200 except IOError as inst:
200 except IOError as inst:
201 if util.safehasattr(inst, "code"):
201 if util.safehasattr(inst, "code"):
202 ui.warn(_("abort: %s\n") % inst)
202 ui.warn(_("abort: %s\n") % inst)
203 elif util.safehasattr(inst, "reason"):
203 elif util.safehasattr(inst, "reason"):
204 try: # usually it is in the form (errno, strerror)
204 try: # usually it is in the form (errno, strerror)
205 reason = inst.reason.args[1]
205 reason = inst.reason.args[1]
206 except (AttributeError, IndexError):
206 except (AttributeError, IndexError):
207 # it might be anything, for example a string
207 # it might be anything, for example a string
208 reason = inst.reason
208 reason = inst.reason
209 if isinstance(reason, unicode):
209 if isinstance(reason, unicode):
210 # SSLError of Python 2.7.9 contains a unicode
210 # SSLError of Python 2.7.9 contains a unicode
211 reason = encoding.unitolocal(reason)
211 reason = encoding.unitolocal(reason)
212 ui.warn(_("abort: error: %s\n") % reason)
212 ui.warn(_("abort: error: %s\n") % reason)
213 elif (util.safehasattr(inst, "args")
213 elif (util.safehasattr(inst, "args")
214 and inst.args and inst.args[0] == errno.EPIPE):
214 and inst.args and inst.args[0] == errno.EPIPE):
215 pass
215 pass
216 elif getattr(inst, "strerror", None):
216 elif getattr(inst, "strerror", None):
217 if getattr(inst, "filename", None):
217 if getattr(inst, "filename", None):
218 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
218 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
219 else:
219 else:
220 ui.warn(_("abort: %s\n") % inst.strerror)
220 ui.warn(_("abort: %s\n") % inst.strerror)
221 else:
221 else:
222 raise
222 raise
223 except OSError as inst:
223 except OSError as inst:
224 if getattr(inst, "filename", None) is not None:
224 if getattr(inst, "filename", None) is not None:
225 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
225 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
226 else:
226 else:
227 ui.warn(_("abort: %s\n") % inst.strerror)
227 ui.warn(_("abort: %s\n") % inst.strerror)
228 except MemoryError:
228 except MemoryError:
229 ui.warn(_("abort: out of memory\n"))
229 ui.warn(_("abort: out of memory\n"))
230 except SystemExit as inst:
230 except SystemExit as inst:
231 # Commands shouldn't sys.exit directly, but give a return code.
231 # Commands shouldn't sys.exit directly, but give a return code.
232 # Just in case, catch this and pass the exit code to the caller.
232 # Just in case, catch this and pass the exit code to the caller.
233 return inst.code
233 return inst.code
234 except socket.error as inst:
234 except socket.error as inst:
235 ui.warn(_("abort: %s\n") % inst.args[-1])
235 ui.warn(_("abort: %s\n") % inst.args[-1])
236
236
237 return -1
237 return -1
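The intended calling pattern, sketched with a hypothetical runcommand() entry
point: wrap it in callcatch() so known exceptions become ui messages plus an
integer exit code (1 for InterventionRequired, the SystemExit code if one was
raised, otherwise -1).

    def run(ui, runcommand):
        # 'runcommand' stands in for the real entry point; callcatch() calls
        # the wrapped callable and maps known exceptions to exit codes.
        return callcatch(ui, lambda: runcommand(ui))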
238
238
239 def checknewlabel(repo, lbl, kind):
239 def checknewlabel(repo, lbl, kind):
240 # Do not use the "kind" parameter in ui output.
240 # Do not use the "kind" parameter in ui output.
241 # It makes strings difficult to translate.
241 # It makes strings difficult to translate.
242 if lbl in ['tip', '.', 'null']:
242 if lbl in ['tip', '.', 'null']:
243 raise error.Abort(_("the name '%s' is reserved") % lbl)
243 raise error.Abort(_("the name '%s' is reserved") % lbl)
244 for c in (':', '\0', '\n', '\r'):
244 for c in (':', '\0', '\n', '\r'):
245 if c in lbl:
245 if c in lbl:
246 raise error.Abort(_("%r cannot be used in a name") % c)
246 raise error.Abort(_("%r cannot be used in a name") % c)
247 try:
247 try:
248 int(lbl)
248 int(lbl)
249 raise error.Abort(_("cannot use an integer as a name"))
249 raise error.Abort(_("cannot use an integer as a name"))
250 except ValueError:
250 except ValueError:
251 pass
251 pass
252
252
253 def checkfilename(f):
253 def checkfilename(f):
254 '''Check that the filename f is an acceptable filename for a tracked file'''
254 '''Check that the filename f is an acceptable filename for a tracked file'''
255 if '\r' in f or '\n' in f:
255 if '\r' in f or '\n' in f:
256 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
256 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
257
257
258 def checkportable(ui, f):
258 def checkportable(ui, f):
259 '''Check if filename f is portable and warn or abort depending on config'''
259 '''Check if filename f is portable and warn or abort depending on config'''
260 checkfilename(f)
260 checkfilename(f)
261 abort, warn = checkportabilityalert(ui)
261 abort, warn = checkportabilityalert(ui)
262 if abort or warn:
262 if abort or warn:
263 msg = util.checkwinfilename(f)
263 msg = util.checkwinfilename(f)
264 if msg:
264 if msg:
265 msg = "%s: %r" % (msg, f)
265 msg = "%s: %r" % (msg, f)
266 if abort:
266 if abort:
267 raise error.Abort(msg)
267 raise error.Abort(msg)
268 ui.warn(_("warning: %s\n") % msg)
268 ui.warn(_("warning: %s\n") % msg)
269
269
270 def checkportabilityalert(ui):
270 def checkportabilityalert(ui):
271 '''check if the user's config requests nothing, a warning, or abort for
271 '''check if the user's config requests nothing, a warning, or abort for
272 non-portable filenames'''
272 non-portable filenames'''
273 val = ui.config('ui', 'portablefilenames', 'warn')
273 val = ui.config('ui', 'portablefilenames', 'warn')
274 lval = val.lower()
274 lval = val.lower()
275 bval = util.parsebool(val)
275 bval = util.parsebool(val)
276 abort = pycompat.osname == 'nt' or lval == 'abort'
276 abort = pycompat.osname == 'nt' or lval == 'abort'
277 warn = bval or lval == 'warn'
277 warn = bval or lval == 'warn'
278 if bval is None and not (warn or abort or lval == 'ignore'):
278 if bval is None and not (warn or abort or lval == 'ignore'):
279 raise error.ConfigError(
279 raise error.ConfigError(
280 _("ui.portablefilenames value is invalid ('%s')") % val)
280 _("ui.portablefilenames value is invalid ('%s')") % val)
281 return abort, warn
281 return abort, warn
282
282
283 class casecollisionauditor(object):
283 class casecollisionauditor(object):
284 def __init__(self, ui, abort, dirstate):
284 def __init__(self, ui, abort, dirstate):
285 self._ui = ui
285 self._ui = ui
286 self._abort = abort
286 self._abort = abort
287 allfiles = '\0'.join(dirstate._map)
287 allfiles = '\0'.join(dirstate._map)
288 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
288 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
289 self._dirstate = dirstate
289 self._dirstate = dirstate
290 # The purpose of _newfiles is so that we don't complain about
290 # The purpose of _newfiles is so that we don't complain about
291 # case collisions if someone were to call this object with the
291 # case collisions if someone were to call this object with the
292 # same filename twice.
292 # same filename twice.
293 self._newfiles = set()
293 self._newfiles = set()
294
294
295 def __call__(self, f):
295 def __call__(self, f):
296 if f in self._newfiles:
296 if f in self._newfiles:
297 return
297 return
298 fl = encoding.lower(f)
298 fl = encoding.lower(f)
299 if fl in self._loweredfiles and f not in self._dirstate:
299 if fl in self._loweredfiles and f not in self._dirstate:
300 msg = _('possible case-folding collision for %s') % f
300 msg = _('possible case-folding collision for %s') % f
301 if self._abort:
301 if self._abort:
302 raise error.Abort(msg)
302 raise error.Abort(msg)
303 self._ui.warn(_("warning: %s\n") % msg)
303 self._ui.warn(_("warning: %s\n") % msg)
304 self._loweredfiles.add(fl)
304 self._loweredfiles.add(fl)
305 self._newfiles.add(f)
305 self._newfiles.add(f)
306
306
307 def filteredhash(repo, maxrev):
307 def filteredhash(repo, maxrev):
308 """build hash of filtered revisions in the current repoview.
308 """build hash of filtered revisions in the current repoview.
309
309
310 Multiple caches perform up-to-date validation by checking that the
310 Multiple caches perform up-to-date validation by checking that the
311 tiprev and tipnode stored in the cache file match the current repository.
311 tiprev and tipnode stored in the cache file match the current repository.
312 However, this is not sufficient for validating repoviews because the set
312 However, this is not sufficient for validating repoviews because the set
313 of revisions in the view may change without the repository tiprev and
313 of revisions in the view may change without the repository tiprev and
314 tipnode changing.
314 tipnode changing.
315
315
316 This function hashes all the revs filtered from the view and returns
316 This function hashes all the revs filtered from the view and returns
317 that SHA-1 digest.
317 that SHA-1 digest.
318 """
318 """
319 cl = repo.changelog
319 cl = repo.changelog
320 if not cl.filteredrevs:
320 if not cl.filteredrevs:
321 return None
321 return None
322 key = None
322 key = None
323 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
323 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
324 if revs:
324 if revs:
325 s = hashlib.sha1()
325 s = hashlib.sha1()
326 for rev in revs:
326 for rev in revs:
327 s.update('%d;' % rev)
327 s.update('%d;' % rev)
328 key = s.digest()
328 key = s.digest()
329 return key
329 return key
330
330
331 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
331 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
332 '''yield every hg repository under path, always recursively.
332 '''yield every hg repository under path, always recursively.
333 The recurse flag will only control recursion into repo working dirs'''
333 The recurse flag will only control recursion into repo working dirs'''
334 def errhandler(err):
334 def errhandler(err):
335 if err.filename == path:
335 if err.filename == path:
336 raise err
336 raise err
337 samestat = getattr(os.path, 'samestat', None)
337 samestat = getattr(os.path, 'samestat', None)
338 if followsym and samestat is not None:
338 if followsym and samestat is not None:
339 def adddir(dirlst, dirname):
339 def adddir(dirlst, dirname):
340 match = False
340 match = False
341 dirstat = os.stat(dirname)
341 dirstat = os.stat(dirname)
342 for lstdirstat in dirlst:
342 for lstdirstat in dirlst:
343 if samestat(dirstat, lstdirstat):
343 if samestat(dirstat, lstdirstat):
344 match = True
344 match = True
345 break
345 break
346 if not match:
346 if not match:
347 dirlst.append(dirstat)
347 dirlst.append(dirstat)
348 return not match
348 return not match
349 else:
349 else:
350 followsym = False
350 followsym = False
351
351
352 if (seen_dirs is None) and followsym:
352 if (seen_dirs is None) and followsym:
353 seen_dirs = []
353 seen_dirs = []
354 adddir(seen_dirs, path)
354 adddir(seen_dirs, path)
355 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
355 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
356 dirs.sort()
356 dirs.sort()
357 if '.hg' in dirs:
357 if '.hg' in dirs:
358 yield root # found a repository
358 yield root # found a repository
359 qroot = os.path.join(root, '.hg', 'patches')
359 qroot = os.path.join(root, '.hg', 'patches')
360 if os.path.isdir(os.path.join(qroot, '.hg')):
360 if os.path.isdir(os.path.join(qroot, '.hg')):
361 yield qroot # we have a patch queue repo here
361 yield qroot # we have a patch queue repo here
362 if recurse:
362 if recurse:
363 # avoid recursing inside the .hg directory
363 # avoid recursing inside the .hg directory
364 dirs.remove('.hg')
364 dirs.remove('.hg')
365 else:
365 else:
366 dirs[:] = [] # don't descend further
366 dirs[:] = [] # don't descend further
367 elif followsym:
367 elif followsym:
368 newdirs = []
368 newdirs = []
369 for d in dirs:
369 for d in dirs:
370 fname = os.path.join(root, d)
370 fname = os.path.join(root, d)
371 if adddir(seen_dirs, fname):
371 if adddir(seen_dirs, fname):
372 if os.path.islink(fname):
372 if os.path.islink(fname):
373 for hgname in walkrepos(fname, True, seen_dirs):
373 for hgname in walkrepos(fname, True, seen_dirs):
374 yield hgname
374 yield hgname
375 else:
375 else:
376 newdirs.append(d)
376 newdirs.append(d)
377 dirs[:] = newdirs
377 dirs[:] = newdirs
378
378
379 def intrev(rev):
379 def intrev(ctx):
380 """Return integer for a given revision that can be used in comparison or
380 """Return integer for a given basectx that can be used in comparison or
381 arithmetic operation"""
381 arithmetic operation"""
382 rev = ctx.rev()
382 if rev is None:
383 if rev is None:
383 return wdirrev
384 return wdirrev
384 return rev
385 return rev
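A self-contained sketch of the new calling convention introduced by this hunk.
FakeCtx mimics only the rev() method of a real basectx, and WDIRREV mirrors
node.wdirrev, the large sentinel that makes the working directory sort after
every real revision (both names are stand-ins for illustration):

    WDIRREV = 0x7fffffff          # assumed value of node.wdirrev

    class FakeCtx(object):
        def __init__(self, rev):
            self._rev = rev
        def rev(self):
            return self._rev

    def intrev_sketch(ctx):
        rev = ctx.rev()
        if rev is None:           # working-directory context
            return WDIRREV
        return rev

    assert intrev_sketch(FakeCtx(42)) == 42
    assert intrev_sketch(FakeCtx(None)) == WDIRREV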
385
386
386 def revsingle(repo, revspec, default='.'):
387 def revsingle(repo, revspec, default='.'):
387 if not revspec and revspec != 0:
388 if not revspec and revspec != 0:
388 return repo[default]
389 return repo[default]
389
390
390 l = revrange(repo, [revspec])
391 l = revrange(repo, [revspec])
391 if not l:
392 if not l:
392 raise error.Abort(_('empty revision set'))
393 raise error.Abort(_('empty revision set'))
393 return repo[l.last()]
394 return repo[l.last()]
394
395
395 def _pairspec(revspec):
396 def _pairspec(revspec):
396 tree = revsetlang.parse(revspec)
397 tree = revsetlang.parse(revspec)
397 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
398 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
398
399
399 def revpair(repo, revs):
400 def revpair(repo, revs):
400 if not revs:
401 if not revs:
401 return repo.dirstate.p1(), None
402 return repo.dirstate.p1(), None
402
403
403 l = revrange(repo, revs)
404 l = revrange(repo, revs)
404
405
405 if not l:
406 if not l:
406 first = second = None
407 first = second = None
407 elif l.isascending():
408 elif l.isascending():
408 first = l.min()
409 first = l.min()
409 second = l.max()
410 second = l.max()
410 elif l.isdescending():
411 elif l.isdescending():
411 first = l.max()
412 first = l.max()
412 second = l.min()
413 second = l.min()
413 else:
414 else:
414 first = l.first()
415 first = l.first()
415 second = l.last()
416 second = l.last()
416
417
417 if first is None:
418 if first is None:
418 raise error.Abort(_('empty revision range'))
419 raise error.Abort(_('empty revision range'))
419 if (first == second and len(revs) >= 2
420 if (first == second and len(revs) >= 2
420 and not all(revrange(repo, [r]) for r in revs)):
421 and not all(revrange(repo, [r]) for r in revs)):
421 raise error.Abort(_('empty revision on one side of range'))
422 raise error.Abort(_('empty revision on one side of range'))
422
423
423 # if top-level is range expression, the result must always be a pair
424 # if top-level is range expression, the result must always be a pair
424 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
425 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
425 return repo.lookup(first), None
426 return repo.lookup(first), None
426
427
427 return repo.lookup(first), repo.lookup(second)
428 return repo.lookup(first), repo.lookup(second)
428
429
429 def revrange(repo, specs):
430 def revrange(repo, specs):
430 """Execute 1 to many revsets and return the union.
431 """Execute 1 to many revsets and return the union.
431
432
432 This is the preferred mechanism for executing revsets using user-specified
433 This is the preferred mechanism for executing revsets using user-specified
433 config options, such as revset aliases.
434 config options, such as revset aliases.
434
435
435 The revsets specified by ``specs`` will be executed via a chained ``OR``
436 The revsets specified by ``specs`` will be executed via a chained ``OR``
436 expression. If ``specs`` is empty, an empty result is returned.
437 expression. If ``specs`` is empty, an empty result is returned.
437
438
438 ``specs`` can contain integers, in which case they are assumed to be
439 ``specs`` can contain integers, in which case they are assumed to be
439 revision numbers.
440 revision numbers.
440
441
441 It is assumed the revsets are already formatted. If you have arguments
442 It is assumed the revsets are already formatted. If you have arguments
442 that need to be expanded in the revset, call ``revsetlang.formatspec()``
443 that need to be expanded in the revset, call ``revsetlang.formatspec()``
443 and pass the result as an element of ``specs``.
444 and pass the result as an element of ``specs``.
444
445
445 Specifying a single revset is allowed.
446 Specifying a single revset is allowed.
446
447
447 Returns a ``revset.abstractsmartset`` which is a list-like interface over
448 Returns a ``revset.abstractsmartset`` which is a list-like interface over
448 integer revisions.
449 integer revisions.
449 """
450 """
450 allspecs = []
451 allspecs = []
451 for spec in specs:
452 for spec in specs:
452 if isinstance(spec, int):
453 if isinstance(spec, int):
453 spec = revsetlang.formatspec('rev(%d)', spec)
454 spec = revsetlang.formatspec('rev(%d)', spec)
454 allspecs.append(spec)
455 allspecs.append(spec)
455 return repo.anyrevs(allspecs, user=True)
456 return repo.anyrevs(allspecs, user=True)
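A hedged usage example of the contract described in the docstring: pre-format
user arguments with revsetlang.formatspec(), pass the resulting strings (or
bare integers) as specs, and iterate the returned smartset. The showdrafts
helper and its arguments are invented for illustration.

    from mercurial import revsetlang, scmutil

    def showdrafts(ui, repo, branchname):
        # branchname is untrusted user input; formatspec() quotes it before
        # revrange() ORs the specs together and hands them to the repo.
        spec = revsetlang.formatspec('branch(%s) and not public()', branchname)
        revs = scmutil.revrange(repo, [spec, 0])  # bare ints are rev numbers
        for rev in revs:                          # smartset of integer revs
            ui.write('%d\n' % rev)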
456
457
457 def meaningfulparents(repo, ctx):
458 def meaningfulparents(repo, ctx):
458 """Return list of meaningful (or all if debug) parentrevs for rev.
459 """Return list of meaningful (or all if debug) parentrevs for rev.
459
460
460 For merges (two non-nullrev revisions) both parents are meaningful.
461 For merges (two non-nullrev revisions) both parents are meaningful.
461 Otherwise the first parent revision is considered meaningful if it
462 Otherwise the first parent revision is considered meaningful if it
462 is not the preceding revision.
463 is not the preceding revision.
463 """
464 """
464 parents = ctx.parents()
465 parents = ctx.parents()
465 if len(parents) > 1:
466 if len(parents) > 1:
466 return parents
467 return parents
467 if repo.ui.debugflag:
468 if repo.ui.debugflag:
468 return [parents[0], repo['null']]
469 return [parents[0], repo['null']]
469 if parents[0].rev() >= intrev(ctx.rev()) - 1:
470 if parents[0].rev() >= intrev(ctx) - 1:
470 return []
471 return []
471 return parents
472 return parents
472
473
473 def expandpats(pats):
474 def expandpats(pats):
474 '''Expand bare globs when running on windows.
475 '''Expand bare globs when running on windows.
475 On posix we assume it has already been done by sh.'''
476 On posix we assume it has already been done by sh.'''
476 if not util.expandglobs:
477 if not util.expandglobs:
477 return list(pats)
478 return list(pats)
478 ret = []
479 ret = []
479 for kindpat in pats:
480 for kindpat in pats:
480 kind, pat = matchmod._patsplit(kindpat, None)
481 kind, pat = matchmod._patsplit(kindpat, None)
481 if kind is None:
482 if kind is None:
482 try:
483 try:
483 globbed = glob.glob(pat)
484 globbed = glob.glob(pat)
484 except re.error:
485 except re.error:
485 globbed = [pat]
486 globbed = [pat]
486 if globbed:
487 if globbed:
487 ret.extend(globbed)
488 ret.extend(globbed)
488 continue
489 continue
489 ret.append(kindpat)
490 ret.append(kindpat)
490 return ret
491 return ret
491
492
492 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
493 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
493 badfn=None):
494 badfn=None):
494 '''Return a matcher and the patterns that were used.
495 '''Return a matcher and the patterns that were used.
495 The matcher will warn about bad matches, unless an alternate badfn callback
496 The matcher will warn about bad matches, unless an alternate badfn callback
496 is provided.'''
497 is provided.'''
497 if pats == ("",):
498 if pats == ("",):
498 pats = []
499 pats = []
499 if opts is None:
500 if opts is None:
500 opts = {}
501 opts = {}
501 if not globbed and default == 'relpath':
502 if not globbed and default == 'relpath':
502 pats = expandpats(pats or [])
503 pats = expandpats(pats or [])
503
504
504 def bad(f, msg):
505 def bad(f, msg):
505 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
506 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
506
507
507 if badfn is None:
508 if badfn is None:
508 badfn = bad
509 badfn = bad
509
510
510 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
511 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
511 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
512 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
512
513
513 if m.always():
514 if m.always():
514 pats = []
515 pats = []
515 return m, pats
516 return m, pats
516
517
517 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
518 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
518 badfn=None):
519 badfn=None):
519 '''Return a matcher that will warn about bad matches.'''
520 '''Return a matcher that will warn about bad matches.'''
520 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
521 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
521
522
522 def matchall(repo):
523 def matchall(repo):
523 '''Return a matcher that will efficiently match everything.'''
524 '''Return a matcher that will efficiently match everything.'''
524 return matchmod.always(repo.root, repo.getcwd())
525 return matchmod.always(repo.root, repo.getcwd())
525
526
526 def matchfiles(repo, files, badfn=None):
527 def matchfiles(repo, files, badfn=None):
527 '''Return a matcher that will efficiently match exactly these files.'''
528 '''Return a matcher that will efficiently match exactly these files.'''
528 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
529 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
529
530
530 def origpath(ui, repo, filepath):
531 def origpath(ui, repo, filepath):
531 '''customize where .orig files are created
532 '''customize where .orig files are created
532
533
533 Fetch user defined path from config file: [ui] origbackuppath = <path>
534 Fetch user defined path from config file: [ui] origbackuppath = <path>
534 Fall back to default (filepath) if not specified
535 Fall back to default (filepath) if not specified
535 '''
536 '''
536 origbackuppath = ui.config('ui', 'origbackuppath', None)
537 origbackuppath = ui.config('ui', 'origbackuppath', None)
537 if origbackuppath is None:
538 if origbackuppath is None:
538 return filepath + ".orig"
539 return filepath + ".orig"
539
540
540 filepathfromroot = os.path.relpath(filepath, start=repo.root)
541 filepathfromroot = os.path.relpath(filepath, start=repo.root)
541 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
542 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
542
543
543 origbackupdir = repo.vfs.dirname(fullorigpath)
544 origbackupdir = repo.vfs.dirname(fullorigpath)
544 if not repo.vfs.exists(origbackupdir):
545 if not repo.vfs.exists(origbackupdir):
545 ui.note(_('creating directory: %s\n') % origbackupdir)
546 ui.note(_('creating directory: %s\n') % origbackupdir)
546 util.makedirs(origbackupdir)
547 util.makedirs(origbackupdir)
547
548
548 return fullorigpath + ".orig"
549 return fullorigpath + ".orig"
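For reference, a hedged example of the knob this function reads (the directory
name below is arbitrary): with

    [ui]
    origbackuppath = .hg/origbackups

in the configuration, a backup of foo/bar.txt is written to
.hg/origbackups/foo/bar.txt.orig under the repository root instead of
foo/bar.txt.orig beside the file.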
549
550
550 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
551 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
551 if opts is None:
552 if opts is None:
552 opts = {}
553 opts = {}
553 m = matcher
554 m = matcher
554 if dry_run is None:
555 if dry_run is None:
555 dry_run = opts.get('dry_run')
556 dry_run = opts.get('dry_run')
556 if similarity is None:
557 if similarity is None:
557 similarity = float(opts.get('similarity') or 0)
558 similarity = float(opts.get('similarity') or 0)
558
559
559 ret = 0
560 ret = 0
560 join = lambda f: os.path.join(prefix, f)
561 join = lambda f: os.path.join(prefix, f)
561
562
562 wctx = repo[None]
563 wctx = repo[None]
563 for subpath in sorted(wctx.substate):
564 for subpath in sorted(wctx.substate):
564 submatch = matchmod.subdirmatcher(subpath, m)
565 submatch = matchmod.subdirmatcher(subpath, m)
565 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
566 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
566 sub = wctx.sub(subpath)
567 sub = wctx.sub(subpath)
567 try:
568 try:
568 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
569 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
569 ret = 1
570 ret = 1
570 except error.LookupError:
571 except error.LookupError:
571 repo.ui.status(_("skipping missing subrepository: %s\n")
572 repo.ui.status(_("skipping missing subrepository: %s\n")
572 % join(subpath))
573 % join(subpath))
573
574
574 rejected = []
575 rejected = []
575 def badfn(f, msg):
576 def badfn(f, msg):
576 if f in m.files():
577 if f in m.files():
577 m.bad(f, msg)
578 m.bad(f, msg)
578 rejected.append(f)
579 rejected.append(f)
579
580
580 badmatch = matchmod.badmatch(m, badfn)
581 badmatch = matchmod.badmatch(m, badfn)
581 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
582 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
582 badmatch)
583 badmatch)
583
584
584 unknownset = set(unknown + forgotten)
585 unknownset = set(unknown + forgotten)
585 toprint = unknownset.copy()
586 toprint = unknownset.copy()
586 toprint.update(deleted)
587 toprint.update(deleted)
587 for abs in sorted(toprint):
588 for abs in sorted(toprint):
588 if repo.ui.verbose or not m.exact(abs):
589 if repo.ui.verbose or not m.exact(abs):
589 if abs in unknownset:
590 if abs in unknownset:
590 status = _('adding %s\n') % m.uipath(abs)
591 status = _('adding %s\n') % m.uipath(abs)
591 else:
592 else:
592 status = _('removing %s\n') % m.uipath(abs)
593 status = _('removing %s\n') % m.uipath(abs)
593 repo.ui.status(status)
594 repo.ui.status(status)
594
595
595 renames = _findrenames(repo, m, added + unknown, removed + deleted,
596 renames = _findrenames(repo, m, added + unknown, removed + deleted,
596 similarity)
597 similarity)
597
598
598 if not dry_run:
599 if not dry_run:
599 _markchanges(repo, unknown + forgotten, deleted, renames)
600 _markchanges(repo, unknown + forgotten, deleted, renames)
600
601
601 for f in rejected:
602 for f in rejected:
602 if f in m.files():
603 if f in m.files():
603 return 1
604 return 1
604 return ret
605 return ret
605
606
606 def marktouched(repo, files, similarity=0.0):
607 def marktouched(repo, files, similarity=0.0):
607 '''Assert that files have somehow been operated upon. files are relative to
608 '''Assert that files have somehow been operated upon. files are relative to
608 the repo root.'''
609 the repo root.'''
609 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
610 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
610 rejected = []
611 rejected = []
611
612
612 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
613 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
613
614
614 if repo.ui.verbose:
615 if repo.ui.verbose:
615 unknownset = set(unknown + forgotten)
616 unknownset = set(unknown + forgotten)
616 toprint = unknownset.copy()
617 toprint = unknownset.copy()
617 toprint.update(deleted)
618 toprint.update(deleted)
618 for abs in sorted(toprint):
619 for abs in sorted(toprint):
619 if abs in unknownset:
620 if abs in unknownset:
620 status = _('adding %s\n') % abs
621 status = _('adding %s\n') % abs
621 else:
622 else:
622 status = _('removing %s\n') % abs
623 status = _('removing %s\n') % abs
623 repo.ui.status(status)
624 repo.ui.status(status)
624
625
625 renames = _findrenames(repo, m, added + unknown, removed + deleted,
626 renames = _findrenames(repo, m, added + unknown, removed + deleted,
626 similarity)
627 similarity)
627
628
628 _markchanges(repo, unknown + forgotten, deleted, renames)
629 _markchanges(repo, unknown + forgotten, deleted, renames)
629
630
630 for f in rejected:
631 for f in rejected:
631 if f in m.files():
632 if f in m.files():
632 return 1
633 return 1
633 return 0
634 return 0
634
635
635 def _interestingfiles(repo, matcher):
636 def _interestingfiles(repo, matcher):
636 '''Walk dirstate with matcher, looking for files that addremove would care
637 '''Walk dirstate with matcher, looking for files that addremove would care
637 about.
638 about.
638
639
639 This is different from dirstate.status because it doesn't care about
640 This is different from dirstate.status because it doesn't care about
640 whether files are modified or clean.'''
641 whether files are modified or clean.'''
641 added, unknown, deleted, removed, forgotten = [], [], [], [], []
642 added, unknown, deleted, removed, forgotten = [], [], [], [], []
642 audit_path = pathutil.pathauditor(repo.root)
643 audit_path = pathutil.pathauditor(repo.root)
643
644
644 ctx = repo[None]
645 ctx = repo[None]
645 dirstate = repo.dirstate
646 dirstate = repo.dirstate
646 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
647 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
647 full=False)
648 full=False)
648 for abs, st in walkresults.iteritems():
649 for abs, st in walkresults.iteritems():
649 dstate = dirstate[abs]
650 dstate = dirstate[abs]
650 if dstate == '?' and audit_path.check(abs):
651 if dstate == '?' and audit_path.check(abs):
651 unknown.append(abs)
652 unknown.append(abs)
652 elif dstate != 'r' and not st:
653 elif dstate != 'r' and not st:
653 deleted.append(abs)
654 deleted.append(abs)
654 elif dstate == 'r' and st:
655 elif dstate == 'r' and st:
655 forgotten.append(abs)
656 forgotten.append(abs)
656 # for finding renames
657 # for finding renames
657 elif dstate == 'r' and not st:
658 elif dstate == 'r' and not st:
658 removed.append(abs)
659 removed.append(abs)
659 elif dstate == 'a':
660 elif dstate == 'a':
660 added.append(abs)
661 added.append(abs)
661
662
662 return added, unknown, deleted, removed, forgotten
663 return added, unknown, deleted, removed, forgotten
663
664
664 def _findrenames(repo, matcher, added, removed, similarity):
665 def _findrenames(repo, matcher, added, removed, similarity):
665 '''Find renames from removed files to added ones.'''
666 '''Find renames from removed files to added ones.'''
666 renames = {}
667 renames = {}
667 if similarity > 0:
668 if similarity > 0:
668 for old, new, score in similar.findrenames(repo, added, removed,
669 for old, new, score in similar.findrenames(repo, added, removed,
669 similarity):
670 similarity):
670 if (repo.ui.verbose or not matcher.exact(old)
671 if (repo.ui.verbose or not matcher.exact(old)
671 or not matcher.exact(new)):
672 or not matcher.exact(new)):
672 repo.ui.status(_('recording removal of %s as rename to %s '
673 repo.ui.status(_('recording removal of %s as rename to %s '
673 '(%d%% similar)\n') %
674 '(%d%% similar)\n') %
674 (matcher.rel(old), matcher.rel(new),
675 (matcher.rel(old), matcher.rel(new),
675 score * 100))
676 score * 100))
676 renames[new] = old
677 renames[new] = old
677 return renames
678 return renames
678
679
679 def _markchanges(repo, unknown, deleted, renames):
680 def _markchanges(repo, unknown, deleted, renames):
680 '''Marks the files in unknown as added, the files in deleted as removed,
681 '''Marks the files in unknown as added, the files in deleted as removed,
681 and the files in renames as copied.'''
682 and the files in renames as copied.'''
682 wctx = repo[None]
683 wctx = repo[None]
683 with repo.wlock():
684 with repo.wlock():
684 wctx.forget(deleted)
685 wctx.forget(deleted)
685 wctx.add(unknown)
686 wctx.add(unknown)
686 for new, old in renames.iteritems():
687 for new, old in renames.iteritems():
687 wctx.copy(old, new)
688 wctx.copy(old, new)
688
689
689 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
690 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
690 """Update the dirstate to reflect the intent of copying src to dst. For
691 """Update the dirstate to reflect the intent of copying src to dst. For
691 different reasons it might not end with dst being marked as copied from src.
692 different reasons it might not end with dst being marked as copied from src.
692 """
693 """
693 origsrc = repo.dirstate.copied(src) or src
694 origsrc = repo.dirstate.copied(src) or src
694 if dst == origsrc: # copying back a copy?
695 if dst == origsrc: # copying back a copy?
695 if repo.dirstate[dst] not in 'mn' and not dryrun:
696 if repo.dirstate[dst] not in 'mn' and not dryrun:
696 repo.dirstate.normallookup(dst)
697 repo.dirstate.normallookup(dst)
697 else:
698 else:
698 if repo.dirstate[origsrc] == 'a' and origsrc == src:
699 if repo.dirstate[origsrc] == 'a' and origsrc == src:
699 if not ui.quiet:
700 if not ui.quiet:
700 ui.warn(_("%s has not been committed yet, so no copy "
701 ui.warn(_("%s has not been committed yet, so no copy "
701 "data will be stored for %s.\n")
702 "data will be stored for %s.\n")
702 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
703 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
703 if repo.dirstate[dst] in '?r' and not dryrun:
704 if repo.dirstate[dst] in '?r' and not dryrun:
704 wctx.add([dst])
705 wctx.add([dst])
705 elif not dryrun:
706 elif not dryrun:
706 wctx.copy(origsrc, dst)
707 wctx.copy(origsrc, dst)
707
708
708 def readrequires(opener, supported):
709 def readrequires(opener, supported):
709 '''Reads and parses .hg/requires and checks if all entries found
710 '''Reads and parses .hg/requires and checks if all entries found
710 are in the list of supported features.'''
711 are in the list of supported features.'''
711 requirements = set(opener.read("requires").splitlines())
712 requirements = set(opener.read("requires").splitlines())
712 missings = []
713 missings = []
713 for r in requirements:
714 for r in requirements:
714 if r not in supported:
715 if r not in supported:
715 if not r or not r[0].isalnum():
716 if not r or not r[0].isalnum():
716 raise error.RequirementError(_(".hg/requires file is corrupt"))
717 raise error.RequirementError(_(".hg/requires file is corrupt"))
717 missings.append(r)
718 missings.append(r)
718 missings.sort()
719 missings.sort()
719 if missings:
720 if missings:
720 raise error.RequirementError(
721 raise error.RequirementError(
721 _("repository requires features unknown to this Mercurial: %s")
722 _("repository requires features unknown to this Mercurial: %s")
722 % " ".join(missings),
723 % " ".join(missings),
723 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
724 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
724 " for more information"))
725 " for more information"))
725 return requirements
726 return requirements
726
727
727 def writerequires(opener, requirements):
728 def writerequires(opener, requirements):
728 with opener('requires', 'w') as fp:
729 with opener('requires', 'w') as fp:
729 for r in sorted(requirements):
730 for r in sorted(requirements):
730 fp.write("%s\n" % r)
731 fp.write("%s\n" % r)
731
732
732 class filecachesubentry(object):
733 class filecachesubentry(object):
733 def __init__(self, path, stat):
734 def __init__(self, path, stat):
734 self.path = path
735 self.path = path
735 self.cachestat = None
736 self.cachestat = None
736 self._cacheable = None
737 self._cacheable = None
737
738
738 if stat:
739 if stat:
739 self.cachestat = filecachesubentry.stat(self.path)
740 self.cachestat = filecachesubentry.stat(self.path)
740
741
741 if self.cachestat:
742 if self.cachestat:
742 self._cacheable = self.cachestat.cacheable()
743 self._cacheable = self.cachestat.cacheable()
743 else:
744 else:
744 # None means we don't know yet
745 # None means we don't know yet
745 self._cacheable = None
746 self._cacheable = None
746
747
747 def refresh(self):
748 def refresh(self):
748 if self.cacheable():
749 if self.cacheable():
749 self.cachestat = filecachesubentry.stat(self.path)
750 self.cachestat = filecachesubentry.stat(self.path)
750
751
751 def cacheable(self):
752 def cacheable(self):
752 if self._cacheable is not None:
753 if self._cacheable is not None:
753 return self._cacheable
754 return self._cacheable
754
755
755 # we don't know yet, assume it is for now
756 # we don't know yet, assume it is for now
756 return True
757 return True
757
758
758 def changed(self):
759 def changed(self):
759 # no point in going further if we can't cache it
760 # no point in going further if we can't cache it
760 if not self.cacheable():
761 if not self.cacheable():
761 return True
762 return True
762
763
763 newstat = filecachesubentry.stat(self.path)
764 newstat = filecachesubentry.stat(self.path)
764
765
765 # we may not know if it's cacheable yet, check again now
766 # we may not know if it's cacheable yet, check again now
766 if newstat and self._cacheable is None:
767 if newstat and self._cacheable is None:
767 self._cacheable = newstat.cacheable()
768 self._cacheable = newstat.cacheable()
768
769
769 # check again
770 # check again
770 if not self._cacheable:
771 if not self._cacheable:
771 return True
772 return True
772
773
773 if self.cachestat != newstat:
774 if self.cachestat != newstat:
774 self.cachestat = newstat
775 self.cachestat = newstat
775 return True
776 return True
776 else:
777 else:
777 return False
778 return False
778
779
779 @staticmethod
780 @staticmethod
780 def stat(path):
781 def stat(path):
781 try:
782 try:
782 return util.cachestat(path)
783 return util.cachestat(path)
783 except OSError as e:
784 except OSError as e:
784 if e.errno != errno.ENOENT:
785 if e.errno != errno.ENOENT:
785 raise
786 raise
786
787
787 class filecacheentry(object):
788 class filecacheentry(object):
788 def __init__(self, paths, stat=True):
789 def __init__(self, paths, stat=True):
789 self._entries = []
790 self._entries = []
790 for path in paths:
791 for path in paths:
791 self._entries.append(filecachesubentry(path, stat))
792 self._entries.append(filecachesubentry(path, stat))
792
793
793 def changed(self):
794 def changed(self):
794 '''true if any entry has changed'''
795 '''true if any entry has changed'''
795 for entry in self._entries:
796 for entry in self._entries:
796 if entry.changed():
797 if entry.changed():
797 return True
798 return True
798 return False
799 return False
799
800
800 def refresh(self):
801 def refresh(self):
801 for entry in self._entries:
802 for entry in self._entries:
802 entry.refresh()
803 entry.refresh()
803
804
804 class filecache(object):
805 class filecache(object):
805 '''A property-like decorator that tracks files under .hg/ for updates.
806 '''A property-like decorator that tracks files under .hg/ for updates.
806
807
807 Records stat info when called in _filecache.
808 Records stat info when called in _filecache.
808
809
809 On subsequent calls, compares old stat info with new info, and recreates the
810 On subsequent calls, compares old stat info with new info, and recreates the
810 object when any of the files changes, updating the new stat info in
811 object when any of the files changes, updating the new stat info in
811 _filecache.
812 _filecache.
812
813
813 Mercurial either atomically renames or appends to files under .hg,
814 Mercurial either atomically renames or appends to files under .hg,
814 so to ensure the cache is reliable we need the filesystem to be able
815 so to ensure the cache is reliable we need the filesystem to be able
815 to tell us if a file has been replaced. If it can't, we fall back to
816 to tell us if a file has been replaced. If it can't, we fall back to
816 recreating the object on every call (essentially the same behavior as
817 recreating the object on every call (essentially the same behavior as
817 propertycache).
818 propertycache).
818
819
819 '''
820 '''
820 def __init__(self, *paths):
821 def __init__(self, *paths):
821 self.paths = paths
822 self.paths = paths
822
823
823 def join(self, obj, fname):
824 def join(self, obj, fname):
824 """Used to compute the runtime path of a cached file.
825 """Used to compute the runtime path of a cached file.
825
826
826 Users should subclass filecache and provide their own version of this
827 Users should subclass filecache and provide their own version of this
827 function to call the appropriate join function on 'obj' (an instance
828 function to call the appropriate join function on 'obj' (an instance
828 of the class whose member function was decorated).
829 of the class whose member function was decorated).
829 """
830 """
830 raise NotImplementedError
831 raise NotImplementedError
831
832
832 def __call__(self, func):
833 def __call__(self, func):
833 self.func = func
834 self.func = func
834 self.name = func.__name__.encode('ascii')
835 self.name = func.__name__.encode('ascii')
835 return self
836 return self
836
837
837 def __get__(self, obj, type=None):
838 def __get__(self, obj, type=None):
838 # if accessed on the class, return the descriptor itself.
839 # if accessed on the class, return the descriptor itself.
839 if obj is None:
840 if obj is None:
840 return self
841 return self
841 # do we need to check if the file changed?
842 # do we need to check if the file changed?
842 if self.name in obj.__dict__:
843 if self.name in obj.__dict__:
843 assert self.name in obj._filecache, self.name
844 assert self.name in obj._filecache, self.name
844 return obj.__dict__[self.name]
845 return obj.__dict__[self.name]
845
846
846 entry = obj._filecache.get(self.name)
847 entry = obj._filecache.get(self.name)
847
848
848 if entry:
849 if entry:
849 if entry.changed():
850 if entry.changed():
850 entry.obj = self.func(obj)
851 entry.obj = self.func(obj)
851 else:
852 else:
852 paths = [self.join(obj, path) for path in self.paths]
853 paths = [self.join(obj, path) for path in self.paths]
853
854
854 # We stat -before- creating the object so our cache doesn't lie if
855 # We stat -before- creating the object so our cache doesn't lie if
855 # a writer modified between the time we read and stat
856 # a writer modified between the time we read and stat
856 entry = filecacheentry(paths, True)
857 entry = filecacheentry(paths, True)
857 entry.obj = self.func(obj)
858 entry.obj = self.func(obj)
858
859
859 obj._filecache[self.name] = entry
860 obj._filecache[self.name] = entry
860
861
861 obj.__dict__[self.name] = entry.obj
862 obj.__dict__[self.name] = entry.obj
862 return entry.obj
863 return entry.obj
863
864
864 def __set__(self, obj, value):
865 def __set__(self, obj, value):
865 if self.name not in obj._filecache:
866 if self.name not in obj._filecache:
866 # we add an entry for the missing value because X in __dict__
867 # we add an entry for the missing value because X in __dict__
867 # implies X in _filecache
868 # implies X in _filecache
868 paths = [self.join(obj, path) for path in self.paths]
869 paths = [self.join(obj, path) for path in self.paths]
869 ce = filecacheentry(paths, False)
870 ce = filecacheentry(paths, False)
870 obj._filecache[self.name] = ce
871 obj._filecache[self.name] = ce
871 else:
872 else:
872 ce = obj._filecache[self.name]
873 ce = obj._filecache[self.name]
873
874
874 ce.obj = value # update cached copy
875 ce.obj = value # update cached copy
875 obj.__dict__[self.name] = value # update copy returned by obj.x
876 obj.__dict__[self.name] = value # update copy returned by obj.x
876
877
877 def __delete__(self, obj):
878 def __delete__(self, obj):
878 try:
879 try:
879 del obj.__dict__[self.name]
880 del obj.__dict__[self.name]
880 except KeyError:
881 except KeyError:
881 raise AttributeError(self.name)
882 raise AttributeError(self.name)
882
883
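# Editor's note: illustrative sketch only, not part of this changeset. It
# shows how the filecache descriptor above is meant to be consumed: a
# subclass supplies join(), and the decorated object keeps a '_filecache'
# dict (the names '_demofilecache', '_democonsumer' and the tracked file
# 'bookmarks' are made up for illustration).
class _demofilecache(filecache):
    def join(self, obj, fname):
        # resolve the tracked name against the consumer's vfs
        return obj.vfs.join(fname)

class _democonsumer(object):
    def __init__(self, vfs):
        self.vfs = vfs
        self._filecache = {}

    @_demofilecache('bookmarks')
    def bookmarks(self):
        # recomputed only when the stat info of .hg/bookmarks changes
        return self.vfs.tryread('bookmarks')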
883 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
884 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
884 if lock is None:
885 if lock is None:
885 raise error.LockInheritanceContractViolation(
886 raise error.LockInheritanceContractViolation(
886 'lock can only be inherited while held')
887 'lock can only be inherited while held')
887 if environ is None:
888 if environ is None:
888 environ = {}
889 environ = {}
889 with lock.inherit() as locker:
890 with lock.inherit() as locker:
890 environ[envvar] = locker
891 environ[envvar] = locker
891 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
892 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
892
893
893 def wlocksub(repo, cmd, *args, **kwargs):
894 def wlocksub(repo, cmd, *args, **kwargs):
894 """run cmd as a subprocess that allows inheriting repo's wlock
895 """run cmd as a subprocess that allows inheriting repo's wlock
895
896
896 This can only be called while the wlock is held. This takes all the
897 This can only be called while the wlock is held. This takes all the
897 arguments that ui.system does, and returns the exit code of the
898 arguments that ui.system does, and returns the exit code of the
898 subprocess."""
899 subprocess."""
899 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
900 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
900 **kwargs)
901 **kwargs)
901
902
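# Editor's note: illustrative sketch only, not part of this changeset.
# wlocksub() must be called while the wlock is already held; the command
# string here is a hypothetical external hook.
def _demowlocksub(repo):
    with repo.wlock():
        # the child process can reacquire the lock via HG_WLOCK_LOCKER
        return wlocksub(repo, 'my-hook --check')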
902 def gdinitconfig(ui):
903 def gdinitconfig(ui):
903 """helper function to know if a repo should be created as general delta
904 """helper function to know if a repo should be created as general delta
904 """
905 """
905 # experimental config: format.generaldelta
906 # experimental config: format.generaldelta
906 return (ui.configbool('format', 'generaldelta', False)
907 return (ui.configbool('format', 'generaldelta', False)
907 or ui.configbool('format', 'usegeneraldelta', True))
908 or ui.configbool('format', 'usegeneraldelta', True))
908
909
909 def gddeltaconfig(ui):
910 def gddeltaconfig(ui):
910 """helper function to know if incoming delta should be optimised
911 """helper function to know if incoming delta should be optimised
911 """
912 """
912 # experimental config: format.generaldelta
913 # experimental config: format.generaldelta
913 return ui.configbool('format', 'generaldelta', False)
914 return ui.configbool('format', 'generaldelta', False)
914
915
915 class simplekeyvaluefile(object):
916 class simplekeyvaluefile(object):
916 """A simple file with key=value lines
917 """A simple file with key=value lines
917
918
918 Keys must be alphanumeric and start with a letter; values must not
919 Keys must be alphanumeric and start with a letter; values must not
919 contain '\n' characters"""
920 contain '\n' characters"""
920 firstlinekey = '__firstline'
921 firstlinekey = '__firstline'
921
922
922 def __init__(self, vfs, path, keys=None):
923 def __init__(self, vfs, path, keys=None):
923 self.vfs = vfs
924 self.vfs = vfs
924 self.path = path
925 self.path = path
925
926
926 def read(self, firstlinenonkeyval=False):
927 def read(self, firstlinenonkeyval=False):
927 """Read the contents of a simple key-value file
928 """Read the contents of a simple key-value file
928
929
929 'firstlinenonkeyval' indicates whether the first line of the file should
930 'firstlinenonkeyval' indicates whether the first line of the file should
930 be treated as a key-value pair or returned fully under the
931 be treated as a key-value pair or returned fully under the
931 __firstline key."""
932 __firstline key."""
932 lines = self.vfs.readlines(self.path)
933 lines = self.vfs.readlines(self.path)
933 d = {}
934 d = {}
934 if firstlinenonkeyval:
935 if firstlinenonkeyval:
935 if not lines:
936 if not lines:
936 e = _("empty simplekeyvalue file")
937 e = _("empty simplekeyvalue file")
937 raise error.CorruptedState(e)
938 raise error.CorruptedState(e)
938 # we don't want to include '\n' in the __firstline
939 # we don't want to include '\n' in the __firstline
939 d[self.firstlinekey] = lines[0][:-1]
940 d[self.firstlinekey] = lines[0][:-1]
940 del lines[0]
941 del lines[0]
941
942
942 try:
943 try:
943 # the 'if line.strip()' part prevents us from failing on empty
944 # the 'if line.strip()' part prevents us from failing on empty
944 # lines which only contain '\n' and therefore are not skipped
945 # lines which only contain '\n' and therefore are not skipped
945 # by 'if line'
946 # by 'if line'
946 updatedict = dict(line[:-1].split('=', 1) for line in lines
947 updatedict = dict(line[:-1].split('=', 1) for line in lines
947 if line.strip())
948 if line.strip())
948 if self.firstlinekey in updatedict:
949 if self.firstlinekey in updatedict:
949 e = _("%r can't be used as a key")
950 e = _("%r can't be used as a key")
950 raise error.CorruptedState(e % self.firstlinekey)
951 raise error.CorruptedState(e % self.firstlinekey)
951 d.update(updatedict)
952 d.update(updatedict)
952 except ValueError as e:
953 except ValueError as e:
953 raise error.CorruptedState(str(e))
954 raise error.CorruptedState(str(e))
954 return d
955 return d
955
956
956 def write(self, data, firstline=None):
957 def write(self, data, firstline=None):
957 """Write key=>value mapping to a file
958 """Write key=>value mapping to a file
958 data is a dict. Keys must be alphanumeric and start with a letter.
959 data is a dict. Keys must be alphanumeric and start with a letter.
959 Values must not contain newline characters.
960 Values must not contain newline characters.
960
961
961 If 'firstline' is not None, it is written to the file before
962 If 'firstline' is not None, it is written to the file before
962 everything else, as is, not in key=value form"""
963 everything else, as is, not in key=value form"""
963 lines = []
964 lines = []
964 if firstline is not None:
965 if firstline is not None:
965 lines.append('%s\n' % firstline)
966 lines.append('%s\n' % firstline)
966
967
967 for k, v in data.items():
968 for k, v in data.items():
968 if k == self.firstlinekey:
969 if k == self.firstlinekey:
969 e = "key name '%s' is reserved" % self.firstlinekey
970 e = "key name '%s' is reserved" % self.firstlinekey
970 raise error.ProgrammingError(e)
971 raise error.ProgrammingError(e)
971 if not k[0].isalpha():
972 if not k[0].isalpha():
972 e = "keys must start with a letter in a key-value file"
973 e = "keys must start with a letter in a key-value file"
973 raise error.ProgrammingError(e)
974 raise error.ProgrammingError(e)
974 if not k.isalnum():
975 if not k.isalnum():
975 e = "invalid key name in a simple key-value file"
976 e = "invalid key name in a simple key-value file"
976 raise error.ProgrammingError(e)
977 raise error.ProgrammingError(e)
977 if '\n' in v:
978 if '\n' in v:
978 e = "invalid value in a simple key-value file"
979 e = "invalid value in a simple key-value file"
979 raise error.ProgrammingError(e)
980 raise error.ProgrammingError(e)
980 lines.append("%s=%s\n" % (k, v))
981 lines.append("%s=%s\n" % (k, v))
981 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
982 with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
982 fp.write(''.join(lines))
983 fp.write(''.join(lines))
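# Editor's note: illustrative sketch only, not part of this changeset. A
# minimal round trip through simplekeyvaluefile, assuming 'vfs' is any
# writable vfsmod.vfs instance and 'demo-state' is a made-up file name.
def _demokeyvalueroundtrip(vfs):
    f = simplekeyvaluefile(vfs, 'demo-state')
    f.write({'version': '1', 'status': 'ok'}, firstline='demo')
    data = f.read(firstlinenonkeyval=True)
    # data == {'__firstline': 'demo', 'version': '1', 'status': 'ok'}
    return data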
@@ -1,671 +1,671 b''
1 # templatekw.py - common changeset template keywords
1 # templatekw.py - common changeset template keywords
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from .node import hex, nullid
11 from .node import hex, nullid
12 from . import (
12 from . import (
13 encoding,
13 encoding,
14 error,
14 error,
15 hbisect,
15 hbisect,
16 patch,
16 patch,
17 registrar,
17 registrar,
18 scmutil,
18 scmutil,
19 util,
19 util,
20 )
20 )
21
21
22 class _hybrid(object):
22 class _hybrid(object):
23 """Wrapper for list or dict to support legacy template
23 """Wrapper for list or dict to support legacy template
24
24
25 This class allows us to handle both:
25 This class allows us to handle both:
26 - "{files}" (legacy command-line-specific list hack) and
26 - "{files}" (legacy command-line-specific list hack) and
27 - "{files % '{file}\n'}" (hgweb-style with inlining and function support)
27 - "{files % '{file}\n'}" (hgweb-style with inlining and function support)
28 and to access raw values:
28 and to access raw values:
29 - "{ifcontains(file, files, ...)}", "{ifcontains(key, extras, ...)}"
29 - "{ifcontains(file, files, ...)}", "{ifcontains(key, extras, ...)}"
30 - "{get(extras, key)}"
30 - "{get(extras, key)}"
31 - "{files|json}"
31 - "{files|json}"
32 """
32 """
33
33
34 def __init__(self, gen, values, makemap, joinfmt):
34 def __init__(self, gen, values, makemap, joinfmt):
35 if gen is not None:
35 if gen is not None:
36 self.gen = gen
36 self.gen = gen
37 self._values = values
37 self._values = values
38 self._makemap = makemap
38 self._makemap = makemap
39 self.joinfmt = joinfmt
39 self.joinfmt = joinfmt
40 @util.propertycache
40 @util.propertycache
41 def gen(self):
41 def gen(self):
42 return self._defaultgen()
42 return self._defaultgen()
43 def _defaultgen(self):
43 def _defaultgen(self):
44 """Generator to stringify this as {join(self, ' ')}"""
44 """Generator to stringify this as {join(self, ' ')}"""
45 for i, d in enumerate(self.itermaps()):
45 for i, d in enumerate(self.itermaps()):
46 if i > 0:
46 if i > 0:
47 yield ' '
47 yield ' '
48 yield self.joinfmt(d)
48 yield self.joinfmt(d)
49 def itermaps(self):
49 def itermaps(self):
50 makemap = self._makemap
50 makemap = self._makemap
51 for x in self._values:
51 for x in self._values:
52 yield makemap(x)
52 yield makemap(x)
53 def __contains__(self, x):
53 def __contains__(self, x):
54 return x in self._values
54 return x in self._values
55 def __len__(self):
55 def __len__(self):
56 return len(self._values)
56 return len(self._values)
57 def __iter__(self):
57 def __iter__(self):
58 return iter(self._values)
58 return iter(self._values)
59 def __getattr__(self, name):
59 def __getattr__(self, name):
60 if name not in ('get', 'items', 'iteritems', 'iterkeys', 'itervalues',
60 if name not in ('get', 'items', 'iteritems', 'iterkeys', 'itervalues',
61 'keys', 'values'):
61 'keys', 'values'):
62 raise AttributeError(name)
62 raise AttributeError(name)
63 return getattr(self._values, name)
63 return getattr(self._values, name)
64
64
65 def hybriddict(data, key='key', value='value', fmt='%s=%s', gen=None):
65 def hybriddict(data, key='key', value='value', fmt='%s=%s', gen=None):
66 """Wrap data to support both dict-like and string-like operations"""
66 """Wrap data to support both dict-like and string-like operations"""
67 return _hybrid(gen, data, lambda k: {key: k, value: data[k]},
67 return _hybrid(gen, data, lambda k: {key: k, value: data[k]},
68 lambda d: fmt % (d[key], d[value]))
68 lambda d: fmt % (d[key], d[value]))
69
69
70 def hybridlist(data, name, fmt='%s', gen=None):
70 def hybridlist(data, name, fmt='%s', gen=None):
71 """Wrap data to support both list-like and string-like operations"""
71 """Wrap data to support both list-like and string-like operations"""
72 return _hybrid(gen, data, lambda x: {name: x}, lambda d: fmt % d[name])
72 return _hybrid(gen, data, lambda x: {name: x}, lambda d: fmt % d[name])
73
73
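# Editor's note: illustrative sketch only, not part of this changeset. It
# shows the two faces of a hybrid value described above: it still behaves
# like the underlying container, yet .gen renders the legacy flat string.
def _demohybrid():
    files = hybridlist(['a.txt', 'b.txt'], name='file')
    assert 'a.txt' in files                       # container-like access
    assert len(files) == 2
    assert ''.join(files.gen) == 'a.txt b.txt'    # legacy string rendering
    extras = hybriddict({'branch': 'default'})
    assert ''.join(extras.gen) == 'branch=default'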
74 def unwraphybrid(thing):
74 def unwraphybrid(thing):
75 """Return an object which can be stringified possibly by using a legacy
75 """Return an object which can be stringified possibly by using a legacy
76 template"""
76 template"""
77 if not util.safehasattr(thing, 'gen'):
77 if not util.safehasattr(thing, 'gen'):
78 return thing
78 return thing
79 return thing.gen
79 return thing.gen
80
80
81 def showdict(name, data, mapping, plural=None, key='key', value='value',
81 def showdict(name, data, mapping, plural=None, key='key', value='value',
82 fmt='%s=%s', separator=' '):
82 fmt='%s=%s', separator=' '):
83 c = [{key: k, value: v} for k, v in data.iteritems()]
83 c = [{key: k, value: v} for k, v in data.iteritems()]
84 f = _showlist(name, c, mapping, plural, separator)
84 f = _showlist(name, c, mapping, plural, separator)
85 return hybriddict(data, key=key, value=value, fmt=fmt, gen=f)
85 return hybriddict(data, key=key, value=value, fmt=fmt, gen=f)
86
86
87 def showlist(name, values, mapping, plural=None, element=None, separator=' '):
87 def showlist(name, values, mapping, plural=None, element=None, separator=' '):
88 if not element:
88 if not element:
89 element = name
89 element = name
90 f = _showlist(name, values, mapping, plural, separator)
90 f = _showlist(name, values, mapping, plural, separator)
91 return hybridlist(values, name=element, gen=f)
91 return hybridlist(values, name=element, gen=f)
92
92
93 def _showlist(name, values, mapping, plural=None, separator=' '):
93 def _showlist(name, values, mapping, plural=None, separator=' '):
94 '''expand set of values.
94 '''expand set of values.
95 name is name of key in template map.
95 name is name of key in template map.
96 values is list of strings or dicts.
96 values is list of strings or dicts.
97 plural is plural of name, if not simply name + 's'.
97 plural is plural of name, if not simply name + 's'.
98 separator is used to join values as a string
98 separator is used to join values as a string
99
99
100 expansion works like this, given name 'foo'.
100 expansion works like this, given name 'foo'.
101
101
102 if values is empty, expand 'no_foos'.
102 if values is empty, expand 'no_foos'.
103
103
104 if 'foo' not in template map, return values as a string,
104 if 'foo' not in template map, return values as a string,
105 joined by 'separator'.
105 joined by 'separator'.
106
106
107 expand 'start_foos'.
107 expand 'start_foos'.
108
108
109 for each value, expand 'foo'. if 'last_foo' in template
109 for each value, expand 'foo'. if 'last_foo' in template
110 map, expand it instead of 'foo' for last key.
110 map, expand it instead of 'foo' for last key.
111
111
112 expand 'end_foos'.
112 expand 'end_foos'.
113 '''
113 '''
114 templ = mapping['templ']
114 templ = mapping['templ']
115 if not plural:
115 if not plural:
116 plural = name + 's'
116 plural = name + 's'
117 if not values:
117 if not values:
118 noname = 'no_' + plural
118 noname = 'no_' + plural
119 if noname in templ:
119 if noname in templ:
120 yield templ(noname, **mapping)
120 yield templ(noname, **mapping)
121 return
121 return
122 if name not in templ:
122 if name not in templ:
123 if isinstance(values[0], str):
123 if isinstance(values[0], str):
124 yield separator.join(values)
124 yield separator.join(values)
125 else:
125 else:
126 for v in values:
126 for v in values:
127 yield dict(v, **mapping)
127 yield dict(v, **mapping)
128 return
128 return
129 startname = 'start_' + plural
129 startname = 'start_' + plural
130 if startname in templ:
130 if startname in templ:
131 yield templ(startname, **mapping)
131 yield templ(startname, **mapping)
132 vmapping = mapping.copy()
132 vmapping = mapping.copy()
133 def one(v, tag=name):
133 def one(v, tag=name):
134 try:
134 try:
135 vmapping.update(v)
135 vmapping.update(v)
136 except (AttributeError, ValueError):
136 except (AttributeError, ValueError):
137 try:
137 try:
138 for a, b in v:
138 for a, b in v:
139 vmapping[a] = b
139 vmapping[a] = b
140 except ValueError:
140 except ValueError:
141 vmapping[name] = v
141 vmapping[name] = v
142 return templ(tag, **vmapping)
142 return templ(tag, **vmapping)
143 lastname = 'last_' + name
143 lastname = 'last_' + name
144 if lastname in templ:
144 if lastname in templ:
145 last = values.pop()
145 last = values.pop()
146 else:
146 else:
147 last = None
147 last = None
148 for v in values:
148 for v in values:
149 yield one(v)
149 yield one(v)
150 if last is not None:
150 if last is not None:
151 yield one(last, tag=lastname)
151 yield one(last, tag=lastname)
152 endname = 'end_' + plural
152 endname = 'end_' + plural
153 if endname in templ:
153 if endname in templ:
154 yield templ(endname, **mapping)
154 yield templ(endname, **mapping)
155
155
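# Editor's note: illustrative sketch only, not part of this changeset. A toy
# stand-in for the templater, showing the expansion order documented in
# _showlist() for name 'foo': start_foos, foo..., last_foo, end_foos.
class _demotempl(object):
    _t = {'start_foos': '[', 'foo': '{foo},', 'last_foo': '{foo}',
          'end_foos': ']'}
    def __contains__(self, name):
        return name in self._t
    def __call__(self, name, **mapping):
        return self._t[name].format(**mapping)

def _demoshowlist():
    mapping = {'templ': _demotempl()}
    # pieces come out as '[', 'a,', 'b,', 'c', ']' -> '[a,b,c]'
    return ''.join(_showlist('foo', ['a', 'b', 'c'], mapping))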
156 def _formatrevnode(ctx):
156 def _formatrevnode(ctx):
157 """Format changeset as '{rev}:{node|formatnode}', which is the default
157 """Format changeset as '{rev}:{node|formatnode}', which is the default
158 template provided by cmdutil.changeset_templater"""
158 template provided by cmdutil.changeset_templater"""
159 repo = ctx.repo()
159 repo = ctx.repo()
160 if repo.ui.debugflag:
160 if repo.ui.debugflag:
161 hexnode = ctx.hex()
161 hexnode = ctx.hex()
162 else:
162 else:
163 hexnode = ctx.hex()[:12]
163 hexnode = ctx.hex()[:12]
164 return '%d:%s' % (scmutil.intrev(ctx.rev()), hexnode)
164 return '%d:%s' % (scmutil.intrev(ctx), hexnode)
165
165
166 def getfiles(repo, ctx, revcache):
166 def getfiles(repo, ctx, revcache):
167 if 'files' not in revcache:
167 if 'files' not in revcache:
168 revcache['files'] = repo.status(ctx.p1(), ctx)[:3]
168 revcache['files'] = repo.status(ctx.p1(), ctx)[:3]
169 return revcache['files']
169 return revcache['files']
170
170
171 def getlatesttags(repo, ctx, cache, pattern=None):
171 def getlatesttags(repo, ctx, cache, pattern=None):
172 '''return date, distance and name for the latest tag of rev'''
172 '''return date, distance and name for the latest tag of rev'''
173
173
174 cachename = 'latesttags'
174 cachename = 'latesttags'
175 if pattern is not None:
175 if pattern is not None:
176 cachename += '-' + pattern
176 cachename += '-' + pattern
177 match = util.stringmatcher(pattern)[2]
177 match = util.stringmatcher(pattern)[2]
178 else:
178 else:
179 match = util.always
179 match = util.always
180
180
181 if cachename not in cache:
181 if cachename not in cache:
182 # Cache mapping from rev to a tuple with tag date, tag
182 # Cache mapping from rev to a tuple with tag date, tag
183 # distance and tag name
183 # distance and tag name
184 cache[cachename] = {-1: (0, 0, ['null'])}
184 cache[cachename] = {-1: (0, 0, ['null'])}
185 latesttags = cache[cachename]
185 latesttags = cache[cachename]
186
186
187 rev = ctx.rev()
187 rev = ctx.rev()
188 todo = [rev]
188 todo = [rev]
189 while todo:
189 while todo:
190 rev = todo.pop()
190 rev = todo.pop()
191 if rev in latesttags:
191 if rev in latesttags:
192 continue
192 continue
193 ctx = repo[rev]
193 ctx = repo[rev]
194 tags = [t for t in ctx.tags()
194 tags = [t for t in ctx.tags()
195 if (repo.tagtype(t) and repo.tagtype(t) != 'local'
195 if (repo.tagtype(t) and repo.tagtype(t) != 'local'
196 and match(t))]
196 and match(t))]
197 if tags:
197 if tags:
198 latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)]
198 latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)]
199 continue
199 continue
200 try:
200 try:
201 # The tuples are laid out so the right one can be found by
201 # The tuples are laid out so the right one can be found by
202 # comparison.
202 # comparison.
203 pdate, pdist, ptag = max(
203 pdate, pdist, ptag = max(
204 latesttags[p.rev()] for p in ctx.parents())
204 latesttags[p.rev()] for p in ctx.parents())
205 except KeyError:
205 except KeyError:
206 # Cache miss - recurse
206 # Cache miss - recurse
207 todo.append(rev)
207 todo.append(rev)
208 todo.extend(p.rev() for p in ctx.parents())
208 todo.extend(p.rev() for p in ctx.parents())
209 continue
209 continue
210 latesttags[rev] = pdate, pdist + 1, ptag
210 latesttags[rev] = pdate, pdist + 1, ptag
211 return latesttags[rev]
211 return latesttags[rev]
212
212
213 def getrenamedfn(repo, endrev=None):
213 def getrenamedfn(repo, endrev=None):
214 rcache = {}
214 rcache = {}
215 if endrev is None:
215 if endrev is None:
216 endrev = len(repo)
216 endrev = len(repo)
217
217
218 def getrenamed(fn, rev):
218 def getrenamed(fn, rev):
219 '''looks up all renames for a file (up to endrev) the first
219 '''looks up all renames for a file (up to endrev) the first
220 time the file is given. It indexes on the changerev and only
220 time the file is given. It indexes on the changerev and only
221 parses the manifest if linkrev != changerev.
221 parses the manifest if linkrev != changerev.
222 Returns rename info for fn at changerev rev.'''
222 Returns rename info for fn at changerev rev.'''
223 if fn not in rcache:
223 if fn not in rcache:
224 rcache[fn] = {}
224 rcache[fn] = {}
225 fl = repo.file(fn)
225 fl = repo.file(fn)
226 for i in fl:
226 for i in fl:
227 lr = fl.linkrev(i)
227 lr = fl.linkrev(i)
228 renamed = fl.renamed(fl.node(i))
228 renamed = fl.renamed(fl.node(i))
229 rcache[fn][lr] = renamed
229 rcache[fn][lr] = renamed
230 if lr >= endrev:
230 if lr >= endrev:
231 break
231 break
232 if rev in rcache[fn]:
232 if rev in rcache[fn]:
233 return rcache[fn][rev]
233 return rcache[fn][rev]
234
234
235 # If linkrev != rev (i.e. rev not found in rcache) fallback to
235 # If linkrev != rev (i.e. rev not found in rcache) fallback to
236 # filectx logic.
236 # filectx logic.
237 try:
237 try:
238 return repo[rev][fn].renamed()
238 return repo[rev][fn].renamed()
239 except error.LookupError:
239 except error.LookupError:
240 return None
240 return None
241
241
242 return getrenamed
242 return getrenamed
243
243
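# Editor's note: illustrative sketch only, not part of this changeset.
# Typical use of getrenamedfn(): build the lookup once, then query it per
# file and revision (mirroring showfilecopies() below).
def _demorenames(repo, rev):
    getrenamed = getrenamedfn(repo, endrev=rev + 1)
    renamed = {}
    for fn in repo[rev].files():
        rename = getrenamed(fn, rev)
        if rename:
            # rename is (source path, file node), as filectx.renamed() returns
            renamed[fn] = rename[0]
    return renamed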
244 # default templates internally used for rendering of lists
244 # default templates internally used for rendering of lists
245 defaulttempl = {
245 defaulttempl = {
246 'parent': '{rev}:{node|formatnode} ',
246 'parent': '{rev}:{node|formatnode} ',
247 'manifest': '{rev}:{node|formatnode}',
247 'manifest': '{rev}:{node|formatnode}',
248 'file_copy': '{name} ({source})',
248 'file_copy': '{name} ({source})',
249 'envvar': '{key}={value}',
249 'envvar': '{key}={value}',
250 'extra': '{key}={value|stringescape}'
250 'extra': '{key}={value|stringescape}'
251 }
251 }
252 # filecopy is preserved for compatibility reasons
252 # filecopy is preserved for compatibility reasons
253 defaulttempl['filecopy'] = defaulttempl['file_copy']
253 defaulttempl['filecopy'] = defaulttempl['file_copy']
254
254
255 # keywords are callables like:
255 # keywords are callables like:
256 # fn(repo, ctx, templ, cache, revcache, **args)
256 # fn(repo, ctx, templ, cache, revcache, **args)
257 # with:
257 # with:
258 # repo - current repository instance
258 # repo - current repository instance
259 # ctx - the changectx being displayed
259 # ctx - the changectx being displayed
260 # templ - the templater instance
260 # templ - the templater instance
261 # cache - a cache dictionary for the whole templater run
261 # cache - a cache dictionary for the whole templater run
262 # revcache - a cache dictionary for the current revision
262 # revcache - a cache dictionary for the current revision
263 keywords = {}
263 keywords = {}
264
264
265 templatekeyword = registrar.templatekeyword(keywords)
265 templatekeyword = registrar.templatekeyword(keywords)
266
266
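# Editor's note: illustrative sketch only, not part of this changeset. A
# hypothetical keyword following the callable convention documented above;
# extensions register their own the same way via registrar.templatekeyword.
@templatekeyword('shortdesc')
def _demoshowshortdesc(repo, ctx, templ, **args):
    """String. First line of the changeset description (illustrative only)."""
    desc = ctx.description()
    return desc.splitlines()[0] if desc else ''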
267 @templatekeyword('author')
267 @templatekeyword('author')
268 def showauthor(repo, ctx, templ, **args):
268 def showauthor(repo, ctx, templ, **args):
269 """String. The unmodified author of the changeset."""
269 """String. The unmodified author of the changeset."""
270 return ctx.user()
270 return ctx.user()
271
271
272 @templatekeyword('bisect')
272 @templatekeyword('bisect')
273 def showbisect(repo, ctx, templ, **args):
273 def showbisect(repo, ctx, templ, **args):
274 """String. The changeset bisection status."""
274 """String. The changeset bisection status."""
275 return hbisect.label(repo, ctx.node())
275 return hbisect.label(repo, ctx.node())
276
276
277 @templatekeyword('branch')
277 @templatekeyword('branch')
278 def showbranch(**args):
278 def showbranch(**args):
279 """String. The name of the branch on which the changeset was
279 """String. The name of the branch on which the changeset was
280 committed.
280 committed.
281 """
281 """
282 return args['ctx'].branch()
282 return args['ctx'].branch()
283
283
284 @templatekeyword('branches')
284 @templatekeyword('branches')
285 def showbranches(**args):
285 def showbranches(**args):
286 """List of strings. The name of the branch on which the
286 """List of strings. The name of the branch on which the
287 changeset was committed. Will be empty if the branch name was
287 changeset was committed. Will be empty if the branch name was
288 default. (DEPRECATED)
288 default. (DEPRECATED)
289 """
289 """
290 branch = args['ctx'].branch()
290 branch = args['ctx'].branch()
291 if branch != 'default':
291 if branch != 'default':
292 return showlist('branch', [branch], args, plural='branches')
292 return showlist('branch', [branch], args, plural='branches')
293 return showlist('branch', [], args, plural='branches')
293 return showlist('branch', [], args, plural='branches')
294
294
295 @templatekeyword('bookmarks')
295 @templatekeyword('bookmarks')
296 def showbookmarks(**args):
296 def showbookmarks(**args):
297 """List of strings. Any bookmarks associated with the
297 """List of strings. Any bookmarks associated with the
298 changeset. Also sets 'active', the name of the active bookmark.
298 changeset. Also sets 'active', the name of the active bookmark.
299 """
299 """
300 repo = args['ctx']._repo
300 repo = args['ctx']._repo
301 bookmarks = args['ctx'].bookmarks()
301 bookmarks = args['ctx'].bookmarks()
302 active = repo._activebookmark
302 active = repo._activebookmark
303 makemap = lambda v: {'bookmark': v, 'active': active, 'current': active}
303 makemap = lambda v: {'bookmark': v, 'active': active, 'current': active}
304 f = _showlist('bookmark', bookmarks, args)
304 f = _showlist('bookmark', bookmarks, args)
305 return _hybrid(f, bookmarks, makemap, lambda x: x['bookmark'])
305 return _hybrid(f, bookmarks, makemap, lambda x: x['bookmark'])
306
306
307 @templatekeyword('children')
307 @templatekeyword('children')
308 def showchildren(**args):
308 def showchildren(**args):
309 """List of strings. The children of the changeset."""
309 """List of strings. The children of the changeset."""
310 ctx = args['ctx']
310 ctx = args['ctx']
311 childrevs = ['%d:%s' % (cctx, cctx) for cctx in ctx.children()]
311 childrevs = ['%d:%s' % (cctx, cctx) for cctx in ctx.children()]
312 return showlist('children', childrevs, args, element='child')
312 return showlist('children', childrevs, args, element='child')
313
313
314 # Deprecated, but kept alive for help generation purposes.
314 # Deprecated, but kept alive for help generation purposes.
315 @templatekeyword('currentbookmark')
315 @templatekeyword('currentbookmark')
316 def showcurrentbookmark(**args):
316 def showcurrentbookmark(**args):
317 """String. The active bookmark, if it is
317 """String. The active bookmark, if it is
318 associated with the changeset (DEPRECATED)"""
318 associated with the changeset (DEPRECATED)"""
319 return showactivebookmark(**args)
319 return showactivebookmark(**args)
320
320
321 @templatekeyword('activebookmark')
321 @templatekeyword('activebookmark')
322 def showactivebookmark(**args):
322 def showactivebookmark(**args):
323 """String. The active bookmark, if it is
323 """String. The active bookmark, if it is
324 associated with the changeset"""
324 associated with the changeset"""
325 active = args['repo']._activebookmark
325 active = args['repo']._activebookmark
326 if active and active in args['ctx'].bookmarks():
326 if active and active in args['ctx'].bookmarks():
327 return active
327 return active
328 return ''
328 return ''
329
329
330 @templatekeyword('date')
330 @templatekeyword('date')
331 def showdate(repo, ctx, templ, **args):
331 def showdate(repo, ctx, templ, **args):
332 """Date information. The date when the changeset was committed."""
332 """Date information. The date when the changeset was committed."""
333 return ctx.date()
333 return ctx.date()
334
334
335 @templatekeyword('desc')
335 @templatekeyword('desc')
336 def showdescription(repo, ctx, templ, **args):
336 def showdescription(repo, ctx, templ, **args):
337 """String. The text of the changeset description."""
337 """String. The text of the changeset description."""
338 s = ctx.description()
338 s = ctx.description()
339 if isinstance(s, encoding.localstr):
339 if isinstance(s, encoding.localstr):
340 # try hard to preserve utf-8 bytes
340 # try hard to preserve utf-8 bytes
341 return encoding.tolocal(encoding.fromlocal(s).strip())
341 return encoding.tolocal(encoding.fromlocal(s).strip())
342 else:
342 else:
343 return s.strip()
343 return s.strip()
344
344
345 @templatekeyword('diffstat')
345 @templatekeyword('diffstat')
346 def showdiffstat(repo, ctx, templ, **args):
346 def showdiffstat(repo, ctx, templ, **args):
347 """String. Statistics of changes with the following format:
347 """String. Statistics of changes with the following format:
348 "modified files: +added/-removed lines"
348 "modified files: +added/-removed lines"
349 """
349 """
350 stats = patch.diffstatdata(util.iterlines(ctx.diff(noprefix=False)))
350 stats = patch.diffstatdata(util.iterlines(ctx.diff(noprefix=False)))
351 maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats)
351 maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats)
352 return '%s: +%s/-%s' % (len(stats), adds, removes)
352 return '%s: +%s/-%s' % (len(stats), adds, removes)
353
353
354 @templatekeyword('envvars')
354 @templatekeyword('envvars')
355 def showenvvars(repo, **args):
355 def showenvvars(repo, **args):
356 """A dictionary of environment variables. (EXPERIMENTAL)"""
356 """A dictionary of environment variables. (EXPERIMENTAL)"""
357 env = repo.ui.exportableenviron()
357 env = repo.ui.exportableenviron()
358 env = util.sortdict((k, env[k]) for k in sorted(env))
358 env = util.sortdict((k, env[k]) for k in sorted(env))
359 return showdict('envvar', env, args, plural='envvars')
359 return showdict('envvar', env, args, plural='envvars')
360
360
361 @templatekeyword('extras')
361 @templatekeyword('extras')
362 def showextras(**args):
362 def showextras(**args):
363 """List of dicts with key, value entries of the 'extras'
363 """List of dicts with key, value entries of the 'extras'
364 field of this changeset."""
364 field of this changeset."""
365 extras = args['ctx'].extra()
365 extras = args['ctx'].extra()
366 extras = util.sortdict((k, extras[k]) for k in sorted(extras))
366 extras = util.sortdict((k, extras[k]) for k in sorted(extras))
367 makemap = lambda k: {'key': k, 'value': extras[k]}
367 makemap = lambda k: {'key': k, 'value': extras[k]}
368 c = [makemap(k) for k in extras]
368 c = [makemap(k) for k in extras]
369 f = _showlist('extra', c, args, plural='extras')
369 f = _showlist('extra', c, args, plural='extras')
370 return _hybrid(f, extras, makemap,
370 return _hybrid(f, extras, makemap,
371 lambda x: '%s=%s' % (x['key'], util.escapestr(x['value'])))
371 lambda x: '%s=%s' % (x['key'], util.escapestr(x['value'])))
372
372
373 @templatekeyword('file_adds')
373 @templatekeyword('file_adds')
374 def showfileadds(**args):
374 def showfileadds(**args):
375 """List of strings. Files added by this changeset."""
375 """List of strings. Files added by this changeset."""
376 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
376 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
377 return showlist('file_add', getfiles(repo, ctx, revcache)[1], args,
377 return showlist('file_add', getfiles(repo, ctx, revcache)[1], args,
378 element='file')
378 element='file')
379
379
380 @templatekeyword('file_copies')
380 @templatekeyword('file_copies')
381 def showfilecopies(**args):
381 def showfilecopies(**args):
382 """List of strings. Files copied in this changeset with
382 """List of strings. Files copied in this changeset with
383 their sources.
383 their sources.
384 """
384 """
385 cache, ctx = args['cache'], args['ctx']
385 cache, ctx = args['cache'], args['ctx']
386 copies = args['revcache'].get('copies')
386 copies = args['revcache'].get('copies')
387 if copies is None:
387 if copies is None:
388 if 'getrenamed' not in cache:
388 if 'getrenamed' not in cache:
389 cache['getrenamed'] = getrenamedfn(args['repo'])
389 cache['getrenamed'] = getrenamedfn(args['repo'])
390 copies = []
390 copies = []
391 getrenamed = cache['getrenamed']
391 getrenamed = cache['getrenamed']
392 for fn in ctx.files():
392 for fn in ctx.files():
393 rename = getrenamed(fn, ctx.rev())
393 rename = getrenamed(fn, ctx.rev())
394 if rename:
394 if rename:
395 copies.append((fn, rename[0]))
395 copies.append((fn, rename[0]))
396
396
397 copies = util.sortdict(copies)
397 copies = util.sortdict(copies)
398 return showdict('file_copy', copies, args, plural='file_copies',
398 return showdict('file_copy', copies, args, plural='file_copies',
399 key='name', value='source', fmt='%s (%s)')
399 key='name', value='source', fmt='%s (%s)')
400
400
401 # showfilecopiesswitch() displays file copies only if copy records are
401 # showfilecopiesswitch() displays file copies only if copy records are
402 # provided before calling the templater, usually with a --copies
402 # provided before calling the templater, usually with a --copies
403 # command line switch.
403 # command line switch.
404 @templatekeyword('file_copies_switch')
404 @templatekeyword('file_copies_switch')
405 def showfilecopiesswitch(**args):
405 def showfilecopiesswitch(**args):
406 """List of strings. Like "file_copies" but displayed
406 """List of strings. Like "file_copies" but displayed
407 only if the --copied switch is set.
407 only if the --copied switch is set.
408 """
408 """
409 copies = args['revcache'].get('copies') or []
409 copies = args['revcache'].get('copies') or []
410 copies = util.sortdict(copies)
410 copies = util.sortdict(copies)
411 return showdict('file_copy', copies, args, plural='file_copies',
411 return showdict('file_copy', copies, args, plural='file_copies',
412 key='name', value='source', fmt='%s (%s)')
412 key='name', value='source', fmt='%s (%s)')
413
413
414 @templatekeyword('file_dels')
414 @templatekeyword('file_dels')
415 def showfiledels(**args):
415 def showfiledels(**args):
416 """List of strings. Files removed by this changeset."""
416 """List of strings. Files removed by this changeset."""
417 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
417 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
418 return showlist('file_del', getfiles(repo, ctx, revcache)[2], args,
418 return showlist('file_del', getfiles(repo, ctx, revcache)[2], args,
419 element='file')
419 element='file')
420
420
421 @templatekeyword('file_mods')
421 @templatekeyword('file_mods')
422 def showfilemods(**args):
422 def showfilemods(**args):
423 """List of strings. Files modified by this changeset."""
423 """List of strings. Files modified by this changeset."""
424 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
424 repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
425 return showlist('file_mod', getfiles(repo, ctx, revcache)[0], args,
425 return showlist('file_mod', getfiles(repo, ctx, revcache)[0], args,
426 element='file')
426 element='file')
427
427
428 @templatekeyword('files')
428 @templatekeyword('files')
429 def showfiles(**args):
429 def showfiles(**args):
430 """List of strings. All files modified, added, or removed by this
430 """List of strings. All files modified, added, or removed by this
431 changeset.
431 changeset.
432 """
432 """
433 return showlist('file', args['ctx'].files(), args)
433 return showlist('file', args['ctx'].files(), args)
434
434
435 @templatekeyword('graphnode')
435 @templatekeyword('graphnode')
436 def showgraphnode(repo, ctx, **args):
436 def showgraphnode(repo, ctx, **args):
437 """String. The character representing the changeset node in
437 """String. The character representing the changeset node in
438 an ASCII revision graph"""
438 an ASCII revision graph"""
439 wpnodes = repo.dirstate.parents()
439 wpnodes = repo.dirstate.parents()
440 if wpnodes[1] == nullid:
440 if wpnodes[1] == nullid:
441 wpnodes = wpnodes[:1]
441 wpnodes = wpnodes[:1]
442 if ctx.node() in wpnodes:
442 if ctx.node() in wpnodes:
443 return '@'
443 return '@'
444 elif ctx.obsolete():
444 elif ctx.obsolete():
445 return 'x'
445 return 'x'
446 elif ctx.closesbranch():
446 elif ctx.closesbranch():
447 return '_'
447 return '_'
448 else:
448 else:
449 return 'o'
449 return 'o'
450
450
451 @templatekeyword('index')
451 @templatekeyword('index')
452 def showindex(**args):
452 def showindex(**args):
453 """Integer. The current iteration of the loop. (0 indexed)"""
453 """Integer. The current iteration of the loop. (0 indexed)"""
454 # just hosts documentation; should be overridden by template mapping
454 # just hosts documentation; should be overridden by template mapping
455 raise error.Abort(_("can't use index in this context"))
455 raise error.Abort(_("can't use index in this context"))
456
456
457 @templatekeyword('latesttag')
457 @templatekeyword('latesttag')
458 def showlatesttag(**args):
458 def showlatesttag(**args):
459 """List of strings. The global tags on the most recent globally
459 """List of strings. The global tags on the most recent globally
460 tagged ancestor of this changeset. If no such tags exist, the list
460 tagged ancestor of this changeset. If no such tags exist, the list
461 consists of the single string "null".
461 consists of the single string "null".
462 """
462 """
463 return showlatesttags(None, **args)
463 return showlatesttags(None, **args)
464
464
465 def showlatesttags(pattern, **args):
465 def showlatesttags(pattern, **args):
466 """helper method for the latesttag keyword and function"""
466 """helper method for the latesttag keyword and function"""
467 repo, ctx = args['repo'], args['ctx']
467 repo, ctx = args['repo'], args['ctx']
468 cache = args['cache']
468 cache = args['cache']
469 latesttags = getlatesttags(repo, ctx, cache, pattern)
469 latesttags = getlatesttags(repo, ctx, cache, pattern)
470
470
471 # latesttag[0] is an implementation detail for sorting csets on different
471 # latesttag[0] is an implementation detail for sorting csets on different
472 # branches in a stable manner; it is the date the tagged cset was created,
472 # branches in a stable manner; it is the date the tagged cset was created,
473 # not the date the tag was created. Therefore it isn't made visible here.
473 # not the date the tag was created. Therefore it isn't made visible here.
474 makemap = lambda v: {
474 makemap = lambda v: {
475 'changes': _showchangessincetag,
475 'changes': _showchangessincetag,
476 'distance': latesttags[1],
476 'distance': latesttags[1],
477 'latesttag': v, # BC with {latesttag % '{latesttag}'}
477 'latesttag': v, # BC with {latesttag % '{latesttag}'}
478 'tag': v
478 'tag': v
479 }
479 }
480
480
481 tags = latesttags[2]
481 tags = latesttags[2]
482 f = _showlist('latesttag', tags, args, separator=':')
482 f = _showlist('latesttag', tags, args, separator=':')
483 return _hybrid(f, tags, makemap, lambda x: x['latesttag'])
483 return _hybrid(f, tags, makemap, lambda x: x['latesttag'])
484
484
485 @templatekeyword('latesttagdistance')
485 @templatekeyword('latesttagdistance')
486 def showlatesttagdistance(repo, ctx, templ, cache, **args):
486 def showlatesttagdistance(repo, ctx, templ, cache, **args):
487 """Integer. Longest path to the latest tag."""
487 """Integer. Longest path to the latest tag."""
488 return getlatesttags(repo, ctx, cache)[1]
488 return getlatesttags(repo, ctx, cache)[1]
489
489
490 @templatekeyword('changessincelatesttag')
490 @templatekeyword('changessincelatesttag')
491 def showchangessincelatesttag(repo, ctx, templ, cache, **args):
491 def showchangessincelatesttag(repo, ctx, templ, cache, **args):
492 """Integer. All ancestors not in the latest tag."""
492 """Integer. All ancestors not in the latest tag."""
493 latesttag = getlatesttags(repo, ctx, cache)[2][0]
493 latesttag = getlatesttags(repo, ctx, cache)[2][0]
494
494
495 return _showchangessincetag(repo, ctx, tag=latesttag, **args)
495 return _showchangessincetag(repo, ctx, tag=latesttag, **args)
496
496
497 def _showchangessincetag(repo, ctx, **args):
497 def _showchangessincetag(repo, ctx, **args):
498 offset = 0
498 offset = 0
499 revs = [ctx.rev()]
499 revs = [ctx.rev()]
500 tag = args['tag']
500 tag = args['tag']
501
501
502 # The only() revset doesn't currently support wdir()
502 # The only() revset doesn't currently support wdir()
503 if ctx.rev() is None:
503 if ctx.rev() is None:
504 offset = 1
504 offset = 1
505 revs = [p.rev() for p in ctx.parents()]
505 revs = [p.rev() for p in ctx.parents()]
506
506
507 return len(repo.revs('only(%ld, %s)', revs, tag)) + offset
507 return len(repo.revs('only(%ld, %s)', revs, tag)) + offset
508
508
509 @templatekeyword('manifest')
509 @templatekeyword('manifest')
510 def showmanifest(**args):
510 def showmanifest(**args):
511 repo, ctx, templ = args['repo'], args['ctx'], args['templ']
511 repo, ctx, templ = args['repo'], args['ctx'], args['templ']
512 mnode = ctx.manifestnode()
512 mnode = ctx.manifestnode()
513 if mnode is None:
513 if mnode is None:
514 # just avoid crash, we might want to use the 'ff...' hash in future
514 # just avoid crash, we might want to use the 'ff...' hash in future
515 return
515 return
516 args = args.copy()
516 args = args.copy()
517 args.update({'rev': repo.manifestlog._revlog.rev(mnode),
517 args.update({'rev': repo.manifestlog._revlog.rev(mnode),
518 'node': hex(mnode)})
518 'node': hex(mnode)})
519 return templ('manifest', **args)
519 return templ('manifest', **args)
520
520
521 def shownames(namespace, **args):
521 def shownames(namespace, **args):
522 """helper method to generate a template keyword for a namespace"""
522 """helper method to generate a template keyword for a namespace"""
523 ctx = args['ctx']
523 ctx = args['ctx']
524 repo = ctx.repo()
524 repo = ctx.repo()
525 ns = repo.names[namespace]
525 ns = repo.names[namespace]
526 names = ns.names(repo, ctx.node())
526 names = ns.names(repo, ctx.node())
527 return showlist(ns.templatename, names, args, plural=namespace)
527 return showlist(ns.templatename, names, args, plural=namespace)
528
528
529 @templatekeyword('namespaces')
529 @templatekeyword('namespaces')
530 def shownamespaces(**args):
530 def shownamespaces(**args):
531 """Dict of lists. Names attached to this changeset per
531 """Dict of lists. Names attached to this changeset per
532 namespace."""
532 namespace."""
533 ctx = args['ctx']
533 ctx = args['ctx']
534 repo = ctx.repo()
534 repo = ctx.repo()
535 namespaces = util.sortdict((k, showlist('name', ns.names(repo, ctx.node()),
535 namespaces = util.sortdict((k, showlist('name', ns.names(repo, ctx.node()),
536 args))
536 args))
537 for k, ns in repo.names.iteritems())
537 for k, ns in repo.names.iteritems())
538 f = _showlist('namespace', list(namespaces), args)
538 f = _showlist('namespace', list(namespaces), args)
539 return _hybrid(f, namespaces,
539 return _hybrid(f, namespaces,
540 lambda k: {'namespace': k, 'names': namespaces[k]},
540 lambda k: {'namespace': k, 'names': namespaces[k]},
541 lambda x: x['namespace'])
541 lambda x: x['namespace'])
542
542
543 @templatekeyword('node')
543 @templatekeyword('node')
544 def shownode(repo, ctx, templ, **args):
544 def shownode(repo, ctx, templ, **args):
545 """String. The changeset identification hash, as a 40 hexadecimal
545 """String. The changeset identification hash, as a 40 hexadecimal
546 digit string.
546 digit string.
547 """
547 """
548 return ctx.hex()
548 return ctx.hex()
549
549
550 @templatekeyword('obsolete')
550 @templatekeyword('obsolete')
551 def showobsolete(repo, ctx, templ, **args):
551 def showobsolete(repo, ctx, templ, **args):
552 """String. Whether the changeset is obsolete.
552 """String. Whether the changeset is obsolete.
553 """
553 """
554 if ctx.obsolete():
554 if ctx.obsolete():
555 return 'obsolete'
555 return 'obsolete'
556 return ''
556 return ''
557
557
558 @templatekeyword('p1rev')
558 @templatekeyword('p1rev')
559 def showp1rev(repo, ctx, templ, **args):
559 def showp1rev(repo, ctx, templ, **args):
560 """Integer. The repository-local revision number of the changeset's
560 """Integer. The repository-local revision number of the changeset's
561 first parent, or -1 if the changeset has no parents."""
561 first parent, or -1 if the changeset has no parents."""
562 return ctx.p1().rev()
562 return ctx.p1().rev()
563
563
564 @templatekeyword('p2rev')
564 @templatekeyword('p2rev')
565 def showp2rev(repo, ctx, templ, **args):
565 def showp2rev(repo, ctx, templ, **args):
566 """Integer. The repository-local revision number of the changeset's
566 """Integer. The repository-local revision number of the changeset's
567 second parent, or -1 if the changeset has no second parent."""
567 second parent, or -1 if the changeset has no second parent."""
568 return ctx.p2().rev()
568 return ctx.p2().rev()
569
569
570 @templatekeyword('p1node')
570 @templatekeyword('p1node')
571 def showp1node(repo, ctx, templ, **args):
571 def showp1node(repo, ctx, templ, **args):
572 """String. The identification hash of the changeset's first parent,
572 """String. The identification hash of the changeset's first parent,
573 as a 40 digit hexadecimal string. If the changeset has no parents, all
573 as a 40 digit hexadecimal string. If the changeset has no parents, all
574 digits are 0."""
574 digits are 0."""
575 return ctx.p1().hex()
575 return ctx.p1().hex()
576
576
577 @templatekeyword('p2node')
577 @templatekeyword('p2node')
578 def showp2node(repo, ctx, templ, **args):
578 def showp2node(repo, ctx, templ, **args):
579 """String. The identification hash of the changeset's second
579 """String. The identification hash of the changeset's second
580 parent, as a 40 digit hexadecimal string. If the changeset has no second
580 parent, as a 40 digit hexadecimal string. If the changeset has no second
581 parent, all digits are 0."""
581 parent, all digits are 0."""
582 return ctx.p2().hex()
582 return ctx.p2().hex()
583
583
584 @templatekeyword('parents')
584 @templatekeyword('parents')
585 def showparents(**args):
585 def showparents(**args):
586 """List of strings. The parents of the changeset in "rev:node"
586 """List of strings. The parents of the changeset in "rev:node"
587 format. If the changeset has only one "natural" parent (the predecessor
587 format. If the changeset has only one "natural" parent (the predecessor
588 revision) nothing is shown."""
588 revision) nothing is shown."""
589 repo = args['repo']
589 repo = args['repo']
590 ctx = args['ctx']
590 ctx = args['ctx']
591 pctxs = scmutil.meaningfulparents(repo, ctx)
591 pctxs = scmutil.meaningfulparents(repo, ctx)
592 prevs = [str(p.rev()) for p in pctxs] # ifcontains() needs a list of str
592 prevs = [str(p.rev()) for p in pctxs] # ifcontains() needs a list of str
593 parents = [[('rev', p.rev()),
593 parents = [[('rev', p.rev()),
594 ('node', p.hex()),
594 ('node', p.hex()),
595 ('phase', p.phasestr())]
595 ('phase', p.phasestr())]
596 for p in pctxs]
596 for p in pctxs]
597 f = _showlist('parent', parents, args)
597 f = _showlist('parent', parents, args)
598 return _hybrid(f, prevs, lambda x: {'ctx': repo[int(x)], 'revcache': {}},
598 return _hybrid(f, prevs, lambda x: {'ctx': repo[int(x)], 'revcache': {}},
599 lambda d: _formatrevnode(d['ctx']))
599 lambda d: _formatrevnode(d['ctx']))
600
600
601 @templatekeyword('phase')
601 @templatekeyword('phase')
602 def showphase(repo, ctx, templ, **args):
602 def showphase(repo, ctx, templ, **args):
603 """String. The changeset phase name."""
603 """String. The changeset phase name."""
604 return ctx.phasestr()
604 return ctx.phasestr()
605
605
606 @templatekeyword('phaseidx')
606 @templatekeyword('phaseidx')
607 def showphaseidx(repo, ctx, templ, **args):
607 def showphaseidx(repo, ctx, templ, **args):
608 """Integer. The changeset phase index."""
608 """Integer. The changeset phase index."""
609 return ctx.phase()
609 return ctx.phase()
610
610
611 @templatekeyword('rev')
611 @templatekeyword('rev')
612 def showrev(repo, ctx, templ, **args):
612 def showrev(repo, ctx, templ, **args):
613 """Integer. The repository-local changeset revision number."""
613 """Integer. The repository-local changeset revision number."""
614 return scmutil.intrev(ctx.rev())
614 return scmutil.intrev(ctx)
615
615
616 def showrevslist(name, revs, **args):
616 def showrevslist(name, revs, **args):
617 """helper to generate a list of revisions in which a mapped template will
617 """helper to generate a list of revisions in which a mapped template will
618 be evaluated"""
618 be evaluated"""
619 repo = args['ctx'].repo()
619 repo = args['ctx'].repo()
620 revs = [str(r) for r in revs] # ifcontains() needs a list of str
620 revs = [str(r) for r in revs] # ifcontains() needs a list of str
621 f = _showlist(name, revs, args)
621 f = _showlist(name, revs, args)
622 return _hybrid(f, revs,
622 return _hybrid(f, revs,
623 lambda x: {name: x, 'ctx': repo[int(x)], 'revcache': {}},
623 lambda x: {name: x, 'ctx': repo[int(x)], 'revcache': {}},
624 lambda d: d[name])
624 lambda d: d[name])
625
625
626 @templatekeyword('subrepos')
626 @templatekeyword('subrepos')
627 def showsubrepos(**args):
627 def showsubrepos(**args):
628 """List of strings. Updated subrepositories in the changeset."""
628 """List of strings. Updated subrepositories in the changeset."""
629 ctx = args['ctx']
629 ctx = args['ctx']
630 substate = ctx.substate
630 substate = ctx.substate
631 if not substate:
631 if not substate:
632 return showlist('subrepo', [], args)
632 return showlist('subrepo', [], args)
633 psubstate = ctx.parents()[0].substate or {}
633 psubstate = ctx.parents()[0].substate or {}
634 subrepos = []
634 subrepos = []
635 for sub in substate:
635 for sub in substate:
636 if sub not in psubstate or substate[sub] != psubstate[sub]:
636 if sub not in psubstate or substate[sub] != psubstate[sub]:
637 subrepos.append(sub) # modified or newly added in ctx
637 subrepos.append(sub) # modified or newly added in ctx
638 for sub in psubstate:
638 for sub in psubstate:
639 if sub not in substate:
639 if sub not in substate:
640 subrepos.append(sub) # removed in ctx
640 subrepos.append(sub) # removed in ctx
641 return showlist('subrepo', sorted(subrepos), args)
641 return showlist('subrepo', sorted(subrepos), args)
642
642
643 # don't remove "showtags" definition, even though namespaces will put
643 # don't remove "showtags" definition, even though namespaces will put
644 # a helper function for "tags" keyword into "keywords" map automatically,
644 # a helper function for "tags" keyword into "keywords" map automatically,
645 # because online help text is built without namespaces initialization
645 # because online help text is built without namespaces initialization
646 @templatekeyword('tags')
646 @templatekeyword('tags')
647 def showtags(**args):
647 def showtags(**args):
648 """List of strings. Any tags associated with the changeset."""
648 """List of strings. Any tags associated with the changeset."""
649 return shownames('tags', **args)
649 return shownames('tags', **args)
650
650
651 def loadkeyword(ui, extname, registrarobj):
651 def loadkeyword(ui, extname, registrarobj):
652 """Load template keyword from specified registrarobj
652 """Load template keyword from specified registrarobj
653 """
653 """
654 for name, func in registrarobj._table.iteritems():
654 for name, func in registrarobj._table.iteritems():
655 keywords[name] = func
655 keywords[name] = func
656
656
657 @templatekeyword('termwidth')
657 @templatekeyword('termwidth')
658 def termwidth(repo, ctx, templ, **args):
658 def termwidth(repo, ctx, templ, **args):
659 """Integer. The width of the current terminal."""
659 """Integer. The width of the current terminal."""
660 return repo.ui.termwidth()
660 return repo.ui.termwidth()
661
661
662 @templatekeyword('troubles')
662 @templatekeyword('troubles')
663 def showtroubles(**args):
663 def showtroubles(**args):
664 """List of strings. Evolution troubles affecting the changeset.
664 """List of strings. Evolution troubles affecting the changeset.
665
665
666 (EXPERIMENTAL)
666 (EXPERIMENTAL)
667 """
667 """
668 return showlist('trouble', args['ctx'].troubles(), args)
668 return showlist('trouble', args['ctx'].troubles(), args)
669
669
670 # tell hggettext to extract docstrings from these functions:
670 # tell hggettext to extract docstrings from these functions:
671 i18nfunctions = keywords.values()
671 i18nfunctions = keywords.values()