##// END OF EJS Templates
templater: keep default resources per template engine (API)...
Yuya Nishihara -
r35484:32c278eb default
parent child Browse files
Show More
@@ -1,3973 +1,3971 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import itertools
11 import itertools
12 import os
12 import os
13 import re
13 import re
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23
23
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 changelog,
26 changelog,
27 copies,
27 copies,
28 crecord as crecordmod,
28 crecord as crecordmod,
29 dagop,
29 dagop,
30 dirstateguard,
30 dirstateguard,
31 encoding,
31 encoding,
32 error,
32 error,
33 formatter,
33 formatter,
34 graphmod,
34 graphmod,
35 match as matchmod,
35 match as matchmod,
36 mdiff,
36 mdiff,
37 obsolete,
37 obsolete,
38 patch,
38 patch,
39 pathutil,
39 pathutil,
40 pycompat,
40 pycompat,
41 registrar,
41 registrar,
42 revlog,
42 revlog,
43 revset,
43 revset,
44 scmutil,
44 scmutil,
45 smartset,
45 smartset,
46 templatekw,
46 templatekw,
47 templater,
47 templater,
48 util,
48 util,
49 vfs as vfsmod,
49 vfs as vfsmod,
50 )
50 )
# convenient alias for the in-memory byte stream used throughout this module
stringio = util.stringio

# templates of common command options
#
# Each entry is an option tuple in the fancyopts format:
# (shortname, longname, default, help[, metavar]); command tables splice
# these lists in to share flag definitions across commands.

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('Z', 'ignore-space-at-eol', None,
     _('ignore changes in whitespace at EOL')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
165
165
def ishunk(x):
    """Return True if x is a hunk object (curses or plain record UI)."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
169
169
def newandmodified(chunks, originalchunks):
    """Return the set of filenames that were both newly added and then
    modified during hunk selection (i.e. new-file hunks that were not
    part of originalchunks)."""
    return set(c.header.filename()
               for c in chunks
               if (ishunk(c)
                   and c.header.isnewfile()
                   and c not in originalchunks))
177
177
def parsealiases(cmd):
    """Split a command-table key into its list of command aliases.

    Any leading '^' characters (historical marker for commands listed in
    the short help) are discarded before splitting on '|'.
    """
    names = cmd.lstrip("^")
    return names.split("|")
180
180
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original ui.write so the caller can restore it later.
    """
    origwrite = ui.write

    def labeledwrite(*args, **kwargs):
        baselabel = kwargs.pop(r'label', '')
        # patch.difflabel splits the output into (chunk, label) pairs so
        # each piece of diff text can carry its own color label
        for chunk, sublabel in patch.difflabel(lambda: args):
            origwrite(chunk, label=baselabel + sublabel)

    setattr(ui, 'write', labeledwrite)
    return origwrite
193
193
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter hunks, via curses UI or plain-text prompts.

    When testfile is set (test mode), the curses chunk selector is
    replaced by a scripted one driven by that file.
    """
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        chunkselector = crecordmod.testdecorator(
            testfile, crecordmod.testchunkselector)
    else:
        chunkselector = crecordmod.chunkselector

    return crecordmod.filterpatch(ui, originalhunks, chunkselector, operation)
206
206
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to filter originalhunks; return the selected hunks.

    Returns a (newchunks, newopts) pair. *operation* names the kind of
    filtering being done (reverting, committing, shelving, etc.) and is
    used to build the ui messages (see patch.filterpatch).
    """
    wantcurses = crecordmod.checkcurses(ui)
    testpath = ui.config('experimental', 'crecordtest')
    # temporarily wrap ui.write for colorized diff output; always restore
    restorewrite = setupwrapcolorwrite(ui)
    try:
        selected, selectedopts = filterchunks(ui, originalhunks, wantcurses,
                                              testpath, operation)
    finally:
        ui.write = restorewrite
    return selected, selectedopts
223
223
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes and hand them to commitfunc.

    commitfunc -- non-interactive commit-like callable invoked once the
                  working directory contains only the selected changes
    cmdsuggest -- command name suggested to the user when the ui is not
                  interactive (may be falsy)
    backupall  -- back up every changed file, not only those touched by
                  the selected hunks
    filterfn   -- callable (ui, originalchunks) -> (chunks, newopts) that
                  lets the user pick hunks (e.g. recordfilter)

    Raises error.Abort when the ui is non-interactive or when patch
    parsing/application fails.
    """
    from . import merge as mergemod
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            # collect explicitly-matched directories and abort on bad files
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # diff in git mode with function context so hunks can be
        # selected and reapplied reliably
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # headers/hunks without a files() method are skipped
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # the backup directory may persist from a previous run
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # assemble the selected hunks into one patch in memory
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        action="diff",
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # newly-added-and-modified files must disappear before revert
            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup; a missing backup must not mask the
                # commit result
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # the whole record operation runs under the working-copy lock
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
406
406
class dirnode(object):
    """A directory of the user working copy, tracked for status tersing.

    path     -- path of this directory ('' for the repository root)
    statuses -- set of one-letter status codes seen anywhere below this
                directory (files in all subdirectories included)
    files    -- list of (filename, status) pairs for direct children
    subdirs  -- maps a subdirectory name to its own dirnode object
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set([])
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Add a file in this directory as a direct child."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """Add a file somewhere below this directory.

        When filename contains a path separator we descend into the
        matching subdirectory (creating its dirnode if needed) and add
        the file there; otherwise it is recorded as a direct child.  The
        status code is accumulated at every directory level visited.
        """
        if '/' not in filename:
            # direct child of this directory
            self._addfileindir(filename, status)
        else:
            dirname, remainder = filename.split('/', 1)

            # make sure a dirnode exists for the subdirectory
            if dirname not in self.subdirs:
                self.subdirs[dirname] = dirnode(os.path.join(self.path,
                                                             dirname))

            # delegate the rest of the path to the subdirectory
            self.subdirs[dirname].addfile(remainder, status)

        if status not in self.statuses:
            self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for name, st in self.files:
            yield st, os.path.join(self.path, name)

    def tersewalk(self, terseargs):
        """Yield terse (status, path) pairs for this directory tree.

        terseargs is the string of arguments the user passed with the
        `--terse` flag.

        If every file below this directory shares a single status and
        that status abbreviation appears in terseargs, a single entry for
        the whole directory (path with a trailing separator) is produced.
        Otherwise, direct files are yielded one by one and each
        subdirectory is walked recursively.
        """
        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # terse only when the status abbreviation was actually
            # requested by the user
            if onlyst in terseargs:
                yield onlyst, self.path + pycompat.ossep
                return

        # list the files of this directory individually
        for st, fpath in self.iterfilepaths():
            yield st, fpath

        # recurse on the subdirectories
        for child in self.subdirs.values():
            for st, fpath in child.tersewalk(terseargs):
                yield st, fpath
506
506
def tersedir(statuslist, terseargs):
    """Terse the status when all files in a directory share one status.

    statuslist is a scmutil.status() object holding a list of files for
    each status; terseargs is the string the user passed as the argument
    to the `--terse` flag.

    Builds a tree of dirnode objects which record, at every node, the
    information needed to decide whether a directory can be tersed, then
    returns a list of per-status file lists (same layout as
    scmutil.status()).
    """
    # this ordering determines the layout of the returned list
    statusorder = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # reject unknown status abbreviations up front
    for ch in pycompat.bytestr(terseargs):
        if ch not in statusorder:
            raise error.Abort(_("'%s' not recognized") % ch)

    # dirnode for the root of the repository
    rootobj = dirnode('')
    attrnames = ('modified', 'added', 'deleted', 'clean', 'unknown',
                 'ignored', 'removed')

    collected = {}
    for attrname in attrnames:
        abbrev = attrname[0:1]
        for fname in getattr(statuslist, attrname):
            rootobj.addfile(fname, abbrev)
        collected[abbrev] = []

    # the root directory itself is never tersed; list its files directly
    for st, fpath in rootobj.iterfilepaths():
        collected[st].append(fpath)

    # walk every subdirectory, tersing where possible
    for child in rootobj.subdirs.values():
        for st, fpath in child.tersewalk(terseargs):
            collected[st].append(fpath)

    return [sorted(collected[st]) for st in statusorder]
555
555
556 def _commentlines(raw):
556 def _commentlines(raw):
557 '''Surround lineswith a comment char and a new line'''
557 '''Surround lineswith a comment char and a new line'''
558 lines = raw.splitlines()
558 lines = raw.splitlines()
559 commentedlines = ['# %s' % line for line in lines]
559 commentedlines = ['# %s' % line for line in lines]
560 return '\n'.join(commentedlines) + '\n'
560 return '\n'.join(commentedlines) + '\n'
561
561
def _conflictsmsg(repo):
    """Return a commented summary of unresolved merge conflicts.

    Returns None when no merge is in progress.
    """
    # avoid merge cycle
    from . import merge as mergemod
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    m = scmutil.match(repo[None])
    unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
    if not unresolvedlist:
        msg = _('No unresolved merge conflicts.')
    else:
        cwd = pycompat.getcwd()
        entries = [' %s' % util.pathto(repo.root, cwd, path)
                   for path in unresolvedlist]
        mergeliststr = '\n'.join(entries)
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE''') % mergeliststr

    return _commentlines(msg)
584
584
def _helpmessage(continuecmd, abortcmd):
    """Return a commented hint listing the continue and abort commands."""
    template = _('To continue: %s\n'
                 'To abort: %s')
    return _commentlines(template % (continuecmd, abortcmd))
589
589
def _rebasemsg():
    """Hint shown while a rebase is in progress."""
    return _helpmessage('hg rebase --continue', 'hg rebase --abort')
592
592
def _histeditmsg():
    """Hint shown while a histedit is in progress."""
    return _helpmessage('hg histedit --continue', 'hg histedit --abort')
595
595
def _unshelvemsg():
    """Hint shown while an unshelve is in progress."""
    return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
598
598
def _updatecleanmsg(dest=None):
    """Build the 'hg update --clean' suggestion, with a data-loss warning."""
    target = dest or '.'
    warning = _('warning: this will discard uncommitted changes')
    return 'hg update --clean %s (%s)' % (target, warning)
602
602
def _graftmsg():
    """Hint shown while a graft is in progress."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg graft --continue', _updatecleanmsg())
606
606
def _mergemsg():
    """Hint shown while an uncommitted merge is in progress."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg commit', _updatecleanmsg())
610
610
def _bisectmsg():
    """Hint shown while a bisection is in progress."""
    msg = _('To mark the changeset good: hg bisect --good\n'
            'To mark the changeset bad: hg bisect --bad\n'
            'To abort: hg bisect --reset\n')
    return _commentlines(msg)
616
616
def fileexistspredicate(filename):
    """Return a predicate testing whether *filename* exists in a repo's vfs."""
    def predicate(repo):
        return repo.vfs.exists(filename)
    return predicate
619
619
620 def _mergepredicate(repo):
620 def _mergepredicate(repo):
621 return len(repo[None].parents()) > 1
621 return len(repo[None].parents()) > 1
622
622
# Unfinished-operation descriptors scanned in order by _getrepostate().
# Each entry pairs a state name with a predicate that detects it and a
# zero-argument function producing the commented hint text.
STATES = (
    # (state, predicate to detect states, helpful message function)
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # They need to be last because some of the other unfinished states may also
    # be in a merge or update state (eg. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)
636
636
def _getrepostate(repo):
    """Return the first (state, predicate, msgfn) tuple from STATES that
    matches repo, or None if no unfinished state is detected."""
    # experimental config: commands.status.skipstates
    skip = set(repo.ui.configlist('commands', 'status.skipstates'))
    for entry in STATES:
        state, statedetectionpredicate, msgfn = entry
        if state in skip:
            continue
        if statedetectionpredicate(repo):
            return entry
645
645
def morestatus(repo, fm):
    """Write extra status output (unfinished state, conflicts, hints) to fm.

    Does nothing when the repository is not in an unfinished state.
    """
    statetuple = _getrepostate(repo)
    if not statetuple:
        return
    label = 'status.morestatus'
    fm.startitem()
    state, statedetectionpredicate, helpfulmsg = statetuple
    statemsg = _('The repository is in an unfinished *%s* state.') % state
    fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
    conmsg = _conflictsmsg(repo)
    if conmsg:
        fm.write('conflictsmsg', '%s\n', conmsg, label=label)
    if helpfulmsg:
        fm.write('helpmsg', '%s\n', helpfulmsg(), label=label)
660
660
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for e in keys:
        aliases = parsealiases(e)
        allcmds.extend(aliases)
        if cmd in aliases:
            found = cmd
        elif not strict:
            # prefix match: the first alias starting with cmd wins
            found = next((a for a in aliases if a.startswith(cmd)), None)
        else:
            found = None
        if found is not None:
            # debug commands are collected separately so they only surface
            # when no regular command matched
            isdebug = (aliases[0].startswith("debug")
                       or found.startswith("debug"))
            bucket = debugchoice if isdebug else choice
            bucket[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
698
698
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # an exact match always wins
    try:
        return choice[cmd]
    except KeyError:
        pass

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
714
714
def findrepo(p):
    """Walk upward from path p looking for a directory containing '.hg'.

    Returns the repository root, or None when the filesystem root is
    reached without finding one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # dirname() is a fixed point: we hit the filesystem root
            return None
        p = parent

    return p
722
722
def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # any of modified/added/removed/deleted being non-empty means dirty
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)
    ctx = repo[None]
    # recurse into subrepositories as well
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)
740
740
def logmessage(ui, opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if logfile and not message:
        try:
            if isstdiofilename(logfile):
                message = ui.fin.read()
            else:
                content = util.readfile(logfile)
                message = '\n'.join(content.splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, encoding.strtolocal(inst.strerror)))
    return message
759
759
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        merging = ctxorbool
    else:
        merging = len(ctxorbool.parents()) > 1
    suffix = ".merge" if merging else ".normal"
    return baseformname + suffix
776
776
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forceeditor(r, c, s):
            return commitforceeditor(r, c, s,
                                     finishdesc=finishdesc,
                                     extramsg=extramsg,
                                     editform=editform)
        return forceeditor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
807
807
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if not limit:
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise error.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_('limit must be positive'))
    return limit
821
821
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in the output filename pattern 'pat'.

    Always available: '%%' (literal '%') and '%b' (basename of repo root).
    When 'node' is given: '%H' (full hex), '%h' (short hex), '%R' (rev
    number), '%r' (rev number zero-padded to 'revwidth'), '%m' (sanitized
    'desc').  When 'total'/'seqno' are given: '%N'/'%n' (seqno is
    zero-padded to the width of total when both are present).  When
    'pathname' is given: '%s' (basename), '%d' (dirname or '.'), '%p'.

    Raises error.Abort on an unknown or unavailable format specifier.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: '%d' % repo.changelog.rev(node),
        'h': lambda: short(node),
        # raw string: '\w' in a non-raw literal is an invalid escape
        # sequence (DeprecationWarning on Python 3)
        'm': lambda: re.sub(r'[^\w]', '_', desc or '')
    }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        if node:
            # merged the two previously-duplicated `if node:` checks
            expander.update(node_expander)
            expander['r'] = (lambda:
                ('%d' % repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: '%d' % total
        if seqno is not None:
            expander['n'] = lambda: '%d' % seqno
        if total is not None and seqno is not None:
            # pad seqno so all names in a series sort correctly
            expander['n'] = (lambda: ('%d' % seqno).zfill(len('%d' % total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
867
867
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    if not pat:
        return True
    return pat == '-'
871
871
class _unclosablefile(object):
    """Proxy around a file object whose close() is a no-op.

    Used by makefileobj() to hand out shared streams (the ui's
    stdout/stdin) to callers that close their output file when done;
    closing, or leaving a 'with' block, keeps the underlying stream open.
    """

    def __init__(self, fp):
        # the wrapped file-like object; all I/O is delegated to it
        self._fp = fp

    def close(self):
        # intentionally a no-op: the wrapped stream must stay usable
        pass

    def __iter__(self):
        return iter(self._fp)

    def __getattr__(self, attr):
        # delegate any other attribute (read, write, ...) to the wrapped file
        return getattr(self._fp, attr)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # do not close on context-manager exit either
        pass
890
890
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open the output file described by pattern 'pat'.

    An empty pattern or '-' maps to the ui's stdout (or stdin for read
    modes), wrapped so that close() is a no-op.  Otherwise the pattern is
    expanded via makefilename() and opened with 'mode'; 'modemap' may
    override the mode per filename and flips 'wb' to 'ab' so that repeated
    writes to the same file append instead of truncating.
    """
    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        stream = repo.ui.fout if writable else repo.ui.fin
        return _unclosablefile(stream)
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # first writer truncates; later writers to the same file append
            modemap[fn] = 'ab'
    return open(fn, mode)
909
909
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']

    # reject contradictory or incomplete option combinations up front
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if r:
        return r

    # no revlog resolved through the repository: fall back to a raw
    # revlog file on disk
    if not file_:
        raise error.CommandError(cmd, _('invalid arguments'))
    if not os.path.isfile(file_):
        raise error.Abort(_("revlog '%s' not found") % file_)
    return revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                         file_[:-2] + ".i")
954
954
955 def copy(ui, repo, pats, opts, rename=False):
955 def copy(ui, repo, pats, opts, rename=False):
956 # called with the repo lock held
956 # called with the repo lock held
957 #
957 #
958 # hgsep => pathname that uses "/" to separate directories
958 # hgsep => pathname that uses "/" to separate directories
959 # ossep => pathname that uses os.sep to separate directories
959 # ossep => pathname that uses os.sep to separate directories
960 cwd = repo.getcwd()
960 cwd = repo.getcwd()
961 targets = {}
961 targets = {}
962 after = opts.get("after")
962 after = opts.get("after")
963 dryrun = opts.get("dry_run")
963 dryrun = opts.get("dry_run")
964 wctx = repo[None]
964 wctx = repo[None]
965
965
966 def walkpat(pat):
966 def walkpat(pat):
967 srcs = []
967 srcs = []
968 if after:
968 if after:
969 badstates = '?'
969 badstates = '?'
970 else:
970 else:
971 badstates = '?r'
971 badstates = '?r'
972 m = scmutil.match(wctx, [pat], opts, globbed=True)
972 m = scmutil.match(wctx, [pat], opts, globbed=True)
973 for abs in wctx.walk(m):
973 for abs in wctx.walk(m):
974 state = repo.dirstate[abs]
974 state = repo.dirstate[abs]
975 rel = m.rel(abs)
975 rel = m.rel(abs)
976 exact = m.exact(abs)
976 exact = m.exact(abs)
977 if state in badstates:
977 if state in badstates:
978 if exact and state == '?':
978 if exact and state == '?':
979 ui.warn(_('%s: not copying - file is not managed\n') % rel)
979 ui.warn(_('%s: not copying - file is not managed\n') % rel)
980 if exact and state == 'r':
980 if exact and state == 'r':
981 ui.warn(_('%s: not copying - file has been marked for'
981 ui.warn(_('%s: not copying - file has been marked for'
982 ' remove\n') % rel)
982 ' remove\n') % rel)
983 continue
983 continue
984 # abs: hgsep
984 # abs: hgsep
985 # rel: ossep
985 # rel: ossep
986 srcs.append((abs, rel, exact))
986 srcs.append((abs, rel, exact))
987 return srcs
987 return srcs
988
988
989 # abssrc: hgsep
989 # abssrc: hgsep
990 # relsrc: ossep
990 # relsrc: ossep
991 # otarget: ossep
991 # otarget: ossep
992 def copyfile(abssrc, relsrc, otarget, exact):
992 def copyfile(abssrc, relsrc, otarget, exact):
993 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
993 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
994 if '/' in abstarget:
994 if '/' in abstarget:
995 # We cannot normalize abstarget itself, this would prevent
995 # We cannot normalize abstarget itself, this would prevent
996 # case only renames, like a => A.
996 # case only renames, like a => A.
997 abspath, absname = abstarget.rsplit('/', 1)
997 abspath, absname = abstarget.rsplit('/', 1)
998 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
998 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
999 reltarget = repo.pathto(abstarget, cwd)
999 reltarget = repo.pathto(abstarget, cwd)
1000 target = repo.wjoin(abstarget)
1000 target = repo.wjoin(abstarget)
1001 src = repo.wjoin(abssrc)
1001 src = repo.wjoin(abssrc)
1002 state = repo.dirstate[abstarget]
1002 state = repo.dirstate[abstarget]
1003
1003
1004 scmutil.checkportable(ui, abstarget)
1004 scmutil.checkportable(ui, abstarget)
1005
1005
1006 # check for collisions
1006 # check for collisions
1007 prevsrc = targets.get(abstarget)
1007 prevsrc = targets.get(abstarget)
1008 if prevsrc is not None:
1008 if prevsrc is not None:
1009 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1009 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1010 (reltarget, repo.pathto(abssrc, cwd),
1010 (reltarget, repo.pathto(abssrc, cwd),
1011 repo.pathto(prevsrc, cwd)))
1011 repo.pathto(prevsrc, cwd)))
1012 return
1012 return
1013
1013
1014 # check for overwrites
1014 # check for overwrites
1015 exists = os.path.lexists(target)
1015 exists = os.path.lexists(target)
1016 samefile = False
1016 samefile = False
1017 if exists and abssrc != abstarget:
1017 if exists and abssrc != abstarget:
1018 if (repo.dirstate.normalize(abssrc) ==
1018 if (repo.dirstate.normalize(abssrc) ==
1019 repo.dirstate.normalize(abstarget)):
1019 repo.dirstate.normalize(abstarget)):
1020 if not rename:
1020 if not rename:
1021 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1021 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1022 return
1022 return
1023 exists = False
1023 exists = False
1024 samefile = True
1024 samefile = True
1025
1025
1026 if not after and exists or after and state in 'mn':
1026 if not after and exists or after and state in 'mn':
1027 if not opts['force']:
1027 if not opts['force']:
1028 if state in 'mn':
1028 if state in 'mn':
1029 msg = _('%s: not overwriting - file already committed\n')
1029 msg = _('%s: not overwriting - file already committed\n')
1030 if after:
1030 if after:
1031 flags = '--after --force'
1031 flags = '--after --force'
1032 else:
1032 else:
1033 flags = '--force'
1033 flags = '--force'
1034 if rename:
1034 if rename:
1035 hint = _('(hg rename %s to replace the file by '
1035 hint = _('(hg rename %s to replace the file by '
1036 'recording a rename)\n') % flags
1036 'recording a rename)\n') % flags
1037 else:
1037 else:
1038 hint = _('(hg copy %s to replace the file by '
1038 hint = _('(hg copy %s to replace the file by '
1039 'recording a copy)\n') % flags
1039 'recording a copy)\n') % flags
1040 else:
1040 else:
1041 msg = _('%s: not overwriting - file exists\n')
1041 msg = _('%s: not overwriting - file exists\n')
1042 if rename:
1042 if rename:
1043 hint = _('(hg rename --after to record the rename)\n')
1043 hint = _('(hg rename --after to record the rename)\n')
1044 else:
1044 else:
1045 hint = _('(hg copy --after to record the copy)\n')
1045 hint = _('(hg copy --after to record the copy)\n')
1046 ui.warn(msg % reltarget)
1046 ui.warn(msg % reltarget)
1047 ui.warn(hint)
1047 ui.warn(hint)
1048 return
1048 return
1049
1049
1050 if after:
1050 if after:
1051 if not exists:
1051 if not exists:
1052 if rename:
1052 if rename:
1053 ui.warn(_('%s: not recording move - %s does not exist\n') %
1053 ui.warn(_('%s: not recording move - %s does not exist\n') %
1054 (relsrc, reltarget))
1054 (relsrc, reltarget))
1055 else:
1055 else:
1056 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1056 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1057 (relsrc, reltarget))
1057 (relsrc, reltarget))
1058 return
1058 return
1059 elif not dryrun:
1059 elif not dryrun:
1060 try:
1060 try:
1061 if exists:
1061 if exists:
1062 os.unlink(target)
1062 os.unlink(target)
1063 targetdir = os.path.dirname(target) or '.'
1063 targetdir = os.path.dirname(target) or '.'
1064 if not os.path.isdir(targetdir):
1064 if not os.path.isdir(targetdir):
1065 os.makedirs(targetdir)
1065 os.makedirs(targetdir)
1066 if samefile:
1066 if samefile:
1067 tmp = target + "~hgrename"
1067 tmp = target + "~hgrename"
1068 os.rename(src, tmp)
1068 os.rename(src, tmp)
1069 os.rename(tmp, target)
1069 os.rename(tmp, target)
1070 else:
1070 else:
1071 util.copyfile(src, target)
1071 util.copyfile(src, target)
1072 srcexists = True
1072 srcexists = True
1073 except IOError as inst:
1073 except IOError as inst:
1074 if inst.errno == errno.ENOENT:
1074 if inst.errno == errno.ENOENT:
1075 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1075 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1076 srcexists = False
1076 srcexists = False
1077 else:
1077 else:
1078 ui.warn(_('%s: cannot copy - %s\n') %
1078 ui.warn(_('%s: cannot copy - %s\n') %
1079 (relsrc, encoding.strtolocal(inst.strerror)))
1079 (relsrc, encoding.strtolocal(inst.strerror)))
1080 return True # report a failure
1080 return True # report a failure
1081
1081
1082 if ui.verbose or not exact:
1082 if ui.verbose or not exact:
1083 if rename:
1083 if rename:
1084 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1084 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1085 else:
1085 else:
1086 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1086 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1087
1087
1088 targets[abstarget] = abssrc
1088 targets[abstarget] = abssrc
1089
1089
1090 # fix up dirstate
1090 # fix up dirstate
1091 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1091 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1092 dryrun=dryrun, cwd=cwd)
1092 dryrun=dryrun, cwd=cwd)
1093 if rename and not dryrun:
1093 if rename and not dryrun:
1094 if not after and srcexists and not samefile:
1094 if not after and srcexists and not samefile:
1095 repo.wvfs.unlinkpath(abssrc)
1095 repo.wvfs.unlinkpath(abssrc)
1096 wctx.forget([abssrc])
1096 wctx.forget([abssrc])
1097
1097
1098 # pat: ossep
1098 # pat: ossep
1099 # dest ossep
1099 # dest ossep
1100 # srcs: list of (hgsep, hgsep, ossep, bool)
1100 # srcs: list of (hgsep, hgsep, ossep, bool)
1101 # return: function that takes hgsep and returns ossep
1101 # return: function that takes hgsep and returns ossep
1102 def targetpathfn(pat, dest, srcs):
1102 def targetpathfn(pat, dest, srcs):
1103 if os.path.isdir(pat):
1103 if os.path.isdir(pat):
1104 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1104 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1105 abspfx = util.localpath(abspfx)
1105 abspfx = util.localpath(abspfx)
1106 if destdirexists:
1106 if destdirexists:
1107 striplen = len(os.path.split(abspfx)[0])
1107 striplen = len(os.path.split(abspfx)[0])
1108 else:
1108 else:
1109 striplen = len(abspfx)
1109 striplen = len(abspfx)
1110 if striplen:
1110 if striplen:
1111 striplen += len(pycompat.ossep)
1111 striplen += len(pycompat.ossep)
1112 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1112 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1113 elif destdirexists:
1113 elif destdirexists:
1114 res = lambda p: os.path.join(dest,
1114 res = lambda p: os.path.join(dest,
1115 os.path.basename(util.localpath(p)))
1115 os.path.basename(util.localpath(p)))
1116 else:
1116 else:
1117 res = lambda p: dest
1117 res = lambda p: dest
1118 return res
1118 return res
1119
1119
    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        # Like targetpathfn, but for --after: the copies already happened
        # in the working directory, so the right strip length has to be
        # guessed from which target paths actually exist on disk.
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # score a candidate strip length by how many sources
                    # already exist at the implied target location
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    # both candidate strip lengths are plausible; keep the
                    # one that matches more already-existing targets
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res
1163
1163
    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    # the destination only counts as a directory if it is a real one,
    # not a symlink pointing at one
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    # with --after, target paths are inferred from what already exists
    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            # copyfile returns a truthy value on failure
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    # True indicates at least one copy/rename failed
    return errors != 0
1200
1200
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1221
1221
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that updates a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a (message, node, rejects) tuple; node is None when nothing
    was committed, rejects is True when --partial left reject files.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        # patch.extract found no patch data at all
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                # recorded parents are unknown locally; fall back to the
                # working directory parents
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # apply the patch to the working directory
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except error.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                if partial:
                    # with --partial, failed hunks become .rej files and we
                    # still commit what applied
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform,
                                             **pycompat.strkwargs(opts))
                extra = {}
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                overrides = {}
                if partial:
                    overrides[('ui', 'allowemptycommit')] = True
                with repo.ui.configoverride(overrides, 'import'):
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
        else:
            # --bypass: commit directly from the patch without touching the
            # working directory
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except error.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.memctx(repo, (p1.node(), p2.node()),
                                        message,
                                        files=files,
                                        filectxfn=store,
                                        user=user,
                                        date=date,
                                        branch=branch,
                                        editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        # always clean up the temporary patch file written by patch.extract
        os.unlink(tmpname)
1392
1392
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1400
1400
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
    """Emit one changeset as an "HG changeset patch" through write().

    The header lines (user, date, branch, node, parent(s), plus any
    extension-supplied headers) are written first, then the description
    and the diff against the selected parent.
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    if switch_parent:
        # diff against the second parent instead of the first
        parents.reverse()
    prev = parents[0] if parents else nullid

    write("# HG changeset patch\n")
    write("# User %s\n" % ctx.user())
    write("# Date %d %d\n" % ctx.date())
    write("# %s\n" % util.datestr(ctx.date()))
    branch = ctx.branch()
    if branch and branch != 'default':
        write("# Branch %s\n" % branch)
    write("# Node ID %s\n" % hex(node))
    write("# Parent %s\n" % hex(prev))
    if len(parents) > 1:
        write("# Parent %s\n" % hex(parents[1]))

    # let extensions append their own header lines
    for headerid in extraexport:
        line = extraexportmap[headerid](seqno, ctx)
        if line is not None:
            write('# %s\n' % line)
    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
        write(chunk, label=label)
1433
1433
def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named using
                              the given template.
        Neither fp nor template specified: All revs written to repo.ui.write()
    '''

    total = len(revs)
    # width used to zero-pad %r/%N style template expansions
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    write = None
    dest = '<unnamed>'
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            # open a per-revision file named from the template; write() is
            # rebound to it for this iteration only
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            def write(s, **kw):
                fo.write(s)
            if not dest.startswith('<'):
                repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()
1492
1492
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False, hunksfilterfn=None):
    '''show diff or diffstat.

    Writes either a full diff (stat=False) or a diffstat summary
    (stat=True) between node1 and node2 to fp, or to ui.write when fp is
    None.  With listsubrepos, recurses into subrepositories as well.
    '''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat needs zero context lines and real file name prefixes
        diffopts = diffopts.copy(context=0, noprefix=False)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, opts=diffopts,
                            prefix=prefix, relroot=relroot,
                            hunksfilterfn=hunksfilterfn)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, opts=diffopts, prefix=prefix,
                                         relroot=relroot,
                                         hunksfilterfn=hunksfilterfn):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1551
1551
1552 def _changesetlabels(ctx):
1552 def _changesetlabels(ctx):
1553 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1553 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1554 if ctx.obsolete():
1554 if ctx.obsolete():
1555 labels.append('changeset.obsolete')
1555 labels.append('changeset.obsolete')
1556 if ctx.isunstable():
1556 if ctx.isunstable():
1557 labels.append('changeset.unstable')
1557 labels.append('changeset.unstable')
1558 for instability in ctx.instabilities():
1558 for instability in ctx.instabilities():
1559 labels.append('instability.%s' % instability)
1559 labels.append('instability.%s' % instability)
1560 return ' '.join(labels)
1560 return ' '.join(labels)
1561
1561
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        # per-rev buffers used in buffered mode: rev -> rendered text
        self.header = {}
        self.hunk = {}
        # last header written, to avoid repeating identical headers
        self.lastheader = None
        self.footer = None
        # column format strings shared with the template keywords module
        self._columns = templatekw.getlogcolumns()

    def flush(self, ctx):
        """Write out buffered output for *ctx*, if any.

        Returns 1 if a buffered hunk was written, 0 otherwise.  An
        identical header is only written once across consecutive flushes.
        """
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        # write the accumulated footer, if any subclass or part set one
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, hunksfilterfn=None,
             **props):
        """Render *ctx*; in buffered mode, capture the output per rev
        instead of writing it immediately (see flush())."""
        props = pycompat.byteskwargs(props)
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, hunksfilterfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, hunksfilterfn, props)

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()

        # quiet mode: only the changeset id, nothing else
        if self.ui.quiet:
            self.ui.write("%s\n" % scmutil.formatchangeid(ctx),
                          label='log.node')
            return

        columns = self._columns
        self.ui.write(columns['changeset'] % scmutil.formatchangeid(ctx),
                      label=_changesetlabels(ctx))

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            self.ui.write(columns['branch'] % branch, label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            self.ui.write(columns['phase'] % ctx.phasestr(), label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            self.ui.write(columns['parent'] % scmutil.formatchangeid(pctx),
                          label=label)

        # manifest line only makes sense for committed revisions (rev is
        # None for the working directory)
        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            mrev = self.repo.manifestlog._revlog.rev(mnode)
            self.ui.write(columns['manifest']
                          % scmutil.formatrevnode(self.ui, mrev, mnode),
                          label='ui.debug log.manifest')
        self.ui.write(columns['user'] % ctx.user(), label='log.user')
        self.ui.write(columns['date'] % util.datestr(ctx.date()),
                      label='log.date')

        if ctx.isunstable():
            instabilities = ctx.instabilities()
            self.ui.write(columns['instability'] % ', '.join(instabilities),
                          label='log.instability')

        elif ctx.obsolete():
            self._showobsfate(ctx)

        self._exthook(ctx)

        if self.ui.debugflag:
            # status vs first parent, split into modified/added/removed
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip(['files', 'files+', 'files-'], files):
                if value:
                    self.ui.write(columns[key] % " ".join(value),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            self.ui.write(columns['files'] % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            # copies is a list of (dest, source) pairs
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(columns['copies'] % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                self.ui.write(columns['extra'] % (key, util.escapestr(value)),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # non-verbose: only the first line of the description
                self.ui.write(columns['summary'] % description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)

    def _showobsfate(self, ctx):
        # render the fate of an obsolete changeset (one line per successor
        # set), as computed by the obsfate template keyword
        obsfate = templatekw.showobsfate(repo=self.repo, ctx=ctx, ui=self.ui)

        if obsfate:
            for obsfateline in obsfate:
                self.ui.write(self._columns['obsolete'] % obsfateline,
                              label='log.obsfate')

    def _exthook(self, ctx):
        '''empty method used by extension as a hook point
        '''

    def showpatch(self, ctx, matchfn, hunksfilterfn=None):
        """Write the diffstat and/or patch for *ctx* against its first
        parent, honoring the 'stat' and 'patch' diff options."""
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True,
                               hunksfilterfn=hunksfilterfn)
            if diff:
                if stat:
                    # blank line between the diffstat and the patch
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False,
                               hunksfilterfn=hunksfilterfn)
                self.ui.write("\n")
1730
1730
class jsonchangeset(changeset_printer):
    '''format changeset information.'''

    # NOTE: JSON is assembled by hand with literal punctuation; _first
    # tracks whether a comma separator is needed between array items.
    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        self._first = True

    def close(self):
        # close the JSON array; emit "[]" if no changeset was ever shown
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        # the working directory has no rev/node; render them as JSON null
        if rev is None:
            jrev = jnode = 'null'
        else:
            jrev = '%d' % rev
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        # quiet mode: only rev and node
        if self.ui.quiet:
            self.ui.write(('\n  "rev": %s') % jrev)
            self.ui.write((',\n  "node": %s') % jnode)
            self.ui.write('\n }')
            return

        self.ui.write(('\n  "rev": %s') % jrev)
        self.ui.write((',\n  "node": %s') % jnode)
        self.ui.write((',\n  "branch": "%s"') % j(ctx.branch()))
        self.ui.write((',\n  "phase": "%s"') % ctx.phasestr())
        self.ui.write((',\n  "user": "%s"') % j(ctx.user()))
        self.ui.write((',\n  "date": [%d, %d]') % ctx.date())
        self.ui.write((',\n  "desc": "%s"') % j(ctx.description()))

        self.ui.write((',\n  "bookmarks": [%s]') %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write((',\n  "tags": [%s]') %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write((',\n  "parents": [%s]') %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write((',\n  "manifest": %s') % jmanifestnode)

            self.ui.write((',\n  "extra": {%s}') %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            # status against the first parent: modified/added/removed
            files = ctx.p1().status(ctx)
            self.ui.write((',\n  "modified": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write((',\n  "added": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write((',\n  "removed": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write((',\n  "files": [%s]') %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write((',\n  "copies": {%s}') %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                # capture the diffstat output so it can be JSON-escaped
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write((',\n  "diffstat": "%s"')
                              % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write((',\n  "diff": "%s"') % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1829
1829
class changeset_templater(changeset_printer):
    '''format changeset information.

    Note: there are a variety of convenience functions to build a
    changeset_templater for common cases. See functions such as:
    makelogtemplater, show_changeset, buildcommittemplate, or other
    functions that use changesest_templater.
    '''

    # Arguments before "buffered" used to be positional. Consider not
    # adding/removing arguments before "buffered" to not break callers.
    def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
                 buffered=False):
        diffopts = diffopts or {}

        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.t = formatter.loadtemplater(ui, tmplspec,
                                         cache=templatekw.defaulttempl)
        # monotonically increasing index of the item being shown, used by
        # the 'separator' part and exposed as the 'index' keyword
        self._counter = itertools.count()
        self.cache = {}

        self._tref = tmplspec.ref
        # map of logical part name -> template name actually used
        self._parts = {'header': '', 'footer': '',
                       tmplspec.ref: tmplspec.ref,
                       'docheader': '', 'docfooter': '',
                       'separator': ''}
        if tmplspec.mapfile:
            # find correct templates for current mode, for backward
            # compatibility with 'log -v/-q/--debug' using a mapfile
            tmplmodes = [
                (True, ''),
                (self.ui.verbose, '_verbose'),
                (self.ui.quiet, '_quiet'),
                (self.ui.debugflag, '_debug'),
            ]
            for mode, postfix in tmplmodes:
                for t in self._parts:
                    cur = t + postfix
                    if mode and cur in self.t:
                        self._parts[t] = cur
        else:
            partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
            m = formatter.templatepartsmap(tmplspec, self.t, partnames)
            self._parts.update(m)

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props.update(templatekw.keywords)
        # NOTE: the template object itself ('templ') is no longer passed in
        # props; the template engine keeps its own default resources
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['index'] = index = next(self._counter)
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache
        props = pycompat.strkwargs(props)

        # write separator, which wouldn't work well with the header part below
        # since there's inherently a conflict between header (across items) and
        # separator (per item)
        if self._parts['separator'] and index > 0:
            self.ui.write(templater.stringify(self.t(self._parts['separator'])))

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts[self._tref]
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)

        if self._parts['footer']:
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))
1922
def logtemplatespec(tmpl, mapfile):
    """Build a formatter.templatespec for log-style output.

    A non-empty *mapfile* yields a spec bound to the 'changeset' topic;
    otherwise the literal template stands alone with no map file.
    """
    if not mapfile:
        return formatter.templatespec('', tmpl, None)
    return formatter.templatespec('changeset', tmpl, mapfile)
1929
1928
def _lookuplogtemplate(ui, tmpl, style):
    """Find the template matching the given template spec or style

    See formatter.lookuptemplate() for details.
    """

    # ui settings
    if not tmpl and not style: # template are stronger than style
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            return logtemplatespec(templater.unquotestring(tmpl), None)
        else:
            style = util.expandpath(ui.config('ui', 'style'))

    if not tmpl and style:
        mapfile = style
        # a bare style name (no directory part) is resolved against the
        # bundled map-cmdline.* styles first, then as a plain template path
        if not os.path.split(mapfile)[0]:
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return logtemplatespec(None, mapfile)

    # neither a template nor a style: caller falls back to default display
    if not tmpl:
        return logtemplatespec(None, None)

    return formatter.lookuptemplate(ui, 'changeset', tmpl)
1957
1956
def makelogtemplater(ui, repo, tmpl, buffered=False):
    """Create a changeset_templater from a literal template 'tmpl'
    byte-string."""
    return changeset_templater(ui, repo, logtemplatespec(tmpl, None),
                               buffered=buffered)
1963
1962
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    match = None
    # a matcher is only needed when a patch or diffstat will be shown
    if opts.get('patch') or opts.get('stat'):
        match = scmutil.matchall(repo)

    # the special 'json' template name selects the dedicated JSON printer
    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, match, opts, buffered)

    spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))

    if not spec.ref and not spec.tmpl and not spec.mapfile:
        return changeset_printer(ui, repo, match, opts, buffered)

    return changeset_templater(ui, repo, spec, match, opts, buffered)
1989
1988
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('prednode', '%s ', hex(marker.prednode()))
    succs = marker.succnodes()
    # successors may be empty (pruned changeset); only written when present
    fm.condwrite(succs, 'succnodes', '%s ',
                 fm.formatlist(map(hex, succs), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parents), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # 'date' is shown separately above, so drop it from the metadata dict
    meta = marker.metadata().copy()
    meta.pop('date', None)
    fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
    fm.plain('\n')
2010
2009
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    matcher = scmutil.matchall(repo)
    found = {}

    def collect(ctx, fns):
        # Record the date of every changeset matching the spec; invoked by
        # walkchangerevs() on each revision before it is yielded.
        when = ctx.date()
        if datematch(when[0]):
            found[ctx.rev()] = when

    for ctx in walkchangerevs(repo, matcher, {'rev': None}, collect):
        rev = ctx.rev()
        if rev in found:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(found[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
2031
2030
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield an endless sequence of revision-window sizes.

    The first value is ``windowsize``; each subsequent value doubles the
    previous one until it reaches ``sizelimit``, after which the last
    value repeats forever.
    """
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size *= 2
2037
2036
class FileWalkError(Exception):
    """Raised when a file history cannot be walked via filelogs alone."""
2040
2039
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)

    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)

    def iterfiles():
        # Yield (filename, filenode) pairs for every matched file, then
        # for every copy source discovered while walking (with --follow).
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
2137
2136
class _followfilter(object):
    """Stateful predicate selecting revisions connected to a start rev.

    The first revision passed to match() becomes the starting point;
    subsequent calls return True for revisions related to it: descendants
    when revisions arrive in increasing order, ancestors otherwise.
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            # With onlyfirst, only the first-parent chain is considered.
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
2175
2174
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # prepare() is called in forward (sorted) order for the whole
            # window before any revision of the window is yielded.
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
2312
2311
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating revs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    fcacheready = [False]
    pctx = repo['.']

    def populate():
        # Map each revision in the followed files' history to the set of
        # file names involved at that revision.
        for fn in files:
            fctx = pctx[fn]
            fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
            for c in fctx.ancestors(followfirst=followfirst):
                fcache.setdefault(c.rev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher
2340
2339
2341 def _makenofollowlogfilematcher(repo, pats, opts):
2340 def _makenofollowlogfilematcher(repo, pats, opts):
2342 '''hook for extensions to override the filematcher for non-follow cases'''
2341 '''hook for extensions to override the filematcher for non-follow cases'''
2343 return None
2342 return None
2344
2343
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # Map each supported option to its revset template and, for list-valued
    # options, the boolean operator joining the individual values.
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or (not match.always() and opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # Combine every recognized, non-empty option into a single revset
    # expression, 'and'-ing the per-option sub-expressions together.
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2495
2494
def _logrevs(repo, opts):
    """Return the initial, unfiltered revision set for a log command.

    The default --rev value depends on --follow, while --follow behavior
    depends on the revisions resolved from --rev, hence this helper.
    """
    following = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        # explicit user-specified revisions win
        revs = scmutil.revrange(repo, opts['rev'])
    elif following and repo.dirstate.p1() == nullid:
        # --follow with no working-directory parent: nothing to show
        revs = smartset.baseset()
    elif following:
        # walk back from the working-directory parent
        revs = repo.revs('reverse(:.)')
    else:
        # the whole repository, newest first
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs
2510
2509
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr)
        revs = matcher(repo, revs)
    if limit is not None:
        # keep only the first 'limit' revisions
        revs = smartset.baseset(list(itertools.islice(revs, limit)))

    return revs, expr, filematcher
2541
2540
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr)
        revs = matcher(repo, revs)
    if limit is not None:
        # Truncate to the first 'limit' revisions; written to match the
        # equivalent loop in getgraphlogrevs (same comparison direction
        # and loop-variable naming) so the twins stay in sync.
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher
2567
2566
2568 def _parselinerangelogopt(repo, opts):
2567 def _parselinerangelogopt(repo, opts):
2569 """Parse --line-range log option and return a list of tuples (filename,
2568 """Parse --line-range log option and return a list of tuples (filename,
2570 (fromline, toline)).
2569 (fromline, toline)).
2571 """
2570 """
2572 linerangebyfname = []
2571 linerangebyfname = []
2573 for pat in opts.get('line_range', []):
2572 for pat in opts.get('line_range', []):
2574 try:
2573 try:
2575 pat, linerange = pat.rsplit(',', 1)
2574 pat, linerange = pat.rsplit(',', 1)
2576 except ValueError:
2575 except ValueError:
2577 raise error.Abort(_('malformatted line-range pattern %s') % pat)
2576 raise error.Abort(_('malformatted line-range pattern %s') % pat)
2578 try:
2577 try:
2579 fromline, toline = map(int, linerange.split(':'))
2578 fromline, toline = map(int, linerange.split(':'))
2580 except ValueError:
2579 except ValueError:
2581 raise error.Abort(_("invalid line range for %s") % pat)
2580 raise error.Abort(_("invalid line range for %s") % pat)
2582 msg = _("line range pattern '%s' must match exactly one file") % pat
2581 msg = _("line range pattern '%s' must match exactly one file") % pat
2583 fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
2582 fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
2584 linerangebyfname.append(
2583 linerangebyfname.append(
2585 (fname, util.processlinerange(fromline, toline)))
2584 (fname, util.processlinerange(fromline, toline)))
2586 return linerangebyfname
2585 return linerangebyfname
2587
2586
def getloglinerangerevs(repo, userrevs, opts):
    """Return (revs, filematcher, hunksfilter).

    "revs" are revisions obtained by processing "line-range" log options and
    walking block ancestors of each specified file/line-range.

    "filematcher(rev) -> match" is a factory function returning a match object
    for a given revision for file patterns specified in --line-range option.
    If neither --stat nor --patch options are passed, "filematcher" is None.

    "hunksfilter(rev) -> filterfn(fctx, hunks)" is a factory function
    returning a hunks filtering function.
    If neither --stat nor --patch options are passed, "filterhunks" is None.
    """
    wctx = repo[None]

    # Two-levels map of "rev -> file ctx -> [line range]".
    linerangesbyrev = {}
    for fname, (fromline, toline) in _parselinerangelogopt(repo, opts):
        if fname not in wctx:
            raise error.Abort(_('cannot follow file not in parent '
                                'revision: "%s"') % fname)
        fctx = wctx.filectx(fname)
        for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
            rev = fctx.introrev()
            if rev in userrevs:
                byfile = linerangesbyrev.setdefault(rev, {})
                byfile.setdefault(fctx.path(), []).append(linerange)

    filematcher = None
    hunksfilter = None
    if opts.get('patch') or opts.get('stat'):

        def keepallhunksfn(fctx, hunks):
            # No line range recorded for this revision: keep everything.
            return hunks

        def hunksfilter(rev):
            rangesbyfile = linerangesbyrev.get(rev)
            if rangesbyfile is None:
                return keepallhunksfn

            def filterfn(fctx, hunks):
                lineranges = rangesbyfile.get(fctx.path())
                if lineranges is None:
                    # file untouched by any line range: pass hunks through
                    for hunk in hunks:
                        yield hunk
                    return
                for hr, lines in hunks:
                    if hr is None: # binary
                        yield hr, lines
                        continue
                    # keep hunks that intersect at least one wanted range
                    if any(mdiff.hunkinrange(hr[2:], lr)
                           for lr in lineranges):
                        yield hr, lines

            return filterfn

        def filematcher(rev):
            files = list(linerangesbyrev.get(rev, []))
            return scmutil.matchfiles(repo, files)

    revs = sorted(linerangesbyrev, reverse=True)

    return revs, filematcher, hunksfilter
2654
2653
def _graphnodeformatter(ui, displayer):
    """Return a function(repo, ctx) rendering the graph node character."""
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        # fast path for "{graphnode}"
        return templatekw.showgraphnode

    spec = templater.unquotestring(spec)
    templ = formatter.maketemplater(ui, spec)
    if isinstance(displayer, changeset_templater):
        # reuse cache of slow templates
        cache = displayer.cache
    else:
        cache = {}
    props = templatekw.keywords.copy()
    props['cache'] = cache

    def formatnode(repo, ctx):
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templ.render(props)

    return formatnode
2675
2673
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None, props=None):
    """Render the revision DAG 'dag' as an ASCII graph through 'displayer'."""
    props = props or {}
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

    # experimental config: experimental.graphshorten
    state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, ctype, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fname in ctx.files():
                renamed = getrenamed(fname, ctx.rev())
                if renamed:
                    copies.append((fname, renamed[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        edges = edgefn(ctype, char, state, rev, parents)
        firstedge = next(edges)
        width = firstedge[2]
        displayer.show(ctx, copies=copies, matchfn=revmatchfn,
                       _graphwidth=width, **pycompat.strkwargs(props))
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            # drop trailing empty string left by a final newline
            del lines[-1]
        displayer.flush(ctx)
        for etype, echar, ewidth, coldata in itertools.chain([firstedge],
                                                             edges):
            graphmod.ascii(ui, state, etype, echar, lines, coldata)
            lines = []
    displayer.close()
2728
2726
def graphlog(ui, repo, pats, opts):
    """Run 'hg log -G'; parameters are identical to log command ones."""
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        # bound the rename lookup by the highest requested revision, if any
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)
2745
2743
def checkunsupportedgraphflags(pats, opts):
    """Abort if an option incompatible with -G/--graph was given."""
    for op in ["newest_first"]:
        if opts.get(op):
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % op.replace("_", "-"))
2751
2749
def graphrevs(repo, nodes, opts):
    """Reverse 'nodes' in place, apply --limit, and return a DAG walker."""
    cap = loglimit(opts)
    nodes.reverse()
    return graphmod.nodes(repo, nodes if cap is None else nodes[:cap])
2758
2756
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule matched unknown files for addition; return rejected names."""
    def join(f):
        return os.path.join(prefix, f)

    bad = []

    def badfn(x, y):
        # remember the failing path, then defer to the matcher's callback
        bad.append(x)
        return match.bad(x, y)

    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot
    # of clean files, which we aren't interested in and takes time.
    walked = dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
                           unknown=True, ignored=False, full=False)
    for f in sorted(walked):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                # audit for case collisions before recording the add
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            # recurse explicit-only unless --subrepos was requested
            bad.extend(sub.add(ui, submatch, prefix,
                               not opts.get(r'subrepos'), **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2801
2799
def addwebdirpath(repo, serverpath, webconf):
    """Register 'repo' under 'serverpath' in 'webconf', then do the same
    for every subrepository recorded in the history of .hgsub."""
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    for rev in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[rev]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2810
2808
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking the matched files; return (bad, forgot) name lists."""
    def join(f):
        return os.path.join(prefix, f)

    bad = []

    def badfn(x, y):
        # remember the failing path, then defer to the matcher's callback
        bad.append(x)
        return match.bad(x, y)

    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend(subpath + '/' + f for f in subbad)
            forgot.extend(subpath + '/' + f for f in subforgot)
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            # guard clauses: tracked files, directories and files already
            # forgotten by a subrepo need no complaint here
            if f in repo.dirstate or repo.wvfs.isdir(f) or f in forgot:
                continue
            if repo.wvfs.exists(f):
                # Don't complain if the exact case match wasn't given.
                # But don't do this until after checking 'forgot', so
                # that subrepo files aren't normalized, and this op is
                # purely from data cached by the status walk above.
                if repo.dirstate.normalize(f) in repo.dirstate:
                    continue
                ui.warn(_('not removing %s: '
                          'file is already untracked\n')
                        % match.rel(f))
            # missing files also end up here; they warn elsewhere
            bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2858
2856
def files(ui, ctx, m, fm, fmt, subrepos):
    """Write the files of 'ctx' matched by 'm' through formatter 'fm';
    return 0 if anything was listed, 1 otherwise."""
    rev = ctx.rev()
    ret = 1
    dirstate = ctx.repo().dirstate

    for f in ctx.matches(m):
        # in the working directory, skip files marked for removal
        if rev is None and dirstate[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fctx = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fctx.size(), fctx.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2888
2886
2889 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2887 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2890 join = lambda f: os.path.join(prefix, f)
2888 join = lambda f: os.path.join(prefix, f)
2891 ret = 0
2889 ret = 0
2892 s = repo.status(match=m, clean=True)
2890 s = repo.status(match=m, clean=True)
2893 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2891 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2894
2892
2895 wctx = repo[None]
2893 wctx = repo[None]
2896
2894
2897 if warnings is None:
2895 if warnings is None:
2898 warnings = []
2896 warnings = []
2899 warn = True
2897 warn = True
2900 else:
2898 else:
2901 warn = False
2899 warn = False
2902
2900
2903 subs = sorted(wctx.substate)
2901 subs = sorted(wctx.substate)
2904 total = len(subs)
2902 total = len(subs)
2905 count = 0
2903 count = 0
2906 for subpath in subs:
2904 for subpath in subs:
2907 count += 1
2905 count += 1
2908 submatch = matchmod.subdirmatcher(subpath, m)
2906 submatch = matchmod.subdirmatcher(subpath, m)
2909 if subrepos or m.exact(subpath) or any(submatch.files()):
2907 if subrepos or m.exact(subpath) or any(submatch.files()):
2910 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2908 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2911 sub = wctx.sub(subpath)
2909 sub = wctx.sub(subpath)
2912 try:
2910 try:
2913 if sub.removefiles(submatch, prefix, after, force, subrepos,
2911 if sub.removefiles(submatch, prefix, after, force, subrepos,
2914 warnings):
2912 warnings):
2915 ret = 1
2913 ret = 1
2916 except error.LookupError:
2914 except error.LookupError:
2917 warnings.append(_("skipping missing subrepository: %s\n")
2915 warnings.append(_("skipping missing subrepository: %s\n")
2918 % join(subpath))
2916 % join(subpath))
2919 ui.progress(_('searching'), None)
2917 ui.progress(_('searching'), None)
2920
2918
2921 # warn about failure to delete explicit files/dirs
2919 # warn about failure to delete explicit files/dirs
2922 deleteddirs = util.dirs(deleted)
2920 deleteddirs = util.dirs(deleted)
2923 files = m.files()
2921 files = m.files()
2924 total = len(files)
2922 total = len(files)
2925 count = 0
2923 count = 0
2926 for f in files:
2924 for f in files:
2927 def insubrepo():
2925 def insubrepo():
2928 for subpath in wctx.substate:
2926 for subpath in wctx.substate:
2929 if f.startswith(subpath + '/'):
2927 if f.startswith(subpath + '/'):
2930 return True
2928 return True
2931 return False
2929 return False
2932
2930
2933 count += 1
2931 count += 1
2934 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2932 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2935 isdir = f in deleteddirs or wctx.hasdir(f)
2933 isdir = f in deleteddirs or wctx.hasdir(f)
2936 if (f in repo.dirstate or isdir or f == '.'
2934 if (f in repo.dirstate or isdir or f == '.'
2937 or insubrepo() or f in subs):
2935 or insubrepo() or f in subs):
2938 continue
2936 continue
2939
2937
2940 if repo.wvfs.exists(f):
2938 if repo.wvfs.exists(f):
2941 if repo.wvfs.isdir(f):
2939 if repo.wvfs.isdir(f):
2942 warnings.append(_('not removing %s: no tracked files\n')
2940 warnings.append(_('not removing %s: no tracked files\n')
2943 % m.rel(f))
2941 % m.rel(f))
2944 else:
2942 else:
2945 warnings.append(_('not removing %s: file is untracked\n')
2943 warnings.append(_('not removing %s: file is untracked\n')
2946 % m.rel(f))
2944 % m.rel(f))
2947 # missing files will generate a warning elsewhere
2945 # missing files will generate a warning elsewhere
2948 ret = 1
2946 ret = 1
2949 ui.progress(_('deleting'), None)
2947 ui.progress(_('deleting'), None)
2950
2948
2951 if force:
2949 if force:
2952 list = modified + deleted + clean + added
2950 list = modified + deleted + clean + added
2953 elif after:
2951 elif after:
2954 list = deleted
2952 list = deleted
2955 remaining = modified + added + clean
2953 remaining = modified + added + clean
2956 total = len(remaining)
2954 total = len(remaining)
2957 count = 0
2955 count = 0
2958 for f in remaining:
2956 for f in remaining:
2959 count += 1
2957 count += 1
2960 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2958 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2961 if ui.verbose or (f in files):
2959 if ui.verbose or (f in files):
2962 warnings.append(_('not removing %s: file still exists\n')
2960 warnings.append(_('not removing %s: file still exists\n')
2963 % m.rel(f))
2961 % m.rel(f))
2964 ret = 1
2962 ret = 1
2965 ui.progress(_('skipping'), None)
2963 ui.progress(_('skipping'), None)
2966 else:
2964 else:
2967 list = deleted + clean
2965 list = deleted + clean
2968 total = len(modified) + len(added)
2966 total = len(modified) + len(added)
2969 count = 0
2967 count = 0
2970 for f in modified:
2968 for f in modified:
2971 count += 1
2969 count += 1
2972 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2970 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2973 warnings.append(_('not removing %s: file is modified (use -f'
2971 warnings.append(_('not removing %s: file is modified (use -f'
2974 ' to force removal)\n') % m.rel(f))
2972 ' to force removal)\n') % m.rel(f))
2975 ret = 1
2973 ret = 1
2976 for f in added:
2974 for f in added:
2977 count += 1
2975 count += 1
2978 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2976 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2979 warnings.append(_("not removing %s: file has been marked for add"
2977 warnings.append(_("not removing %s: file has been marked for add"
2980 " (use 'hg forget' to undo add)\n") % m.rel(f))
2978 " (use 'hg forget' to undo add)\n") % m.rel(f))
2981 ret = 1
2979 ret = 1
2982 ui.progress(_('skipping'), None)
2980 ui.progress(_('skipping'), None)
2983
2981
2984 list = sorted(list)
2982 list = sorted(list)
2985 total = len(list)
2983 total = len(list)
2986 count = 0
2984 count = 0
2987 for f in list:
2985 for f in list:
2988 count += 1
2986 count += 1
2989 if ui.verbose or not m.exact(f):
2987 if ui.verbose or not m.exact(f):
2990 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2988 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2991 ui.status(_('removing %s\n') % m.rel(f))
2989 ui.status(_('removing %s\n') % m.rel(f))
2992 ui.progress(_('deleting'), None)
2990 ui.progress(_('deleting'), None)
2993
2991
2994 with repo.wlock():
2992 with repo.wlock():
2995 if not after:
2993 if not after:
2996 for f in list:
2994 for f in list:
2997 if f in added:
2995 if f in added:
2998 continue # we never unlink added files on remove
2996 continue # we never unlink added files on remove
2999 repo.wvfs.unlinkpath(f, ignoremissing=True)
2997 repo.wvfs.unlinkpath(f, ignoremissing=True)
3000 repo[None].forget(list)
2998 repo[None].forget(list)
3001
2999
3002 if warn:
3000 if warn:
3003 for warning in warnings:
3001 for warning in warnings:
3004 ui.warn(warning)
3002 ui.warn(warning)
3005
3003
3006 return ret
3004 return ret
3007
3005
3008 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
3006 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
3009 err = 1
3007 err = 1
3010 opts = pycompat.byteskwargs(opts)
3008 opts = pycompat.byteskwargs(opts)
3011
3009
3012 def write(path):
3010 def write(path):
3013 filename = None
3011 filename = None
3014 if fntemplate:
3012 if fntemplate:
3015 filename = makefilename(repo, fntemplate, ctx.node(),
3013 filename = makefilename(repo, fntemplate, ctx.node(),
3016 pathname=os.path.join(prefix, path))
3014 pathname=os.path.join(prefix, path))
3017 # attempt to create the directory if it does not already exist
3015 # attempt to create the directory if it does not already exist
3018 try:
3016 try:
3019 os.makedirs(os.path.dirname(filename))
3017 os.makedirs(os.path.dirname(filename))
3020 except OSError:
3018 except OSError:
3021 pass
3019 pass
3022 with formatter.maybereopen(basefm, filename, opts) as fm:
3020 with formatter.maybereopen(basefm, filename, opts) as fm:
3023 data = ctx[path].data()
3021 data = ctx[path].data()
3024 if opts.get('decode'):
3022 if opts.get('decode'):
3025 data = repo.wwritedata(path, data)
3023 data = repo.wwritedata(path, data)
3026 fm.startitem()
3024 fm.startitem()
3027 fm.write('data', '%s', data)
3025 fm.write('data', '%s', data)
3028 fm.data(abspath=path, path=matcher.rel(path))
3026 fm.data(abspath=path, path=matcher.rel(path))
3029
3027
3030 # Automation often uses hg cat on single files, so special case it
3028 # Automation often uses hg cat on single files, so special case it
3031 # for performance to avoid the cost of parsing the manifest.
3029 # for performance to avoid the cost of parsing the manifest.
3032 if len(matcher.files()) == 1 and not matcher.anypats():
3030 if len(matcher.files()) == 1 and not matcher.anypats():
3033 file = matcher.files()[0]
3031 file = matcher.files()[0]
3034 mfl = repo.manifestlog
3032 mfl = repo.manifestlog
3035 mfnode = ctx.manifestnode()
3033 mfnode = ctx.manifestnode()
3036 try:
3034 try:
3037 if mfnode and mfl[mfnode].find(file)[0]:
3035 if mfnode and mfl[mfnode].find(file)[0]:
3038 write(file)
3036 write(file)
3039 return 0
3037 return 0
3040 except KeyError:
3038 except KeyError:
3041 pass
3039 pass
3042
3040
3043 for abs in ctx.walk(matcher):
3041 for abs in ctx.walk(matcher):
3044 write(abs)
3042 write(abs)
3045 err = 0
3043 err = 0
3046
3044
3047 for subpath in sorted(ctx.substate):
3045 for subpath in sorted(ctx.substate):
3048 sub = ctx.sub(subpath)
3046 sub = ctx.sub(subpath)
3049 try:
3047 try:
3050 submatch = matchmod.subdirmatcher(subpath, matcher)
3048 submatch = matchmod.subdirmatcher(subpath, matcher)
3051
3049
3052 if not sub.cat(submatch, basefm, fntemplate,
3050 if not sub.cat(submatch, basefm, fntemplate,
3053 os.path.join(prefix, sub._path),
3051 os.path.join(prefix, sub._path),
3054 **pycompat.strkwargs(opts)):
3052 **pycompat.strkwargs(opts)):
3055 err = 0
3053 err = 0
3056 except error.RepoLookupError:
3054 except error.RepoLookupError:
3057 ui.status(_("skipping missing subrepository: %s\n")
3055 ui.status(_("skipping missing subrepository: %s\n")
3058 % os.path.join(prefix, subpath))
3056 % os.path.join(prefix, subpath))
3059
3057
3060 return err
3058 return err
3061
3059
3062 def commit(ui, repo, commitfunc, pats, opts):
3060 def commit(ui, repo, commitfunc, pats, opts):
3063 '''commit the specified files or all outstanding changes'''
3061 '''commit the specified files or all outstanding changes'''
3064 date = opts.get('date')
3062 date = opts.get('date')
3065 if date:
3063 if date:
3066 opts['date'] = util.parsedate(date)
3064 opts['date'] = util.parsedate(date)
3067 message = logmessage(ui, opts)
3065 message = logmessage(ui, opts)
3068 matcher = scmutil.match(repo[None], pats, opts)
3066 matcher = scmutil.match(repo[None], pats, opts)
3069
3067
3070 dsguard = None
3068 dsguard = None
3071 # extract addremove carefully -- this function can be called from a command
3069 # extract addremove carefully -- this function can be called from a command
3072 # that doesn't support addremove
3070 # that doesn't support addremove
3073 if opts.get('addremove'):
3071 if opts.get('addremove'):
3074 dsguard = dirstateguard.dirstateguard(repo, 'commit')
3072 dsguard = dirstateguard.dirstateguard(repo, 'commit')
3075 with dsguard or util.nullcontextmanager():
3073 with dsguard or util.nullcontextmanager():
3076 if dsguard:
3074 if dsguard:
3077 if scmutil.addremove(repo, matcher, "", opts) != 0:
3075 if scmutil.addremove(repo, matcher, "", opts) != 0:
3078 raise error.Abort(
3076 raise error.Abort(
3079 _("failed to mark all new/missing files as added/removed"))
3077 _("failed to mark all new/missing files as added/removed"))
3080
3078
3081 return commitfunc(ui, repo, message, matcher, opts)
3079 return commitfunc(ui, repo, message, matcher, opts)
3082
3080
3083 def samefile(f, ctx1, ctx2):
3081 def samefile(f, ctx1, ctx2):
3084 if f in ctx1.manifest():
3082 if f in ctx1.manifest():
3085 a = ctx1.filectx(f)
3083 a = ctx1.filectx(f)
3086 if f in ctx2.manifest():
3084 if f in ctx2.manifest():
3087 b = ctx2.filectx(f)
3085 b = ctx2.filectx(f)
3088 return (not a.cmp(b)
3086 return (not a.cmp(b)
3089 and a.flags() == b.flags())
3087 and a.flags() == b.flags())
3090 else:
3088 else:
3091 return False
3089 return False
3092 else:
3090 else:
3093 return f not in ctx2.manifest()
3091 return f not in ctx2.manifest()
3094
3092
3095 def amend(ui, repo, old, extra, pats, opts):
3093 def amend(ui, repo, old, extra, pats, opts):
3096 # avoid cycle context -> subrepo -> cmdutil
3094 # avoid cycle context -> subrepo -> cmdutil
3097 from . import context
3095 from . import context
3098
3096
3099 # amend will reuse the existing user if not specified, but the obsolete
3097 # amend will reuse the existing user if not specified, but the obsolete
3100 # marker creation requires that the current user's name is specified.
3098 # marker creation requires that the current user's name is specified.
3101 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3099 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3102 ui.username() # raise exception if username not set
3100 ui.username() # raise exception if username not set
3103
3101
3104 ui.note(_('amending changeset %s\n') % old)
3102 ui.note(_('amending changeset %s\n') % old)
3105 base = old.p1()
3103 base = old.p1()
3106
3104
3107 with repo.wlock(), repo.lock(), repo.transaction('amend'):
3105 with repo.wlock(), repo.lock(), repo.transaction('amend'):
3108 # Participating changesets:
3106 # Participating changesets:
3109 #
3107 #
3110 # wctx o - workingctx that contains changes from working copy
3108 # wctx o - workingctx that contains changes from working copy
3111 # | to go into amending commit
3109 # | to go into amending commit
3112 # |
3110 # |
3113 # old o - changeset to amend
3111 # old o - changeset to amend
3114 # |
3112 # |
3115 # base o - first parent of the changeset to amend
3113 # base o - first parent of the changeset to amend
3116 wctx = repo[None]
3114 wctx = repo[None]
3117
3115
3118 # Copy to avoid mutating input
3116 # Copy to avoid mutating input
3119 extra = extra.copy()
3117 extra = extra.copy()
3120 # Update extra dict from amended commit (e.g. to preserve graft
3118 # Update extra dict from amended commit (e.g. to preserve graft
3121 # source)
3119 # source)
3122 extra.update(old.extra())
3120 extra.update(old.extra())
3123
3121
3124 # Also update it from the from the wctx
3122 # Also update it from the from the wctx
3125 extra.update(wctx.extra())
3123 extra.update(wctx.extra())
3126
3124
3127 user = opts.get('user') or old.user()
3125 user = opts.get('user') or old.user()
3128 date = opts.get('date') or old.date()
3126 date = opts.get('date') or old.date()
3129
3127
3130 # Parse the date to allow comparison between date and old.date()
3128 # Parse the date to allow comparison between date and old.date()
3131 date = util.parsedate(date)
3129 date = util.parsedate(date)
3132
3130
3133 if len(old.parents()) > 1:
3131 if len(old.parents()) > 1:
3134 # ctx.files() isn't reliable for merges, so fall back to the
3132 # ctx.files() isn't reliable for merges, so fall back to the
3135 # slower repo.status() method
3133 # slower repo.status() method
3136 files = set([fn for st in repo.status(base, old)[:3]
3134 files = set([fn for st in repo.status(base, old)[:3]
3137 for fn in st])
3135 for fn in st])
3138 else:
3136 else:
3139 files = set(old.files())
3137 files = set(old.files())
3140
3138
3141 # add/remove the files to the working copy if the "addremove" option
3139 # add/remove the files to the working copy if the "addremove" option
3142 # was specified.
3140 # was specified.
3143 matcher = scmutil.match(wctx, pats, opts)
3141 matcher = scmutil.match(wctx, pats, opts)
3144 if (opts.get('addremove')
3142 if (opts.get('addremove')
3145 and scmutil.addremove(repo, matcher, "", opts)):
3143 and scmutil.addremove(repo, matcher, "", opts)):
3146 raise error.Abort(
3144 raise error.Abort(
3147 _("failed to mark all new/missing files as added/removed"))
3145 _("failed to mark all new/missing files as added/removed"))
3148
3146
3149 # Check subrepos. This depends on in-place wctx._status update in
3147 # Check subrepos. This depends on in-place wctx._status update in
3150 # subrepo.precommit(). To minimize the risk of this hack, we do
3148 # subrepo.precommit(). To minimize the risk of this hack, we do
3151 # nothing if .hgsub does not exist.
3149 # nothing if .hgsub does not exist.
3152 if '.hgsub' in wctx or '.hgsub' in old:
3150 if '.hgsub' in wctx or '.hgsub' in old:
3153 from . import subrepo # avoid cycle: cmdutil -> subrepo -> cmdutil
3151 from . import subrepo # avoid cycle: cmdutil -> subrepo -> cmdutil
3154 subs, commitsubs, newsubstate = subrepo.precommit(
3152 subs, commitsubs, newsubstate = subrepo.precommit(
3155 ui, wctx, wctx._status, matcher)
3153 ui, wctx, wctx._status, matcher)
3156 # amend should abort if commitsubrepos is enabled
3154 # amend should abort if commitsubrepos is enabled
3157 assert not commitsubs
3155 assert not commitsubs
3158 if subs:
3156 if subs:
3159 subrepo.writestate(repo, newsubstate)
3157 subrepo.writestate(repo, newsubstate)
3160
3158
3161 filestoamend = set(f for f in wctx.files() if matcher(f))
3159 filestoamend = set(f for f in wctx.files() if matcher(f))
3162
3160
3163 changes = (len(filestoamend) > 0)
3161 changes = (len(filestoamend) > 0)
3164 if changes:
3162 if changes:
3165 # Recompute copies (avoid recording a -> b -> a)
3163 # Recompute copies (avoid recording a -> b -> a)
3166 copied = copies.pathcopies(base, wctx, matcher)
3164 copied = copies.pathcopies(base, wctx, matcher)
3167 if old.p2:
3165 if old.p2:
3168 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3166 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3169
3167
3170 # Prune files which were reverted by the updates: if old
3168 # Prune files which were reverted by the updates: if old
3171 # introduced file X and the file was renamed in the working
3169 # introduced file X and the file was renamed in the working
3172 # copy, then those two files are the same and
3170 # copy, then those two files are the same and
3173 # we can discard X from our list of files. Likewise if X
3171 # we can discard X from our list of files. Likewise if X
3174 # was removed, it's no longer relevant. If X is missing (aka
3172 # was removed, it's no longer relevant. If X is missing (aka
3175 # deleted), old X must be preserved.
3173 # deleted), old X must be preserved.
3176 files.update(filestoamend)
3174 files.update(filestoamend)
3177 files = [f for f in files if (not samefile(f, wctx, base)
3175 files = [f for f in files if (not samefile(f, wctx, base)
3178 or f in wctx.deleted())]
3176 or f in wctx.deleted())]
3179
3177
3180 def filectxfn(repo, ctx_, path):
3178 def filectxfn(repo, ctx_, path):
3181 try:
3179 try:
3182 # If the file being considered is not amongst the files
3180 # If the file being considered is not amongst the files
3183 # to be amended, we should return the file context from the
3181 # to be amended, we should return the file context from the
3184 # old changeset. This avoids issues when only some files in
3182 # old changeset. This avoids issues when only some files in
3185 # the working copy are being amended but there are also
3183 # the working copy are being amended but there are also
3186 # changes to other files from the old changeset.
3184 # changes to other files from the old changeset.
3187 if path not in filestoamend:
3185 if path not in filestoamend:
3188 return old.filectx(path)
3186 return old.filectx(path)
3189
3187
3190 # Return None for removed files.
3188 # Return None for removed files.
3191 if path in wctx.removed():
3189 if path in wctx.removed():
3192 return None
3190 return None
3193
3191
3194 fctx = wctx[path]
3192 fctx = wctx[path]
3195 flags = fctx.flags()
3193 flags = fctx.flags()
3196 mctx = context.memfilectx(repo, ctx_,
3194 mctx = context.memfilectx(repo, ctx_,
3197 fctx.path(), fctx.data(),
3195 fctx.path(), fctx.data(),
3198 islink='l' in flags,
3196 islink='l' in flags,
3199 isexec='x' in flags,
3197 isexec='x' in flags,
3200 copied=copied.get(path))
3198 copied=copied.get(path))
3201 return mctx
3199 return mctx
3202 except KeyError:
3200 except KeyError:
3203 return None
3201 return None
3204 else:
3202 else:
3205 ui.note(_('copying changeset %s to %s\n') % (old, base))
3203 ui.note(_('copying changeset %s to %s\n') % (old, base))
3206
3204
3207 # Use version of files as in the old cset
3205 # Use version of files as in the old cset
3208 def filectxfn(repo, ctx_, path):
3206 def filectxfn(repo, ctx_, path):
3209 try:
3207 try:
3210 return old.filectx(path)
3208 return old.filectx(path)
3211 except KeyError:
3209 except KeyError:
3212 return None
3210 return None
3213
3211
3214 # See if we got a message from -m or -l, if not, open the editor with
3212 # See if we got a message from -m or -l, if not, open the editor with
3215 # the message of the changeset to amend.
3213 # the message of the changeset to amend.
3216 message = logmessage(ui, opts)
3214 message = logmessage(ui, opts)
3217
3215
3218 editform = mergeeditform(old, 'commit.amend')
3216 editform = mergeeditform(old, 'commit.amend')
3219 editor = getcommiteditor(editform=editform,
3217 editor = getcommiteditor(editform=editform,
3220 **pycompat.strkwargs(opts))
3218 **pycompat.strkwargs(opts))
3221
3219
3222 if not message:
3220 if not message:
3223 editor = getcommiteditor(edit=True, editform=editform)
3221 editor = getcommiteditor(edit=True, editform=editform)
3224 message = old.description()
3222 message = old.description()
3225
3223
3226 pureextra = extra.copy()
3224 pureextra = extra.copy()
3227 extra['amend_source'] = old.hex()
3225 extra['amend_source'] = old.hex()
3228
3226
3229 new = context.memctx(repo,
3227 new = context.memctx(repo,
3230 parents=[base.node(), old.p2().node()],
3228 parents=[base.node(), old.p2().node()],
3231 text=message,
3229 text=message,
3232 files=files,
3230 files=files,
3233 filectxfn=filectxfn,
3231 filectxfn=filectxfn,
3234 user=user,
3232 user=user,
3235 date=date,
3233 date=date,
3236 extra=extra,
3234 extra=extra,
3237 editor=editor)
3235 editor=editor)
3238
3236
3239 newdesc = changelog.stripdesc(new.description())
3237 newdesc = changelog.stripdesc(new.description())
3240 if ((not changes)
3238 if ((not changes)
3241 and newdesc == old.description()
3239 and newdesc == old.description()
3242 and user == old.user()
3240 and user == old.user()
3243 and date == old.date()
3241 and date == old.date()
3244 and pureextra == old.extra()):
3242 and pureextra == old.extra()):
3245 # nothing changed. continuing here would create a new node
3243 # nothing changed. continuing here would create a new node
3246 # anyway because of the amend_source noise.
3244 # anyway because of the amend_source noise.
3247 #
3245 #
3248 # This not what we expect from amend.
3246 # This not what we expect from amend.
3249 return old.node()
3247 return old.node()
3250
3248
3251 if opts.get('secret'):
3249 if opts.get('secret'):
3252 commitphase = 'secret'
3250 commitphase = 'secret'
3253 else:
3251 else:
3254 commitphase = old.phase()
3252 commitphase = old.phase()
3255 overrides = {('phases', 'new-commit'): commitphase}
3253 overrides = {('phases', 'new-commit'): commitphase}
3256 with ui.configoverride(overrides, 'amend'):
3254 with ui.configoverride(overrides, 'amend'):
3257 newid = repo.commitctx(new)
3255 newid = repo.commitctx(new)
3258
3256
3259 # Reroute the working copy parent to the new changeset
3257 # Reroute the working copy parent to the new changeset
3260 repo.setparents(newid, nullid)
3258 repo.setparents(newid, nullid)
3261 mapping = {old.node(): (newid,)}
3259 mapping = {old.node(): (newid,)}
3262 obsmetadata = None
3260 obsmetadata = None
3263 if opts.get('note'):
3261 if opts.get('note'):
3264 obsmetadata = {'note': opts['note']}
3262 obsmetadata = {'note': opts['note']}
3265 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)
3263 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)
3266
3264
3267 # Fixing the dirstate because localrepo.commitctx does not update
3265 # Fixing the dirstate because localrepo.commitctx does not update
3268 # it. This is rather convenient because we did not need to update
3266 # it. This is rather convenient because we did not need to update
3269 # the dirstate for all the files in the new commit which commitctx
3267 # the dirstate for all the files in the new commit which commitctx
3270 # could have done if it updated the dirstate. Now, we can
3268 # could have done if it updated the dirstate. Now, we can
3271 # selectively update the dirstate only for the amended files.
3269 # selectively update the dirstate only for the amended files.
3272 dirstate = repo.dirstate
3270 dirstate = repo.dirstate
3273
3271
3274 # Update the state of the files which were added and
3272 # Update the state of the files which were added and
3275 # and modified in the amend to "normal" in the dirstate.
3273 # and modified in the amend to "normal" in the dirstate.
3276 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3274 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3277 for f in normalfiles:
3275 for f in normalfiles:
3278 dirstate.normal(f)
3276 dirstate.normal(f)
3279
3277
3280 # Update the state of files which were removed in the amend
3278 # Update the state of files which were removed in the amend
3281 # to "removed" in the dirstate.
3279 # to "removed" in the dirstate.
3282 removedfiles = set(wctx.removed()) & filestoamend
3280 removedfiles = set(wctx.removed()) & filestoamend
3283 for f in removedfiles:
3281 for f in removedfiles:
3284 dirstate.drop(f)
3282 dirstate.drop(f)
3285
3283
3286 return newid
3284 return newid
3287
3285
3288 def commiteditor(repo, ctx, subs, editform=''):
3286 def commiteditor(repo, ctx, subs, editform=''):
3289 if ctx.description():
3287 if ctx.description():
3290 return ctx.description()
3288 return ctx.description()
3291 return commitforceeditor(repo, ctx, subs, editform=editform,
3289 return commitforceeditor(repo, ctx, subs, editform=editform,
3292 unchangedmessagedetection=True)
3290 unchangedmessagedetection=True)
3293
3291
3294 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
3292 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
3295 editform='', unchangedmessagedetection=False):
3293 editform='', unchangedmessagedetection=False):
3296 if not extramsg:
3294 if not extramsg:
3297 extramsg = _("Leave message empty to abort commit.")
3295 extramsg = _("Leave message empty to abort commit.")
3298
3296
3299 forms = [e for e in editform.split('.') if e]
3297 forms = [e for e in editform.split('.') if e]
3300 forms.insert(0, 'changeset')
3298 forms.insert(0, 'changeset')
3301 templatetext = None
3299 templatetext = None
3302 while forms:
3300 while forms:
3303 ref = '.'.join(forms)
3301 ref = '.'.join(forms)
3304 if repo.ui.config('committemplate', ref):
3302 if repo.ui.config('committemplate', ref):
3305 templatetext = committext = buildcommittemplate(
3303 templatetext = committext = buildcommittemplate(
3306 repo, ctx, subs, extramsg, ref)
3304 repo, ctx, subs, extramsg, ref)
3307 break
3305 break
3308 forms.pop()
3306 forms.pop()
3309 else:
3307 else:
3310 committext = buildcommittext(repo, ctx, subs, extramsg)
3308 committext = buildcommittext(repo, ctx, subs, extramsg)
3311
3309
3312 # run editor in the repository root
3310 # run editor in the repository root
3313 olddir = pycompat.getcwd()
3311 olddir = pycompat.getcwd()
3314 os.chdir(repo.root)
3312 os.chdir(repo.root)
3315
3313
3316 # make in-memory changes visible to external process
3314 # make in-memory changes visible to external process
3317 tr = repo.currenttransaction()
3315 tr = repo.currenttransaction()
3318 repo.dirstate.write(tr)
3316 repo.dirstate.write(tr)
3319 pending = tr and tr.writepending() and repo.root
3317 pending = tr and tr.writepending() and repo.root
3320
3318
3321 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
3319 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
3322 editform=editform, pending=pending,
3320 editform=editform, pending=pending,
3323 repopath=repo.path, action='commit')
3321 repopath=repo.path, action='commit')
3324 text = editortext
3322 text = editortext
3325
3323
3326 # strip away anything below this special string (used for editors that want
3324 # strip away anything below this special string (used for editors that want
3327 # to display the diff)
3325 # to display the diff)
3328 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3326 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3329 if stripbelow:
3327 if stripbelow:
3330 text = text[:stripbelow.start()]
3328 text = text[:stripbelow.start()]
3331
3329
3332 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
3330 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
3333 os.chdir(olddir)
3331 os.chdir(olddir)
3334
3332
3335 if finishdesc:
3333 if finishdesc:
3336 text = finishdesc(text)
3334 text = finishdesc(text)
3337 if not text.strip():
3335 if not text.strip():
3338 raise error.Abort(_("empty commit message"))
3336 raise error.Abort(_("empty commit message"))
3339 if unchangedmessagedetection and editortext == templatetext:
3337 if unchangedmessagedetection and editortext == templatetext:
3340 raise error.Abort(_("commit message unchanged"))
3338 raise error.Abort(_("commit message unchanged"))
3341
3339
3342 return text
3340 return text
3343
3341
3344 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3342 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3345 ui = repo.ui
3343 ui = repo.ui
3346 spec = formatter.templatespec(ref, None, None)
3344 spec = formatter.templatespec(ref, None, None)
3347 t = changeset_templater(ui, repo, spec, None, {}, False)
3345 t = changeset_templater(ui, repo, spec, None, {}, False)
3348 t.t.cache.update((k, templater.unquotestring(v))
3346 t.t.cache.update((k, templater.unquotestring(v))
3349 for k, v in repo.ui.configitems('committemplate'))
3347 for k, v in repo.ui.configitems('committemplate'))
3350
3348
3351 if not extramsg:
3349 if not extramsg:
3352 extramsg = '' # ensure that extramsg is string
3350 extramsg = '' # ensure that extramsg is string
3353
3351
3354 ui.pushbuffer()
3352 ui.pushbuffer()
3355 t.show(ctx, extramsg=extramsg)
3353 t.show(ctx, extramsg=extramsg)
3356 return ui.popbuffer()
3354 return ui.popbuffer()
3357
3355
3358 def hgprefix(msg):
3356 def hgprefix(msg):
3359 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
3357 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
3360
3358
3361 def buildcommittext(repo, ctx, subs, extramsg):
3359 def buildcommittext(repo, ctx, subs, extramsg):
3362 edittext = []
3360 edittext = []
3363 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3361 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3364 if ctx.description():
3362 if ctx.description():
3365 edittext.append(ctx.description())
3363 edittext.append(ctx.description())
3366 edittext.append("")
3364 edittext.append("")
3367 edittext.append("") # Empty line between message and comments.
3365 edittext.append("") # Empty line between message and comments.
3368 edittext.append(hgprefix(_("Enter commit message."
3366 edittext.append(hgprefix(_("Enter commit message."
3369 " Lines beginning with 'HG:' are removed.")))
3367 " Lines beginning with 'HG:' are removed.")))
3370 edittext.append(hgprefix(extramsg))
3368 edittext.append(hgprefix(extramsg))
3371 edittext.append("HG: --")
3369 edittext.append("HG: --")
3372 edittext.append(hgprefix(_("user: %s") % ctx.user()))
3370 edittext.append(hgprefix(_("user: %s") % ctx.user()))
3373 if ctx.p2():
3371 if ctx.p2():
3374 edittext.append(hgprefix(_("branch merge")))
3372 edittext.append(hgprefix(_("branch merge")))
3375 if ctx.branch():
3373 if ctx.branch():
3376 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
3374 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
3377 if bookmarks.isactivewdirparent(repo):
3375 if bookmarks.isactivewdirparent(repo):
3378 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
3376 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
3379 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
3377 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
3380 edittext.extend([hgprefix(_("added %s") % f) for f in added])
3378 edittext.extend([hgprefix(_("added %s") % f) for f in added])
3381 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
3379 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
3382 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
3380 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
3383 if not added and not modified and not removed:
3381 if not added and not modified and not removed:
3384 edittext.append(hgprefix(_("no files changed")))
3382 edittext.append(hgprefix(_("no files changed")))
3385 edittext.append("")
3383 edittext.append("")
3386
3384
3387 return "\n".join(edittext)
3385 return "\n".join(edittext)
3388
3386
3389 def commitstatus(repo, node, branch, bheads=None, opts=None):
3387 def commitstatus(repo, node, branch, bheads=None, opts=None):
3390 if opts is None:
3388 if opts is None:
3391 opts = {}
3389 opts = {}
3392 ctx = repo[node]
3390 ctx = repo[node]
3393 parents = ctx.parents()
3391 parents = ctx.parents()
3394
3392
3395 if (not opts.get('amend') and bheads and node not in bheads and not
3393 if (not opts.get('amend') and bheads and node not in bheads and not
3396 [x for x in parents if x.node() in bheads and x.branch() == branch]):
3394 [x for x in parents if x.node() in bheads and x.branch() == branch]):
3397 repo.ui.status(_('created new head\n'))
3395 repo.ui.status(_('created new head\n'))
3398 # The message is not printed for initial roots. For the other
3396 # The message is not printed for initial roots. For the other
3399 # changesets, it is printed in the following situations:
3397 # changesets, it is printed in the following situations:
3400 #
3398 #
3401 # Par column: for the 2 parents with ...
3399 # Par column: for the 2 parents with ...
3402 # N: null or no parent
3400 # N: null or no parent
3403 # B: parent is on another named branch
3401 # B: parent is on another named branch
3404 # C: parent is a regular non head changeset
3402 # C: parent is a regular non head changeset
3405 # H: parent was a branch head of the current branch
3403 # H: parent was a branch head of the current branch
3406 # Msg column: whether we print "created new head" message
3404 # Msg column: whether we print "created new head" message
3407 # In the following, it is assumed that there already exists some
3405 # In the following, it is assumed that there already exists some
3408 # initial branch heads of the current branch, otherwise nothing is
3406 # initial branch heads of the current branch, otherwise nothing is
3409 # printed anyway.
3407 # printed anyway.
3410 #
3408 #
3411 # Par Msg Comment
3409 # Par Msg Comment
3412 # N N y additional topo root
3410 # N N y additional topo root
3413 #
3411 #
3414 # B N y additional branch root
3412 # B N y additional branch root
3415 # C N y additional topo head
3413 # C N y additional topo head
3416 # H N n usual case
3414 # H N n usual case
3417 #
3415 #
3418 # B B y weird additional branch root
3416 # B B y weird additional branch root
3419 # C B y branch merge
3417 # C B y branch merge
3420 # H B n merge with named branch
3418 # H B n merge with named branch
3421 #
3419 #
3422 # C C y additional head from merge
3420 # C C y additional head from merge
3423 # C H n merge with a head
3421 # C H n merge with a head
3424 #
3422 #
3425 # H H n head merge: head count decreases
3423 # H H n head merge: head count decreases
3426
3424
3427 if not opts.get('close_branch'):
3425 if not opts.get('close_branch'):
3428 for r in parents:
3426 for r in parents:
3429 if r.closesbranch() and r.branch() == branch:
3427 if r.closesbranch() and r.branch() == branch:
3430 repo.ui.status(_('reopening closed branch head %d\n') % r)
3428 repo.ui.status(_('reopening closed branch head %d\n') % r)
3431
3429
3432 if repo.ui.debugflag:
3430 if repo.ui.debugflag:
3433 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
3431 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
3434 elif repo.ui.verbose:
3432 elif repo.ui.verbose:
3435 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3433 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3436
3434
def postcommitstatus(repo, pats, opts):
    """Return the working-directory status for paths matched by pats/opts.

    Used right after a commit to report what is still modified in the
    working copy.
    """
    wctx = repo[None]
    matcher = scmutil.match(wctx, pats, opts)
    return repo.status(match=matcher)
3439
3437
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Revert files in the working directory to their state in ``ctx``.

    ``parents`` is the (p1, p2) node pair of the working directory.
    The function classifies every matched file into an action bucket
    (revert/add/remove/drop/forget/undelete/noop/unknown) together with
    a backup strategy, then delegates the actual filesystem and dirstate
    work to _performrevert().
    """
    opts = pycompat.byteskwargs(opts)
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # suppress warnings for paths already collected, for
                # subrepos, and for directories that contain collected files
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If a such file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                   }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set do avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that results that will change file on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since targe, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets does not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, bytes):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats,
                                         **pycompat.strkwargs(opts))
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))
3724
3722
3725 def _revertprefetch(repo, ctx, *files):
3723 def _revertprefetch(repo, ctx, *files):
3726 """Let extension changing the storage layer prefetch content"""
3724 """Let extension changing the storage layer prefetch content"""
3727
3725
def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.

    ``actions`` maps action names ('forget', 'remove', 'drop', 'revert',
    'add', 'undelete', ...) to ([files], message) pairs as built by
    revert().  ``tobackup`` is the set of files to back up before they are
    modified interactively.
    """
    parent, p2 = parents
    node = ctx.node()
    excluded_files = []
    # files the user declines to touch in interactive mode are appended to
    # excluded_files, which the matcher below reads via this dict
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        # write the target revision's content of f into the working dir
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # remove f from disk (best-effort) and mark it removed in dirstate
        try:
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root, cached=True)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'apply'
            reversehunks = False
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except error.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    # propagate copy/rename metadata from the target revision
    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3864
3862
class command(registrar.command):
    """Deprecated alias for registrar.command; use that class instead."""

    def _doregister(self, func, name, *args, **kwargs):
        # Flag the command function so extensions.py can emit a
        # deprecation warning for extensions still using this class.
        func._deprecatedregistrar = True
        parent = super(command, self)
        return parent._doregister(func, name, *args, **kwargs)
3870
3868
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# Each entry is a tuple of:
#   (state file, clearable, allowcommit, error, hint)
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3899
3897
3900 def checkunfinished(repo, commit=False):
3898 def checkunfinished(repo, commit=False):
3901 '''Look for an unfinished multistep operation, like graft, and abort
3899 '''Look for an unfinished multistep operation, like graft, and abort
3902 if found. It's probably good to check this right before
3900 if found. It's probably good to check this right before
3903 bailifchanged().
3901 bailifchanged().
3904 '''
3902 '''
3905 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3903 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3906 if commit and allowcommit:
3904 if commit and allowcommit:
3907 continue
3905 continue
3908 if repo.vfs.exists(f):
3906 if repo.vfs.exists(f):
3909 raise error.Abort(msg, hint=hint)
3907 raise error.Abort(msg, hint=hint)
3910
3908
3911 def clearunfinished(repo):
3909 def clearunfinished(repo):
3912 '''Check for unfinished operations (as above), and clear the ones
3910 '''Check for unfinished operations (as above), and clear the ones
3913 that are clearable.
3911 that are clearable.
3914 '''
3912 '''
3915 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3913 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3916 if not clearable and repo.vfs.exists(f):
3914 if not clearable and repo.vfs.exists(f):
3917 raise error.Abort(msg, hint=hint)
3915 raise error.Abort(msg, hint=hint)
3918 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3916 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3919 if clearable and repo.vfs.exists(f):
3917 if clearable and repo.vfs.exists(f):
3920 util.unlink(repo.vfs.join(f))
3918 util.unlink(repo.vfs.join(f))
3921
3919
3922 afterresolvedstates = [
3920 afterresolvedstates = [
3923 ('graftstate',
3921 ('graftstate',
3924 _('hg graft --continue')),
3922 _('hg graft --continue')),
3925 ]
3923 ]
3926
3924
3927 def howtocontinue(repo):
3925 def howtocontinue(repo):
3928 '''Check for an unfinished operation and return the command to finish
3926 '''Check for an unfinished operation and return the command to finish
3929 it.
3927 it.
3930
3928
3931 afterresolvedstates tuples define a .hg/{file} and the corresponding
3929 afterresolvedstates tuples define a .hg/{file} and the corresponding
3932 command needed to finish it.
3930 command needed to finish it.
3933
3931
3934 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3932 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3935 a boolean.
3933 a boolean.
3936 '''
3934 '''
3937 contmsg = _("continue: %s")
3935 contmsg = _("continue: %s")
3938 for f, msg in afterresolvedstates:
3936 for f, msg in afterresolvedstates:
3939 if repo.vfs.exists(f):
3937 if repo.vfs.exists(f):
3940 return contmsg % msg, True
3938 return contmsg % msg, True
3941 if repo[None].dirty(missing=True, merge=False, branch=False):
3939 if repo[None].dirty(missing=True, merge=False, branch=False):
3942 return contmsg % _("hg commit"), False
3940 return contmsg % _("hg commit"), False
3943 return None, None
3941 return None, None
3944
3942
3945 def checkafterresolved(repo):
3943 def checkafterresolved(repo):
3946 '''Inform the user about the next action after completing hg resolve
3944 '''Inform the user about the next action after completing hg resolve
3947
3945
3948 If there's a matching afterresolvedstates, howtocontinue will yield
3946 If there's a matching afterresolvedstates, howtocontinue will yield
3949 repo.ui.warn as the reporter.
3947 repo.ui.warn as the reporter.
3950
3948
3951 Otherwise, it will yield repo.ui.note.
3949 Otherwise, it will yield repo.ui.note.
3952 '''
3950 '''
3953 msg, warning = howtocontinue(repo)
3951 msg, warning = howtocontinue(repo)
3954 if msg is not None:
3952 if msg is not None:
3955 if warning:
3953 if warning:
3956 repo.ui.warn("%s\n" % msg)
3954 repo.ui.warn("%s\n" % msg)
3957 else:
3955 else:
3958 repo.ui.note("%s\n" % msg)
3956 repo.ui.note("%s\n" % msg)
3959
3957
3960 def wrongtooltocontinue(repo, task):
3958 def wrongtooltocontinue(repo, task):
3961 '''Raise an abort suggesting how to properly continue if there is an
3959 '''Raise an abort suggesting how to properly continue if there is an
3962 active task.
3960 active task.
3963
3961
3964 Uses howtocontinue() to find the active task.
3962 Uses howtocontinue() to find the active task.
3965
3963
3966 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3964 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3967 a hint.
3965 a hint.
3968 '''
3966 '''
3969 after = howtocontinue(repo)
3967 after = howtocontinue(repo)
3970 hint = None
3968 hint = None
3971 if after[1]:
3969 if after[1]:
3972 hint = after[0]
3970 hint = after[0]
3973 raise error.Abort(_('no %s in progress') % task, hint=hint)
3971 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,531 +1,531 b''
1 # formatter.py - generic output formatting for mercurial
1 # formatter.py - generic output formatting for mercurial
2 #
2 #
3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """Generic output formatting for Mercurial
8 """Generic output formatting for Mercurial
9
9
10 The formatter provides API to show data in various ways. The following
10 The formatter provides API to show data in various ways. The following
11 functions should be used in place of ui.write():
11 functions should be used in place of ui.write():
12
12
13 - fm.write() for unconditional output
13 - fm.write() for unconditional output
14 - fm.condwrite() to show some extra data conditionally in plain output
14 - fm.condwrite() to show some extra data conditionally in plain output
15 - fm.context() to provide changectx to template output
15 - fm.context() to provide changectx to template output
16 - fm.data() to provide extra data to JSON or template output
16 - fm.data() to provide extra data to JSON or template output
17 - fm.plain() to show raw text that isn't provided to JSON or template output
17 - fm.plain() to show raw text that isn't provided to JSON or template output
18
18
19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
20 beforehand so the data is converted to the appropriate data type. Use
20 beforehand so the data is converted to the appropriate data type. Use
21 fm.isplain() if you need to convert or format data conditionally which isn't
21 fm.isplain() if you need to convert or format data conditionally which isn't
22 supported by the formatter API.
22 supported by the formatter API.
23
23
24 To build nested structure (i.e. a list of dicts), use fm.nested().
24 To build nested structure (i.e. a list of dicts), use fm.nested().
25
25
26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
27
27
28 fm.condwrite() vs 'if cond:':
28 fm.condwrite() vs 'if cond:':
29
29
30 In most cases, use fm.condwrite() so users can selectively show the data
30 In most cases, use fm.condwrite() so users can selectively show the data
31 in template output. If it's costly to build data, use plain 'if cond:' with
31 in template output. If it's costly to build data, use plain 'if cond:' with
32 fm.write().
32 fm.write().
33
33
34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
35
35
36 fm.nested() should be used to form a tree structure (a list of dicts of
36 fm.nested() should be used to form a tree structure (a list of dicts of
37 lists of dicts...) which can be accessed through template keywords, e.g.
37 lists of dicts...) which can be accessed through template keywords, e.g.
38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
39 exports a dict-type object to template, which can be accessed by e.g.
39 exports a dict-type object to template, which can be accessed by e.g.
40 "{get(foo, key)}" function.
40 "{get(foo, key)}" function.
41
41
42 Doctest helper:
42 Doctest helper:
43
43
44 >>> def show(fn, verbose=False, **opts):
44 >>> def show(fn, verbose=False, **opts):
45 ... import sys
45 ... import sys
46 ... from . import ui as uimod
46 ... from . import ui as uimod
47 ... ui = uimod.ui()
47 ... ui = uimod.ui()
48 ... ui.verbose = verbose
48 ... ui.verbose = verbose
49 ... ui.pushbuffer()
49 ... ui.pushbuffer()
50 ... try:
50 ... try:
51 ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__),
51 ... return fn(ui, ui.formatter(pycompat.sysbytes(fn.__name__),
52 ... pycompat.byteskwargs(opts)))
52 ... pycompat.byteskwargs(opts)))
53 ... finally:
53 ... finally:
54 ... print(pycompat.sysstr(ui.popbuffer()), end='')
54 ... print(pycompat.sysstr(ui.popbuffer()), end='')
55
55
56 Basic example:
56 Basic example:
57
57
58 >>> def files(ui, fm):
58 >>> def files(ui, fm):
59 ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))]
59 ... files = [(b'foo', 123, (0, 0)), (b'bar', 456, (1, 0))]
60 ... for f in files:
60 ... for f in files:
61 ... fm.startitem()
61 ... fm.startitem()
62 ... fm.write(b'path', b'%s', f[0])
62 ... fm.write(b'path', b'%s', f[0])
63 ... fm.condwrite(ui.verbose, b'date', b' %s',
63 ... fm.condwrite(ui.verbose, b'date', b' %s',
64 ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S'))
64 ... fm.formatdate(f[2], b'%Y-%m-%d %H:%M:%S'))
65 ... fm.data(size=f[1])
65 ... fm.data(size=f[1])
66 ... fm.plain(b'\\n')
66 ... fm.plain(b'\\n')
67 ... fm.end()
67 ... fm.end()
68 >>> show(files)
68 >>> show(files)
69 foo
69 foo
70 bar
70 bar
71 >>> show(files, verbose=True)
71 >>> show(files, verbose=True)
72 foo 1970-01-01 00:00:00
72 foo 1970-01-01 00:00:00
73 bar 1970-01-01 00:00:01
73 bar 1970-01-01 00:00:01
74 >>> show(files, template=b'json')
74 >>> show(files, template=b'json')
75 [
75 [
76 {
76 {
77 "date": [0, 0],
77 "date": [0, 0],
78 "path": "foo",
78 "path": "foo",
79 "size": 123
79 "size": 123
80 },
80 },
81 {
81 {
82 "date": [1, 0],
82 "date": [1, 0],
83 "path": "bar",
83 "path": "bar",
84 "size": 456
84 "size": 456
85 }
85 }
86 ]
86 ]
87 >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n')
87 >>> show(files, template=b'path: {path}\\ndate: {date|rfc3339date}\\n')
88 path: foo
88 path: foo
89 date: 1970-01-01T00:00:00+00:00
89 date: 1970-01-01T00:00:00+00:00
90 path: bar
90 path: bar
91 date: 1970-01-01T00:00:01+00:00
91 date: 1970-01-01T00:00:01+00:00
92
92
93 Nested example:
93 Nested example:
94
94
95 >>> def subrepos(ui, fm):
95 >>> def subrepos(ui, fm):
96 ... fm.startitem()
96 ... fm.startitem()
97 ... fm.write(b'repo', b'[%s]\\n', b'baz')
97 ... fm.write(b'repo', b'[%s]\\n', b'baz')
98 ... files(ui, fm.nested(b'files'))
98 ... files(ui, fm.nested(b'files'))
99 ... fm.end()
99 ... fm.end()
100 >>> show(subrepos)
100 >>> show(subrepos)
101 [baz]
101 [baz]
102 foo
102 foo
103 bar
103 bar
104 >>> show(subrepos, template=b'{repo}: {join(files % "{path}", ", ")}\\n')
104 >>> show(subrepos, template=b'{repo}: {join(files % "{path}", ", ")}\\n')
105 baz: foo, bar
105 baz: foo, bar
106 """
106 """
107
107
108 from __future__ import absolute_import, print_function
108 from __future__ import absolute_import, print_function
109
109
110 import collections
110 import collections
111 import contextlib
111 import contextlib
112 import itertools
112 import itertools
113 import os
113 import os
114
114
115 from .i18n import _
115 from .i18n import _
116 from .node import (
116 from .node import (
117 hex,
117 hex,
118 short,
118 short,
119 )
119 )
120
120
121 from . import (
121 from . import (
122 error,
122 error,
123 pycompat,
123 pycompat,
124 templatefilters,
124 templatefilters,
125 templatekw,
125 templatekw,
126 templater,
126 templater,
127 util,
127 util,
128 )
128 )
129
129
130 pickle = util.pickle
130 pickle = util.pickle
131
131
132 class _nullconverter(object):
132 class _nullconverter(object):
133 '''convert non-primitive data types to be processed by formatter'''
133 '''convert non-primitive data types to be processed by formatter'''
134
134
135 # set to True if context object should be stored as item
135 # set to True if context object should be stored as item
136 storecontext = False
136 storecontext = False
137
137
138 @staticmethod
138 @staticmethod
139 def formatdate(date, fmt):
139 def formatdate(date, fmt):
140 '''convert date tuple to appropriate format'''
140 '''convert date tuple to appropriate format'''
141 return date
141 return date
142 @staticmethod
142 @staticmethod
143 def formatdict(data, key, value, fmt, sep):
143 def formatdict(data, key, value, fmt, sep):
144 '''convert dict or key-value pairs to appropriate dict format'''
144 '''convert dict or key-value pairs to appropriate dict format'''
145 # use plain dict instead of util.sortdict so that data can be
145 # use plain dict instead of util.sortdict so that data can be
146 # serialized as a builtin dict in pickle output
146 # serialized as a builtin dict in pickle output
147 return dict(data)
147 return dict(data)
148 @staticmethod
148 @staticmethod
149 def formatlist(data, name, fmt, sep):
149 def formatlist(data, name, fmt, sep):
150 '''convert iterable to appropriate list format'''
150 '''convert iterable to appropriate list format'''
151 return list(data)
151 return list(data)
152
152
153 class baseformatter(object):
153 class baseformatter(object):
154 def __init__(self, ui, topic, opts, converter):
154 def __init__(self, ui, topic, opts, converter):
155 self._ui = ui
155 self._ui = ui
156 self._topic = topic
156 self._topic = topic
157 self._style = opts.get("style")
157 self._style = opts.get("style")
158 self._template = opts.get("template")
158 self._template = opts.get("template")
159 self._converter = converter
159 self._converter = converter
160 self._item = None
160 self._item = None
161 # function to convert node to string suitable for this output
161 # function to convert node to string suitable for this output
162 self.hexfunc = hex
162 self.hexfunc = hex
163 def __enter__(self):
163 def __enter__(self):
164 return self
164 return self
165 def __exit__(self, exctype, excvalue, traceback):
165 def __exit__(self, exctype, excvalue, traceback):
166 if exctype is None:
166 if exctype is None:
167 self.end()
167 self.end()
168 def _showitem(self):
168 def _showitem(self):
169 '''show a formatted item once all data is collected'''
169 '''show a formatted item once all data is collected'''
170 def startitem(self):
170 def startitem(self):
171 '''begin an item in the format list'''
171 '''begin an item in the format list'''
172 if self._item is not None:
172 if self._item is not None:
173 self._showitem()
173 self._showitem()
174 self._item = {}
174 self._item = {}
175 def formatdate(self, date, fmt='%a %b %d %H:%M:%S %Y %1%2'):
175 def formatdate(self, date, fmt='%a %b %d %H:%M:%S %Y %1%2'):
176 '''convert date tuple to appropriate format'''
176 '''convert date tuple to appropriate format'''
177 return self._converter.formatdate(date, fmt)
177 return self._converter.formatdate(date, fmt)
178 def formatdict(self, data, key='key', value='value', fmt='%s=%s', sep=' '):
178 def formatdict(self, data, key='key', value='value', fmt='%s=%s', sep=' '):
179 '''convert dict or key-value pairs to appropriate dict format'''
179 '''convert dict or key-value pairs to appropriate dict format'''
180 return self._converter.formatdict(data, key, value, fmt, sep)
180 return self._converter.formatdict(data, key, value, fmt, sep)
181 def formatlist(self, data, name, fmt='%s', sep=' '):
181 def formatlist(self, data, name, fmt='%s', sep=' '):
182 '''convert iterable to appropriate list format'''
182 '''convert iterable to appropriate list format'''
183 # name is mandatory argument for now, but it could be optional if
183 # name is mandatory argument for now, but it could be optional if
184 # we have default template keyword, e.g. {item}
184 # we have default template keyword, e.g. {item}
185 return self._converter.formatlist(data, name, fmt, sep)
185 return self._converter.formatlist(data, name, fmt, sep)
186 def context(self, **ctxs):
186 def context(self, **ctxs):
187 '''insert context objects to be used to render template keywords'''
187 '''insert context objects to be used to render template keywords'''
188 ctxs = pycompat.byteskwargs(ctxs)
188 ctxs = pycompat.byteskwargs(ctxs)
189 assert all(k == 'ctx' for k in ctxs)
189 assert all(k == 'ctx' for k in ctxs)
190 if self._converter.storecontext:
190 if self._converter.storecontext:
191 self._item.update(ctxs)
191 self._item.update(ctxs)
192 def data(self, **data):
192 def data(self, **data):
193 '''insert data into item that's not shown in default output'''
193 '''insert data into item that's not shown in default output'''
194 data = pycompat.byteskwargs(data)
194 data = pycompat.byteskwargs(data)
195 self._item.update(data)
195 self._item.update(data)
196 def write(self, fields, deftext, *fielddata, **opts):
196 def write(self, fields, deftext, *fielddata, **opts):
197 '''do default text output while assigning data to item'''
197 '''do default text output while assigning data to item'''
198 fieldkeys = fields.split()
198 fieldkeys = fields.split()
199 assert len(fieldkeys) == len(fielddata)
199 assert len(fieldkeys) == len(fielddata)
200 self._item.update(zip(fieldkeys, fielddata))
200 self._item.update(zip(fieldkeys, fielddata))
201 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
201 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
202 '''do conditional write (primarily for plain formatter)'''
202 '''do conditional write (primarily for plain formatter)'''
203 fieldkeys = fields.split()
203 fieldkeys = fields.split()
204 assert len(fieldkeys) == len(fielddata)
204 assert len(fieldkeys) == len(fielddata)
205 self._item.update(zip(fieldkeys, fielddata))
205 self._item.update(zip(fieldkeys, fielddata))
206 def plain(self, text, **opts):
206 def plain(self, text, **opts):
207 '''show raw text for non-templated mode'''
207 '''show raw text for non-templated mode'''
208 def isplain(self):
208 def isplain(self):
209 '''check for plain formatter usage'''
209 '''check for plain formatter usage'''
210 return False
210 return False
211 def nested(self, field):
211 def nested(self, field):
212 '''sub formatter to store nested data in the specified field'''
212 '''sub formatter to store nested data in the specified field'''
213 self._item[field] = data = []
213 self._item[field] = data = []
214 return _nestedformatter(self._ui, self._converter, data)
214 return _nestedformatter(self._ui, self._converter, data)
215 def end(self):
215 def end(self):
216 '''end output for the formatter'''
216 '''end output for the formatter'''
217 if self._item is not None:
217 if self._item is not None:
218 self._showitem()
218 self._showitem()
219
219
220 def nullformatter(ui, topic):
220 def nullformatter(ui, topic):
221 '''formatter that prints nothing'''
221 '''formatter that prints nothing'''
222 return baseformatter(ui, topic, opts={}, converter=_nullconverter)
222 return baseformatter(ui, topic, opts={}, converter=_nullconverter)
223
223
224 class _nestedformatter(baseformatter):
224 class _nestedformatter(baseformatter):
225 '''build sub items and store them in the parent formatter'''
225 '''build sub items and store them in the parent formatter'''
226 def __init__(self, ui, converter, data):
226 def __init__(self, ui, converter, data):
227 baseformatter.__init__(self, ui, topic='', opts={}, converter=converter)
227 baseformatter.__init__(self, ui, topic='', opts={}, converter=converter)
228 self._data = data
228 self._data = data
229 def _showitem(self):
229 def _showitem(self):
230 self._data.append(self._item)
230 self._data.append(self._item)
231
231
232 def _iteritems(data):
232 def _iteritems(data):
233 '''iterate key-value pairs in stable order'''
233 '''iterate key-value pairs in stable order'''
234 if isinstance(data, dict):
234 if isinstance(data, dict):
235 return sorted(data.iteritems())
235 return sorted(data.iteritems())
236 return data
236 return data
237
237
238 class _plainconverter(object):
238 class _plainconverter(object):
239 '''convert non-primitive data types to text'''
239 '''convert non-primitive data types to text'''
240
240
241 storecontext = False
241 storecontext = False
242
242
243 @staticmethod
243 @staticmethod
244 def formatdate(date, fmt):
244 def formatdate(date, fmt):
245 '''stringify date tuple in the given format'''
245 '''stringify date tuple in the given format'''
246 return util.datestr(date, fmt)
246 return util.datestr(date, fmt)
247 @staticmethod
247 @staticmethod
248 def formatdict(data, key, value, fmt, sep):
248 def formatdict(data, key, value, fmt, sep):
249 '''stringify key-value pairs separated by sep'''
249 '''stringify key-value pairs separated by sep'''
250 return sep.join(fmt % (k, v) for k, v in _iteritems(data))
250 return sep.join(fmt % (k, v) for k, v in _iteritems(data))
251 @staticmethod
251 @staticmethod
252 def formatlist(data, name, fmt, sep):
252 def formatlist(data, name, fmt, sep):
253 '''stringify iterable separated by sep'''
253 '''stringify iterable separated by sep'''
254 return sep.join(fmt % e for e in data)
254 return sep.join(fmt % e for e in data)
255
255
256 class plainformatter(baseformatter):
256 class plainformatter(baseformatter):
257 '''the default text output scheme'''
257 '''the default text output scheme'''
258 def __init__(self, ui, out, topic, opts):
258 def __init__(self, ui, out, topic, opts):
259 baseformatter.__init__(self, ui, topic, opts, _plainconverter)
259 baseformatter.__init__(self, ui, topic, opts, _plainconverter)
260 if ui.debugflag:
260 if ui.debugflag:
261 self.hexfunc = hex
261 self.hexfunc = hex
262 else:
262 else:
263 self.hexfunc = short
263 self.hexfunc = short
264 if ui is out:
264 if ui is out:
265 self._write = ui.write
265 self._write = ui.write
266 else:
266 else:
267 self._write = lambda s, **opts: out.write(s)
267 self._write = lambda s, **opts: out.write(s)
268 def startitem(self):
268 def startitem(self):
269 pass
269 pass
270 def data(self, **data):
270 def data(self, **data):
271 pass
271 pass
272 def write(self, fields, deftext, *fielddata, **opts):
272 def write(self, fields, deftext, *fielddata, **opts):
273 self._write(deftext % fielddata, **opts)
273 self._write(deftext % fielddata, **opts)
274 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
274 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
275 '''do conditional write'''
275 '''do conditional write'''
276 if cond:
276 if cond:
277 self._write(deftext % fielddata, **opts)
277 self._write(deftext % fielddata, **opts)
278 def plain(self, text, **opts):
278 def plain(self, text, **opts):
279 self._write(text, **opts)
279 self._write(text, **opts)
280 def isplain(self):
280 def isplain(self):
281 return True
281 return True
282 def nested(self, field):
282 def nested(self, field):
283 # nested data will be directly written to ui
283 # nested data will be directly written to ui
284 return self
284 return self
285 def end(self):
285 def end(self):
286 pass
286 pass
287
287
288 class debugformatter(baseformatter):
288 class debugformatter(baseformatter):
289 def __init__(self, ui, out, topic, opts):
289 def __init__(self, ui, out, topic, opts):
290 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
290 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
291 self._out = out
291 self._out = out
292 self._out.write("%s = [\n" % self._topic)
292 self._out.write("%s = [\n" % self._topic)
293 def _showitem(self):
293 def _showitem(self):
294 self._out.write(" " + repr(self._item) + ",\n")
294 self._out.write(" " + repr(self._item) + ",\n")
295 def end(self):
295 def end(self):
296 baseformatter.end(self)
296 baseformatter.end(self)
297 self._out.write("]\n")
297 self._out.write("]\n")
298
298
299 class pickleformatter(baseformatter):
299 class pickleformatter(baseformatter):
300 def __init__(self, ui, out, topic, opts):
300 def __init__(self, ui, out, topic, opts):
301 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
301 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
302 self._out = out
302 self._out = out
303 self._data = []
303 self._data = []
304 def _showitem(self):
304 def _showitem(self):
305 self._data.append(self._item)
305 self._data.append(self._item)
306 def end(self):
306 def end(self):
307 baseformatter.end(self)
307 baseformatter.end(self)
308 self._out.write(pickle.dumps(self._data))
308 self._out.write(pickle.dumps(self._data))
309
309
310 class jsonformatter(baseformatter):
310 class jsonformatter(baseformatter):
311 def __init__(self, ui, out, topic, opts):
311 def __init__(self, ui, out, topic, opts):
312 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
312 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
313 self._out = out
313 self._out = out
314 self._out.write("[")
314 self._out.write("[")
315 self._first = True
315 self._first = True
316 def _showitem(self):
316 def _showitem(self):
317 if self._first:
317 if self._first:
318 self._first = False
318 self._first = False
319 else:
319 else:
320 self._out.write(",")
320 self._out.write(",")
321
321
322 self._out.write("\n {\n")
322 self._out.write("\n {\n")
323 first = True
323 first = True
324 for k, v in sorted(self._item.items()):
324 for k, v in sorted(self._item.items()):
325 if first:
325 if first:
326 first = False
326 first = False
327 else:
327 else:
328 self._out.write(",\n")
328 self._out.write(",\n")
329 u = templatefilters.json(v, paranoid=False)
329 u = templatefilters.json(v, paranoid=False)
330 self._out.write(' "%s": %s' % (k, u))
330 self._out.write(' "%s": %s' % (k, u))
331 self._out.write("\n }")
331 self._out.write("\n }")
332 def end(self):
332 def end(self):
333 baseformatter.end(self)
333 baseformatter.end(self)
334 self._out.write("\n]\n")
334 self._out.write("\n]\n")
335
335
336 class _templateconverter(object):
336 class _templateconverter(object):
337 '''convert non-primitive data types to be processed by templater'''
337 '''convert non-primitive data types to be processed by templater'''
338
338
339 storecontext = True
339 storecontext = True
340
340
341 @staticmethod
341 @staticmethod
342 def formatdate(date, fmt):
342 def formatdate(date, fmt):
343 '''return date tuple'''
343 '''return date tuple'''
344 return date
344 return date
345 @staticmethod
345 @staticmethod
346 def formatdict(data, key, value, fmt, sep):
346 def formatdict(data, key, value, fmt, sep):
347 '''build object that can be evaluated as either plain string or dict'''
347 '''build object that can be evaluated as either plain string or dict'''
348 data = util.sortdict(_iteritems(data))
348 data = util.sortdict(_iteritems(data))
349 def f():
349 def f():
350 yield _plainconverter.formatdict(data, key, value, fmt, sep)
350 yield _plainconverter.formatdict(data, key, value, fmt, sep)
351 return templatekw.hybriddict(data, key=key, value=value, fmt=fmt, gen=f)
351 return templatekw.hybriddict(data, key=key, value=value, fmt=fmt, gen=f)
352 @staticmethod
352 @staticmethod
353 def formatlist(data, name, fmt, sep):
353 def formatlist(data, name, fmt, sep):
354 '''build object that can be evaluated as either plain string or list'''
354 '''build object that can be evaluated as either plain string or list'''
355 data = list(data)
355 data = list(data)
356 def f():
356 def f():
357 yield _plainconverter.formatlist(data, name, fmt, sep)
357 yield _plainconverter.formatlist(data, name, fmt, sep)
358 return templatekw.hybridlist(data, name=name, fmt=fmt, gen=f)
358 return templatekw.hybridlist(data, name=name, fmt=fmt, gen=f)
359
359
360 class templateformatter(baseformatter):
360 class templateformatter(baseformatter):
361 def __init__(self, ui, out, topic, opts):
361 def __init__(self, ui, out, topic, opts):
362 baseformatter.__init__(self, ui, topic, opts, _templateconverter)
362 baseformatter.__init__(self, ui, topic, opts, _templateconverter)
363 self._out = out
363 self._out = out
364 spec = lookuptemplate(ui, topic, opts.get('template', ''))
364 spec = lookuptemplate(ui, topic, opts.get('template', ''))
365 self._tref = spec.ref
365 self._tref = spec.ref
366 self._t = loadtemplater(ui, spec, cache=templatekw.defaulttempl)
366 self._t = loadtemplater(ui, spec, cache=templatekw.defaulttempl)
367 self._parts = templatepartsmap(spec, self._t,
367 self._parts = templatepartsmap(spec, self._t,
368 ['docheader', 'docfooter', 'separator'])
368 ['docheader', 'docfooter', 'separator'])
369 self._counter = itertools.count()
369 self._counter = itertools.count()
370 self._cache = {} # for templatekw/funcs to store reusable data
370 self._cache = {} # for templatekw/funcs to store reusable data
371 self._renderitem('docheader', {})
371 self._renderitem('docheader', {})
372
372
373 def _showitem(self):
373 def _showitem(self):
374 item = self._item.copy()
374 item = self._item.copy()
375 item['index'] = index = next(self._counter)
375 item['index'] = index = next(self._counter)
376 if index > 0:
376 if index > 0:
377 self._renderitem('separator', {})
377 self._renderitem('separator', {})
378 self._renderitem(self._tref, item)
378 self._renderitem(self._tref, item)
379
379
380 def _renderitem(self, part, item):
380 def _renderitem(self, part, item):
381 if part not in self._parts:
381 if part not in self._parts:
382 return
382 return
383 ref = self._parts[part]
383 ref = self._parts[part]
384
384
385 # TODO: add support for filectx. probably each template keyword or
385 # TODO: add support for filectx. probably each template keyword or
386 # function will have to declare dependent resources. e.g.
386 # function will have to declare dependent resources. e.g.
387 # @templatekeyword(..., requires=('ctx',))
387 # @templatekeyword(..., requires=('ctx',))
388 props = {}
388 props = {}
389 if 'ctx' in item:
389 if 'ctx' in item:
390 props.update(templatekw.keywords)
390 props.update(templatekw.keywords)
391 # explicitly-defined fields precede templatekw
391 # explicitly-defined fields precede templatekw
392 props.update(item)
392 props.update(item)
393 if 'ctx' in item:
393 if 'ctx' in item:
394 # but template resources must be always available
394 # but template resources must be always available
395 props['templ'] = self._t
396 props['repo'] = props['ctx'].repo()
395 props['repo'] = props['ctx'].repo()
397 props['revcache'] = {}
396 props['revcache'] = {}
398 props = pycompat.strkwargs(props)
397 props = pycompat.strkwargs(props)
399 g = self._t(ref, ui=self._ui, cache=self._cache, **props)
398 g = self._t(ref, ui=self._ui, cache=self._cache, **props)
400 self._out.write(templater.stringify(g))
399 self._out.write(templater.stringify(g))
401
400
402 def end(self):
401 def end(self):
403 baseformatter.end(self)
402 baseformatter.end(self)
404 self._renderitem('docfooter', {})
403 self._renderitem('docfooter', {})
405
404
406 templatespec = collections.namedtuple(r'templatespec',
405 templatespec = collections.namedtuple(r'templatespec',
407 r'ref tmpl mapfile')
406 r'ref tmpl mapfile')
408
407
409 def lookuptemplate(ui, topic, tmpl):
408 def lookuptemplate(ui, topic, tmpl):
410 """Find the template matching the given -T/--template spec 'tmpl'
409 """Find the template matching the given -T/--template spec 'tmpl'
411
410
412 'tmpl' can be any of the following:
411 'tmpl' can be any of the following:
413
412
414 - a literal template (e.g. '{rev}')
413 - a literal template (e.g. '{rev}')
415 - a map-file name or path (e.g. 'changelog')
414 - a map-file name or path (e.g. 'changelog')
416 - a reference to [templates] in config file
415 - a reference to [templates] in config file
417 - a path to raw template file
416 - a path to raw template file
418
417
419 A map file defines a stand-alone template environment. If a map file
418 A map file defines a stand-alone template environment. If a map file
420 selected, all templates defined in the file will be loaded, and the
419 selected, all templates defined in the file will be loaded, and the
421 template matching the given topic will be rendered. Aliases won't be
420 template matching the given topic will be rendered. Aliases won't be
422 loaded from user config, but from the map file.
421 loaded from user config, but from the map file.
423
422
424 If no map file selected, all templates in [templates] section will be
423 If no map file selected, all templates in [templates] section will be
425 available as well as aliases in [templatealias].
424 available as well as aliases in [templatealias].
426 """
425 """
427
426
428 # looks like a literal template?
427 # looks like a literal template?
429 if '{' in tmpl:
428 if '{' in tmpl:
430 return templatespec('', tmpl, None)
429 return templatespec('', tmpl, None)
431
430
432 # perhaps a stock style?
431 # perhaps a stock style?
433 if not os.path.split(tmpl)[0]:
432 if not os.path.split(tmpl)[0]:
434 mapname = (templater.templatepath('map-cmdline.' + tmpl)
433 mapname = (templater.templatepath('map-cmdline.' + tmpl)
435 or templater.templatepath(tmpl))
434 or templater.templatepath(tmpl))
436 if mapname and os.path.isfile(mapname):
435 if mapname and os.path.isfile(mapname):
437 return templatespec(topic, None, mapname)
436 return templatespec(topic, None, mapname)
438
437
439 # perhaps it's a reference to [templates]
438 # perhaps it's a reference to [templates]
440 if ui.config('templates', tmpl):
439 if ui.config('templates', tmpl):
441 return templatespec(tmpl, None, None)
440 return templatespec(tmpl, None, None)
442
441
443 if tmpl == 'list':
442 if tmpl == 'list':
444 ui.write(_("available styles: %s\n") % templater.stylelist())
443 ui.write(_("available styles: %s\n") % templater.stylelist())
445 raise error.Abort(_("specify a template"))
444 raise error.Abort(_("specify a template"))
446
445
447 # perhaps it's a path to a map or a template
446 # perhaps it's a path to a map or a template
448 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
447 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
449 # is it a mapfile for a style?
448 # is it a mapfile for a style?
450 if os.path.basename(tmpl).startswith("map-"):
449 if os.path.basename(tmpl).startswith("map-"):
451 return templatespec(topic, None, os.path.realpath(tmpl))
450 return templatespec(topic, None, os.path.realpath(tmpl))
452 with util.posixfile(tmpl, 'rb') as f:
451 with util.posixfile(tmpl, 'rb') as f:
453 tmpl = f.read()
452 tmpl = f.read()
454 return templatespec('', tmpl, None)
453 return templatespec('', tmpl, None)
455
454
456 # constant string?
455 # constant string?
457 return templatespec('', tmpl, None)
456 return templatespec('', tmpl, None)
458
457
459 def templatepartsmap(spec, t, partnames):
458 def templatepartsmap(spec, t, partnames):
460 """Create a mapping of {part: ref}"""
459 """Create a mapping of {part: ref}"""
461 partsmap = {spec.ref: spec.ref} # initial ref must exist in t
460 partsmap = {spec.ref: spec.ref} # initial ref must exist in t
462 if spec.mapfile:
461 if spec.mapfile:
463 partsmap.update((p, p) for p in partnames if p in t)
462 partsmap.update((p, p) for p in partnames if p in t)
464 elif spec.ref:
463 elif spec.ref:
465 for part in partnames:
464 for part in partnames:
466 ref = '%s:%s' % (spec.ref, part) # select config sub-section
465 ref = '%s:%s' % (spec.ref, part) # select config sub-section
467 if ref in t:
466 if ref in t:
468 partsmap[part] = ref
467 partsmap[part] = ref
469 return partsmap
468 return partsmap
470
469
471 def loadtemplater(ui, spec, cache=None):
470 def loadtemplater(ui, spec, resources=None, cache=None):
472 """Create a templater from either a literal template or loading from
471 """Create a templater from either a literal template or loading from
473 a map file"""
472 a map file"""
474 assert not (spec.tmpl and spec.mapfile)
473 assert not (spec.tmpl and spec.mapfile)
475 if spec.mapfile:
474 if spec.mapfile:
476 return templater.templater.frommapfile(spec.mapfile, cache=cache)
475 frommapfile = templater.templater.frommapfile
477 return maketemplater(ui, spec.tmpl, cache=cache)
476 return frommapfile(spec.mapfile, resources=resources, cache=cache)
477 return maketemplater(ui, spec.tmpl, resources=resources, cache=cache)
478
478
479 def maketemplater(ui, tmpl, cache=None):
479 def maketemplater(ui, tmpl, resources=None, cache=None):
480 """Create a templater from a string template 'tmpl'"""
480 """Create a templater from a string template 'tmpl'"""
481 aliases = ui.configitems('templatealias')
481 aliases = ui.configitems('templatealias')
482 t = templater.templater(cache=cache, aliases=aliases)
482 t = templater.templater(resources=resources, cache=cache, aliases=aliases)
483 t.cache.update((k, templater.unquotestring(v))
483 t.cache.update((k, templater.unquotestring(v))
484 for k, v in ui.configitems('templates'))
484 for k, v in ui.configitems('templates'))
485 if tmpl:
485 if tmpl:
486 t.cache[''] = tmpl
486 t.cache[''] = tmpl
487 return t
487 return t
488
488
489 def formatter(ui, out, topic, opts):
489 def formatter(ui, out, topic, opts):
490 template = opts.get("template", "")
490 template = opts.get("template", "")
491 if template == "json":
491 if template == "json":
492 return jsonformatter(ui, out, topic, opts)
492 return jsonformatter(ui, out, topic, opts)
493 elif template == "pickle":
493 elif template == "pickle":
494 return pickleformatter(ui, out, topic, opts)
494 return pickleformatter(ui, out, topic, opts)
495 elif template == "debug":
495 elif template == "debug":
496 return debugformatter(ui, out, topic, opts)
496 return debugformatter(ui, out, topic, opts)
497 elif template != "":
497 elif template != "":
498 return templateformatter(ui, out, topic, opts)
498 return templateformatter(ui, out, topic, opts)
499 # developer config: ui.formatdebug
499 # developer config: ui.formatdebug
500 elif ui.configbool('ui', 'formatdebug'):
500 elif ui.configbool('ui', 'formatdebug'):
501 return debugformatter(ui, out, topic, opts)
501 return debugformatter(ui, out, topic, opts)
502 # deprecated config: ui.formatjson
502 # deprecated config: ui.formatjson
503 elif ui.configbool('ui', 'formatjson'):
503 elif ui.configbool('ui', 'formatjson'):
504 return jsonformatter(ui, out, topic, opts)
504 return jsonformatter(ui, out, topic, opts)
505 return plainformatter(ui, out, topic, opts)
505 return plainformatter(ui, out, topic, opts)
506
506
507 @contextlib.contextmanager
507 @contextlib.contextmanager
508 def openformatter(ui, filename, topic, opts):
508 def openformatter(ui, filename, topic, opts):
509 """Create a formatter that writes outputs to the specified file
509 """Create a formatter that writes outputs to the specified file
510
510
511 Must be invoked using the 'with' statement.
511 Must be invoked using the 'with' statement.
512 """
512 """
513 with util.posixfile(filename, 'wb') as out:
513 with util.posixfile(filename, 'wb') as out:
514 with formatter(ui, out, topic, opts) as fm:
514 with formatter(ui, out, topic, opts) as fm:
515 yield fm
515 yield fm
516
516
517 @contextlib.contextmanager
517 @contextlib.contextmanager
518 def _neverending(fm):
518 def _neverending(fm):
519 yield fm
519 yield fm
520
520
521 def maybereopen(fm, filename, opts):
521 def maybereopen(fm, filename, opts):
522 """Create a formatter backed by file if filename specified, else return
522 """Create a formatter backed by file if filename specified, else return
523 the given formatter
523 the given formatter
524
524
525 Must be invoked using the 'with' statement. This will never call fm.end()
525 Must be invoked using the 'with' statement. This will never call fm.end()
526 of the given formatter.
526 of the given formatter.
527 """
527 """
528 if filename:
528 if filename:
529 return openformatter(fm._ui, filename, fm._topic, opts)
529 return openformatter(fm._ui, filename, fm._topic, opts)
530 else:
530 else:
531 return _neverending(fm)
531 return _neverending(fm)
@@ -1,1538 +1,1566 b''
1 # templater.py - template expansion for output
1 # templater.py - template expansion for output
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import, print_function
8 from __future__ import absolute_import, print_function
9
9
10 import os
10 import os
11 import re
11 import re
12 import types
12 import types
13
13
14 from .i18n import _
14 from .i18n import _
15 from . import (
15 from . import (
16 color,
16 color,
17 config,
17 config,
18 encoding,
18 encoding,
19 error,
19 error,
20 minirst,
20 minirst,
21 obsutil,
21 obsutil,
22 parser,
22 parser,
23 pycompat,
23 pycompat,
24 registrar,
24 registrar,
25 revset as revsetmod,
25 revset as revsetmod,
26 revsetlang,
26 revsetlang,
27 scmutil,
27 scmutil,
28 templatefilters,
28 templatefilters,
29 templatekw,
29 templatekw,
30 util,
30 util,
31 )
31 )
32
32
33 # template parsing
33 # template parsing
34
34
35 elements = {
35 elements = {
36 # token-type: binding-strength, primary, prefix, infix, suffix
36 # token-type: binding-strength, primary, prefix, infix, suffix
37 "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
37 "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
38 ".": (18, None, None, (".", 18), None),
38 ".": (18, None, None, (".", 18), None),
39 "%": (15, None, None, ("%", 15), None),
39 "%": (15, None, None, ("%", 15), None),
40 "|": (15, None, None, ("|", 15), None),
40 "|": (15, None, None, ("|", 15), None),
41 "*": (5, None, None, ("*", 5), None),
41 "*": (5, None, None, ("*", 5), None),
42 "/": (5, None, None, ("/", 5), None),
42 "/": (5, None, None, ("/", 5), None),
43 "+": (4, None, None, ("+", 4), None),
43 "+": (4, None, None, ("+", 4), None),
44 "-": (4, None, ("negate", 19), ("-", 4), None),
44 "-": (4, None, ("negate", 19), ("-", 4), None),
45 "=": (3, None, None, ("keyvalue", 3), None),
45 "=": (3, None, None, ("keyvalue", 3), None),
46 ",": (2, None, None, ("list", 2), None),
46 ",": (2, None, None, ("list", 2), None),
47 ")": (0, None, None, None, None),
47 ")": (0, None, None, None, None),
48 "integer": (0, "integer", None, None, None),
48 "integer": (0, "integer", None, None, None),
49 "symbol": (0, "symbol", None, None, None),
49 "symbol": (0, "symbol", None, None, None),
50 "string": (0, "string", None, None, None),
50 "string": (0, "string", None, None, None),
51 "template": (0, "template", None, None, None),
51 "template": (0, "template", None, None, None),
52 "end": (0, None, None, None, None),
52 "end": (0, None, None, None, None),
53 }
53 }
54
54
55 def tokenize(program, start, end, term=None):
55 def tokenize(program, start, end, term=None):
56 """Parse a template expression into a stream of tokens, which must end
56 """Parse a template expression into a stream of tokens, which must end
57 with term if specified"""
57 with term if specified"""
58 pos = start
58 pos = start
59 program = pycompat.bytestr(program)
59 program = pycompat.bytestr(program)
60 while pos < end:
60 while pos < end:
61 c = program[pos]
61 c = program[pos]
62 if c.isspace(): # skip inter-token whitespace
62 if c.isspace(): # skip inter-token whitespace
63 pass
63 pass
64 elif c in "(=,).%|+-*/": # handle simple operators
64 elif c in "(=,).%|+-*/": # handle simple operators
65 yield (c, None, pos)
65 yield (c, None, pos)
66 elif c in '"\'': # handle quoted templates
66 elif c in '"\'': # handle quoted templates
67 s = pos + 1
67 s = pos + 1
68 data, pos = _parsetemplate(program, s, end, c)
68 data, pos = _parsetemplate(program, s, end, c)
69 yield ('template', data, s)
69 yield ('template', data, s)
70 pos -= 1
70 pos -= 1
71 elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'):
71 elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'):
72 # handle quoted strings
72 # handle quoted strings
73 c = program[pos + 1]
73 c = program[pos + 1]
74 s = pos = pos + 2
74 s = pos = pos + 2
75 while pos < end: # find closing quote
75 while pos < end: # find closing quote
76 d = program[pos]
76 d = program[pos]
77 if d == '\\': # skip over escaped characters
77 if d == '\\': # skip over escaped characters
78 pos += 2
78 pos += 2
79 continue
79 continue
80 if d == c:
80 if d == c:
81 yield ('string', program[s:pos], s)
81 yield ('string', program[s:pos], s)
82 break
82 break
83 pos += 1
83 pos += 1
84 else:
84 else:
85 raise error.ParseError(_("unterminated string"), s)
85 raise error.ParseError(_("unterminated string"), s)
86 elif c.isdigit():
86 elif c.isdigit():
87 s = pos
87 s = pos
88 while pos < end:
88 while pos < end:
89 d = program[pos]
89 d = program[pos]
90 if not d.isdigit():
90 if not d.isdigit():
91 break
91 break
92 pos += 1
92 pos += 1
93 yield ('integer', program[s:pos], s)
93 yield ('integer', program[s:pos], s)
94 pos -= 1
94 pos -= 1
95 elif (c == '\\' and program[pos:pos + 2] in (r"\'", r'\"')
95 elif (c == '\\' and program[pos:pos + 2] in (r"\'", r'\"')
96 or c == 'r' and program[pos:pos + 3] in (r"r\'", r'r\"')):
96 or c == 'r' and program[pos:pos + 3] in (r"r\'", r'r\"')):
97 # handle escaped quoted strings for compatibility with 2.9.2-3.4,
97 # handle escaped quoted strings for compatibility with 2.9.2-3.4,
98 # where some of nested templates were preprocessed as strings and
98 # where some of nested templates were preprocessed as strings and
99 # then compiled. therefore, \"...\" was allowed. (issue4733)
99 # then compiled. therefore, \"...\" was allowed. (issue4733)
100 #
100 #
101 # processing flow of _evalifliteral() at 5ab28a2e9962:
101 # processing flow of _evalifliteral() at 5ab28a2e9962:
102 # outer template string -> stringify() -> compiletemplate()
102 # outer template string -> stringify() -> compiletemplate()
103 # ------------------------ ------------ ------------------
103 # ------------------------ ------------ ------------------
104 # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}]
104 # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}]
105 # ~~~~~~~~
105 # ~~~~~~~~
106 # escaped quoted string
106 # escaped quoted string
107 if c == 'r':
107 if c == 'r':
108 pos += 1
108 pos += 1
109 token = 'string'
109 token = 'string'
110 else:
110 else:
111 token = 'template'
111 token = 'template'
112 quote = program[pos:pos + 2]
112 quote = program[pos:pos + 2]
113 s = pos = pos + 2
113 s = pos = pos + 2
114 while pos < end: # find closing escaped quote
114 while pos < end: # find closing escaped quote
115 if program.startswith('\\\\\\', pos, end):
115 if program.startswith('\\\\\\', pos, end):
116 pos += 4 # skip over double escaped characters
116 pos += 4 # skip over double escaped characters
117 continue
117 continue
118 if program.startswith(quote, pos, end):
118 if program.startswith(quote, pos, end):
119 # interpret as if it were a part of an outer string
119 # interpret as if it were a part of an outer string
120 data = parser.unescapestr(program[s:pos])
120 data = parser.unescapestr(program[s:pos])
121 if token == 'template':
121 if token == 'template':
122 data = _parsetemplate(data, 0, len(data))[0]
122 data = _parsetemplate(data, 0, len(data))[0]
123 yield (token, data, s)
123 yield (token, data, s)
124 pos += 1
124 pos += 1
125 break
125 break
126 pos += 1
126 pos += 1
127 else:
127 else:
128 raise error.ParseError(_("unterminated string"), s)
128 raise error.ParseError(_("unterminated string"), s)
129 elif c.isalnum() or c in '_':
129 elif c.isalnum() or c in '_':
130 s = pos
130 s = pos
131 pos += 1
131 pos += 1
132 while pos < end: # find end of symbol
132 while pos < end: # find end of symbol
133 d = program[pos]
133 d = program[pos]
134 if not (d.isalnum() or d == "_"):
134 if not (d.isalnum() or d == "_"):
135 break
135 break
136 pos += 1
136 pos += 1
137 sym = program[s:pos]
137 sym = program[s:pos]
138 yield ('symbol', sym, s)
138 yield ('symbol', sym, s)
139 pos -= 1
139 pos -= 1
140 elif c == term:
140 elif c == term:
141 yield ('end', None, pos + 1)
141 yield ('end', None, pos + 1)
142 return
142 return
143 else:
143 else:
144 raise error.ParseError(_("syntax error"), pos)
144 raise error.ParseError(_("syntax error"), pos)
145 pos += 1
145 pos += 1
146 if term:
146 if term:
147 raise error.ParseError(_("unterminated template expansion"), start)
147 raise error.ParseError(_("unterminated template expansion"), start)
148 yield ('end', None, pos)
148 yield ('end', None, pos)
149
149
150 def _parsetemplate(tmpl, start, stop, quote=''):
150 def _parsetemplate(tmpl, start, stop, quote=''):
151 r"""
151 r"""
152 >>> _parsetemplate(b'foo{bar}"baz', 0, 12)
152 >>> _parsetemplate(b'foo{bar}"baz', 0, 12)
153 ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
153 ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
154 >>> _parsetemplate(b'foo{bar}"baz', 0, 12, quote=b'"')
154 >>> _parsetemplate(b'foo{bar}"baz', 0, 12, quote=b'"')
155 ([('string', 'foo'), ('symbol', 'bar')], 9)
155 ([('string', 'foo'), ('symbol', 'bar')], 9)
156 >>> _parsetemplate(b'foo"{bar}', 0, 9, quote=b'"')
156 >>> _parsetemplate(b'foo"{bar}', 0, 9, quote=b'"')
157 ([('string', 'foo')], 4)
157 ([('string', 'foo')], 4)
158 >>> _parsetemplate(br'foo\"bar"baz', 0, 12, quote=b'"')
158 >>> _parsetemplate(br'foo\"bar"baz', 0, 12, quote=b'"')
159 ([('string', 'foo"'), ('string', 'bar')], 9)
159 ([('string', 'foo"'), ('string', 'bar')], 9)
160 >>> _parsetemplate(br'foo\\"bar', 0, 10, quote=b'"')
160 >>> _parsetemplate(br'foo\\"bar', 0, 10, quote=b'"')
161 ([('string', 'foo\\')], 6)
161 ([('string', 'foo\\')], 6)
162 """
162 """
163 parsed = []
163 parsed = []
164 sepchars = '{' + quote
164 sepchars = '{' + quote
165 pos = start
165 pos = start
166 p = parser.parser(elements)
166 p = parser.parser(elements)
167 while pos < stop:
167 while pos < stop:
168 n = min((tmpl.find(c, pos, stop) for c in sepchars),
168 n = min((tmpl.find(c, pos, stop) for c in sepchars),
169 key=lambda n: (n < 0, n))
169 key=lambda n: (n < 0, n))
170 if n < 0:
170 if n < 0:
171 parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
171 parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
172 pos = stop
172 pos = stop
173 break
173 break
174 c = tmpl[n:n + 1]
174 c = tmpl[n:n + 1]
175 bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
175 bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
176 if bs % 2 == 1:
176 if bs % 2 == 1:
177 # escaped (e.g. '\{', '\\\{', but not '\\{')
177 # escaped (e.g. '\{', '\\\{', but not '\\{')
178 parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c))
178 parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c))
179 pos = n + 1
179 pos = n + 1
180 continue
180 continue
181 if n > pos:
181 if n > pos:
182 parsed.append(('string', parser.unescapestr(tmpl[pos:n])))
182 parsed.append(('string', parser.unescapestr(tmpl[pos:n])))
183 if c == quote:
183 if c == quote:
184 return parsed, n + 1
184 return parsed, n + 1
185
185
186 parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
186 parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
187 parsed.append(parseres)
187 parsed.append(parseres)
188
188
189 if quote:
189 if quote:
190 raise error.ParseError(_("unterminated string"), start)
190 raise error.ParseError(_("unterminated string"), start)
191 return parsed, pos
191 return parsed, pos
192
192
193 def _unnesttemplatelist(tree):
193 def _unnesttemplatelist(tree):
194 """Expand list of templates to node tuple
194 """Expand list of templates to node tuple
195
195
196 >>> def f(tree):
196 >>> def f(tree):
197 ... print(pycompat.sysstr(prettyformat(_unnesttemplatelist(tree))))
197 ... print(pycompat.sysstr(prettyformat(_unnesttemplatelist(tree))))
198 >>> f((b'template', []))
198 >>> f((b'template', []))
199 (string '')
199 (string '')
200 >>> f((b'template', [(b'string', b'foo')]))
200 >>> f((b'template', [(b'string', b'foo')]))
201 (string 'foo')
201 (string 'foo')
202 >>> f((b'template', [(b'string', b'foo'), (b'symbol', b'rev')]))
202 >>> f((b'template', [(b'string', b'foo'), (b'symbol', b'rev')]))
203 (template
203 (template
204 (string 'foo')
204 (string 'foo')
205 (symbol 'rev'))
205 (symbol 'rev'))
206 >>> f((b'template', [(b'symbol', b'rev')])) # template(rev) -> str
206 >>> f((b'template', [(b'symbol', b'rev')])) # template(rev) -> str
207 (template
207 (template
208 (symbol 'rev'))
208 (symbol 'rev'))
209 >>> f((b'template', [(b'template', [(b'string', b'foo')])]))
209 >>> f((b'template', [(b'template', [(b'string', b'foo')])]))
210 (string 'foo')
210 (string 'foo')
211 """
211 """
212 if not isinstance(tree, tuple):
212 if not isinstance(tree, tuple):
213 return tree
213 return tree
214 op = tree[0]
214 op = tree[0]
215 if op != 'template':
215 if op != 'template':
216 return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])
216 return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])
217
217
218 assert len(tree) == 2
218 assert len(tree) == 2
219 xs = tuple(_unnesttemplatelist(x) for x in tree[1])
219 xs = tuple(_unnesttemplatelist(x) for x in tree[1])
220 if not xs:
220 if not xs:
221 return ('string', '') # empty template ""
221 return ('string', '') # empty template ""
222 elif len(xs) == 1 and xs[0][0] == 'string':
222 elif len(xs) == 1 and xs[0][0] == 'string':
223 return xs[0] # fast path for string with no template fragment "x"
223 return xs[0] # fast path for string with no template fragment "x"
224 else:
224 else:
225 return (op,) + xs
225 return (op,) + xs
226
226
227 def parse(tmpl):
227 def parse(tmpl):
228 """Parse template string into tree"""
228 """Parse template string into tree"""
229 parsed, pos = _parsetemplate(tmpl, 0, len(tmpl))
229 parsed, pos = _parsetemplate(tmpl, 0, len(tmpl))
230 assert pos == len(tmpl), 'unquoted template should be consumed'
230 assert pos == len(tmpl), 'unquoted template should be consumed'
231 return _unnesttemplatelist(('template', parsed))
231 return _unnesttemplatelist(('template', parsed))
232
232
233 def _parseexpr(expr):
233 def _parseexpr(expr):
234 """Parse a template expression into tree
234 """Parse a template expression into tree
235
235
236 >>> _parseexpr(b'"foo"')
236 >>> _parseexpr(b'"foo"')
237 ('string', 'foo')
237 ('string', 'foo')
238 >>> _parseexpr(b'foo(bar)')
238 >>> _parseexpr(b'foo(bar)')
239 ('func', ('symbol', 'foo'), ('symbol', 'bar'))
239 ('func', ('symbol', 'foo'), ('symbol', 'bar'))
240 >>> _parseexpr(b'foo(')
240 >>> _parseexpr(b'foo(')
241 Traceback (most recent call last):
241 Traceback (most recent call last):
242 ...
242 ...
243 ParseError: ('not a prefix: end', 4)
243 ParseError: ('not a prefix: end', 4)
244 >>> _parseexpr(b'"foo" "bar"')
244 >>> _parseexpr(b'"foo" "bar"')
245 Traceback (most recent call last):
245 Traceback (most recent call last):
246 ...
246 ...
247 ParseError: ('invalid token', 7)
247 ParseError: ('invalid token', 7)
248 """
248 """
249 p = parser.parser(elements)
249 p = parser.parser(elements)
250 tree, pos = p.parse(tokenize(expr, 0, len(expr)))
250 tree, pos = p.parse(tokenize(expr, 0, len(expr)))
251 if pos != len(expr):
251 if pos != len(expr):
252 raise error.ParseError(_('invalid token'), pos)
252 raise error.ParseError(_('invalid token'), pos)
253 return _unnesttemplatelist(tree)
253 return _unnesttemplatelist(tree)
254
254
255 def prettyformat(tree):
255 def prettyformat(tree):
256 return parser.prettyformat(tree, ('integer', 'string', 'symbol'))
256 return parser.prettyformat(tree, ('integer', 'string', 'symbol'))
257
257
258 def compileexp(exp, context, curmethods):
258 def compileexp(exp, context, curmethods):
259 """Compile parsed template tree to (func, data) pair"""
259 """Compile parsed template tree to (func, data) pair"""
260 t = exp[0]
260 t = exp[0]
261 if t in curmethods:
261 if t in curmethods:
262 return curmethods[t](exp, context)
262 return curmethods[t](exp, context)
263 raise error.ParseError(_("unknown method '%s'") % t)
263 raise error.ParseError(_("unknown method '%s'") % t)
264
264
265 # template evaluation
265 # template evaluation
266
266
267 def getsymbol(exp):
267 def getsymbol(exp):
268 if exp[0] == 'symbol':
268 if exp[0] == 'symbol':
269 return exp[1]
269 return exp[1]
270 raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
270 raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
271
271
272 def getlist(x):
272 def getlist(x):
273 if not x:
273 if not x:
274 return []
274 return []
275 if x[0] == 'list':
275 if x[0] == 'list':
276 return getlist(x[1]) + [x[2]]
276 return getlist(x[1]) + [x[2]]
277 return [x]
277 return [x]
278
278
279 def gettemplate(exp, context):
279 def gettemplate(exp, context):
280 """Compile given template tree or load named template from map file;
280 """Compile given template tree or load named template from map file;
281 returns (func, data) pair"""
281 returns (func, data) pair"""
282 if exp[0] in ('template', 'string'):
282 if exp[0] in ('template', 'string'):
283 return compileexp(exp, context, methods)
283 return compileexp(exp, context, methods)
284 if exp[0] == 'symbol':
284 if exp[0] == 'symbol':
285 # unlike runsymbol(), here 'symbol' is always taken as template name
285 # unlike runsymbol(), here 'symbol' is always taken as template name
286 # even if it exists in mapping. this allows us to override mapping
286 # even if it exists in mapping. this allows us to override mapping
287 # by web templates, e.g. 'changelogtag' is redefined in map file.
287 # by web templates, e.g. 'changelogtag' is redefined in map file.
288 return context._load(exp[1])
288 return context._load(exp[1])
289 raise error.ParseError(_("expected template specifier"))
289 raise error.ParseError(_("expected template specifier"))
290
290
291 def findsymbolicname(arg):
291 def findsymbolicname(arg):
292 """Find symbolic name for the given compiled expression; returns None
292 """Find symbolic name for the given compiled expression; returns None
293 if nothing found reliably"""
293 if nothing found reliably"""
294 while True:
294 while True:
295 func, data = arg
295 func, data = arg
296 if func is runsymbol:
296 if func is runsymbol:
297 return data
297 return data
298 elif func is runfilter:
298 elif func is runfilter:
299 arg = data[0]
299 arg = data[0]
300 else:
300 else:
301 return None
301 return None
302
302
303 def evalrawexp(context, mapping, arg):
303 def evalrawexp(context, mapping, arg):
304 """Evaluate given argument as a bare template object which may require
304 """Evaluate given argument as a bare template object which may require
305 further processing (such as folding generator of strings)"""
305 further processing (such as folding generator of strings)"""
306 func, data = arg
306 func, data = arg
307 return func(context, mapping, data)
307 return func(context, mapping, data)
308
308
309 def evalfuncarg(context, mapping, arg):
309 def evalfuncarg(context, mapping, arg):
310 """Evaluate given argument as value type"""
310 """Evaluate given argument as value type"""
311 thing = evalrawexp(context, mapping, arg)
311 thing = evalrawexp(context, mapping, arg)
312 thing = templatekw.unwrapvalue(thing)
312 thing = templatekw.unwrapvalue(thing)
313 # evalrawexp() may return string, generator of strings or arbitrary object
313 # evalrawexp() may return string, generator of strings or arbitrary object
314 # such as date tuple, but filter does not want generator.
314 # such as date tuple, but filter does not want generator.
315 if isinstance(thing, types.GeneratorType):
315 if isinstance(thing, types.GeneratorType):
316 thing = stringify(thing)
316 thing = stringify(thing)
317 return thing
317 return thing
318
318
319 def evalboolean(context, mapping, arg):
319 def evalboolean(context, mapping, arg):
320 """Evaluate given argument as boolean, but also takes boolean literals"""
320 """Evaluate given argument as boolean, but also takes boolean literals"""
321 func, data = arg
321 func, data = arg
322 if func is runsymbol:
322 if func is runsymbol:
323 thing = func(context, mapping, data, default=None)
323 thing = func(context, mapping, data, default=None)
324 if thing is None:
324 if thing is None:
325 # not a template keyword, takes as a boolean literal
325 # not a template keyword, takes as a boolean literal
326 thing = util.parsebool(data)
326 thing = util.parsebool(data)
327 else:
327 else:
328 thing = func(context, mapping, data)
328 thing = func(context, mapping, data)
329 thing = templatekw.unwrapvalue(thing)
329 thing = templatekw.unwrapvalue(thing)
330 if isinstance(thing, bool):
330 if isinstance(thing, bool):
331 return thing
331 return thing
332 # other objects are evaluated as strings, which means 0 is True, but
332 # other objects are evaluated as strings, which means 0 is True, but
333 # empty dict/list should be False as they are expected to be ''
333 # empty dict/list should be False as they are expected to be ''
334 return bool(stringify(thing))
334 return bool(stringify(thing))
335
335
336 def evalinteger(context, mapping, arg, err=None):
336 def evalinteger(context, mapping, arg, err=None):
337 v = evalfuncarg(context, mapping, arg)
337 v = evalfuncarg(context, mapping, arg)
338 try:
338 try:
339 return int(v)
339 return int(v)
340 except (TypeError, ValueError):
340 except (TypeError, ValueError):
341 raise error.ParseError(err or _('not an integer'))
341 raise error.ParseError(err or _('not an integer'))
342
342
343 def evalstring(context, mapping, arg):
343 def evalstring(context, mapping, arg):
344 return stringify(evalrawexp(context, mapping, arg))
344 return stringify(evalrawexp(context, mapping, arg))
345
345
346 def evalstringliteral(context, mapping, arg):
346 def evalstringliteral(context, mapping, arg):
347 """Evaluate given argument as string template, but returns symbol name
347 """Evaluate given argument as string template, but returns symbol name
348 if it is unknown"""
348 if it is unknown"""
349 func, data = arg
349 func, data = arg
350 if func is runsymbol:
350 if func is runsymbol:
351 thing = func(context, mapping, data, default=data)
351 thing = func(context, mapping, data, default=data)
352 else:
352 else:
353 thing = func(context, mapping, data)
353 thing = func(context, mapping, data)
354 return stringify(thing)
354 return stringify(thing)
355
355
356 _evalfuncbytype = {
356 _evalfuncbytype = {
357 bool: evalboolean,
357 bool: evalboolean,
358 bytes: evalstring,
358 bytes: evalstring,
359 int: evalinteger,
359 int: evalinteger,
360 }
360 }
361
361
def evalastype(context, mapping, arg, typ):
    """Evaluate given argument and coerce its type"""
    evalfn = _evalfuncbytype.get(typ)
    if evalfn is None:
        # only the types listed in _evalfuncbytype are supported
        raise error.ProgrammingError('invalid type specified: %r' % typ)
    return evalfn(context, mapping, arg)
369
369
def runinteger(context, mapping, data):
    """Interpret a compiled integer-literal node (context/mapping unused)."""
    value = int(data)
    return value
372
372
def runstring(context, mapping, data):
    # literal string node: the parsed data is already the final value
    return data
375
375
def _recursivesymbolblocker(key):
    # Return a callable placed into the mapping under *key* so that a
    # template expanding its own name aborts instead of recursing forever.
    def showrecursion(**args):
        raise error.Abort(_("recursive reference '%s' in template") % key)
    return showrecursion
380
380
def _runrecursivesymbol(context, mapping, key):
    # compiled stand-in for a symbol that refers to itself; always aborts
    raise error.Abort(_("recursive reference '%s' in template") % key)
383
383
def runsymbol(context, mapping, key, default=''):
    """Resolve a template symbol (keyword) to its value.

    An unknown symbol falls back to rendering the template of the same
    name; *default* is returned if no such template exists either.
    """
    v = context.symbol(mapping, key)
    if v is None:
        # put poison to cut recursion. we can't move this to parsing phase
        # because "x = {x}" is allowed if "x" is a keyword. (issue4758)
        safemapping = mapping.copy()
        safemapping[key] = _recursivesymbolblocker(key)
        try:
            v = context.process(key, safemapping)
        except TemplateNotFound:
            v = default
    if callable(v):
        # TODO: templatekw functions will be updated to take (context, mapping)
        # pair instead of **props
        props = context._resources.copy()
        props.update(mapping)
        return v(**props)
    return v
398
402
def buildtemplate(exp, context):
    """Compile a parsed template node into a (runtemplate, data) pair."""
    compiled = [compileexp(e, context, methods) for e in exp[1:]]
    return (runtemplate, compiled)
402
406
def runtemplate(context, mapping, template):
    """Generate the evaluated pieces of a compiled template, in order."""
    for piece in template:
        yield evalrawexp(context, mapping, piece)
406
410
def buildfilter(exp, context):
    """Compile a '|' application; the right-hand name may be a filter or
    a single-argument template function."""
    name = getsymbol(exp[2])
    if name in context._filters:
        arg = compileexp(exp[1], context, methods)
        return (runfilter, (arg, context._filters[name]))
    if name in funcs:
        func = funcs[name]
        args = _buildfuncargs(exp[1], context, methods, name, func._argspec)
        return (func, args)
    raise error.ParseError(_("unknown function '%s'") % name)
418
422
def runfilter(context, mapping, data):
    """Apply a compiled filter to its evaluated argument, translating a
    type mismatch into a user-friendly Abort."""
    arg, filt = data
    thing = evalfuncarg(context, mapping, arg)
    try:
        return filt(thing)
    except (ValueError, AttributeError, TypeError):
        filtname = pycompat.sysbytes(filt.__name__)
        sym = findsymbolicname(arg)
        if not sym:
            msg = (_("incompatible use of template filter '%s'")
                   % filtname)
        else:
            msg = (_("template filter '%s' is not compatible with keyword '%s'")
                   % (filtname, sym))
        raise error.Abort(msg)
433
437
def buildmap(exp, context):
    """Compile a '%' map operation into (runmap, (dataarg, templatearg))."""
    dataarg = compileexp(exp[1], context, methods)
    templatearg = gettemplate(exp[2], context)
    return (runmap, (dataarg, templatearg))
438
442
def runmap(context, mapping, data):
    """Expand a template once per item of an iterable ('{xs % "..."}').

    Each dict item is overlaid on a copy of the current mapping with
    'index' set to the iteration count; non-dict items are yielded as-is.
    """
    darg, targ = data
    d = evalrawexp(context, mapping, darg)
    if util.safehasattr(d, 'itermaps'):
        diter = d.itermaps()
    else:
        try:
            diter = iter(d)
        except TypeError:
            sym = findsymbolicname(darg)
            if sym:
                raise error.ParseError(_("keyword '%s' is not iterable") % sym)
            else:
                raise error.ParseError(_("%r is not iterable") % d)

    for i, v in enumerate(diter):
        lm = mapping.copy()
        lm['index'] = i
        if isinstance(v, dict):
            lm.update(v)
            # preserve the outer changeset node for nested templates
            lm['originalnode'] = mapping.get('node')
            yield evalrawexp(context, lm, targ)
        else:
            # v is not an iterable of dicts, this happen when 'key'
            # has been fully expanded already and format is useless.
            # If so, return the expanded value.
            yield v
466
470
def buildmember(exp, context):
    """Compile a '.' member access into (runmember, (dataarg, membername))."""
    dataarg = compileexp(exp[1], context, methods)
    membername = getsymbol(exp[2])
    return (runmember, (dataarg, membername))
471
475
def runmember(context, mapping, data):
    """Resolve a '.' member access on an evaluated object."""
    darg, memb = data
    obj = evalrawexp(context, mapping, darg)
    if util.safehasattr(obj, 'tomap'):
        # mappable object: overlay its keywords and resolve like a symbol
        localmapping = mapping.copy()
        localmapping.update(obj.tomap())
        return runsymbol(context, localmapping, memb)
    if util.safehasattr(obj, 'get'):
        # dict-like object: plain key lookup
        return _getdictitem(obj, memb)

    sym = findsymbolicname(darg)
    if not sym:
        raise error.ParseError(_("%r has no member") % obj)
    raise error.ParseError(_("keyword '%s' has no member") % sym)
487
491
def buildnegate(exp, context):
    """Compile a unary '-' into (runnegate, compiledoperand)."""
    operand = compileexp(exp[1], context, exprmethods)
    return (runnegate, operand)
491
495
def runnegate(context, mapping, data):
    """Evaluate the operand as an integer and return its negation."""
    value = evalinteger(context, mapping, data,
                        _('negation needs an integer argument'))
    return -value
496
500
def buildarithmetic(exp, context, func):
    """Compile a binary arithmetic node; *func* performs the operation."""
    lhs = compileexp(exp[1], context, exprmethods)
    rhs = compileexp(exp[2], context, exprmethods)
    return (runarithmetic, (func, lhs, rhs))
501
505
def runarithmetic(context, mapping, data):
    """Evaluate both operands as integers and apply the stored operator."""
    func, leftarg, rightarg = data
    errmsg = _('arithmetic only defined on integers')
    left = evalinteger(context, mapping, leftarg, errmsg)
    right = evalinteger(context, mapping, rightarg, errmsg)
    try:
        return func(left, right)
    except ZeroDivisionError:
        raise error.Abort(_('division by zero is not defined'))
512
516
def buildfunc(exp, context):
    """Compile a function-call node; the name may be a registered template
    function or a filter invoked with call syntax."""
    name = getsymbol(exp[1])
    if name in funcs:
        func = funcs[name]
        args = _buildfuncargs(exp[2], context, exprmethods, name,
                              func._argspec)
        return (func, args)
    if name in context._filters:
        # a filter called like a function must get exactly one argument
        args = _buildfuncargs(exp[2], context, exprmethods, name,
                              argspec=None)
        if len(args) != 1:
            raise error.ParseError(_("filter %s expects one argument") % name)
        return (runfilter, (args[0], context._filters[name]))
    raise error.ParseError(_("unknown function '%s'") % name)
526
530
def _buildfuncargs(exp, context, curmethods, funcname, argspec):
    """Compile parsed tree of function arguments into list or dict of
    (func, data) pairs

    >>> context = engine(lambda t: (runsymbol, t))
    >>> def fargs(expr, argspec):
    ...     x = _parseexpr(expr)
    ...     n = getsymbol(x[1])
    ...     return _buildfuncargs(x[2], context, exprmethods, n, argspec)
    >>> list(fargs(b'a(l=1, k=2)', b'k l m').keys())
    ['l', 'k']
    >>> args = fargs(b'a(opts=1, k=2)', b'**opts')
    >>> list(args.keys()), list(args[b'opts'].keys())
    (['opts'], ['opts', 'k'])
    """
    def compiledict(xs):
        return util.sortdict((k, compileexp(x, context, curmethods))
                             for k, x in xs.iteritems())
    def compilelist(xs):
        return [compileexp(x, context, curmethods) for x in xs]

    if not argspec:
        # filter or function with no argspec: return list of positional args
        return compilelist(getlist(exp))

    # function with argspec: return dict of named args
    _poskeys, varkey, _keys, optkey = argspec = parser.splitargspec(argspec)
    treeargs = parser.buildargsdict(getlist(exp), funcname, argspec,
                                    keyvaluenode='keyvalue', keynode='symbol')
    compargs = util.sortdict()
    if varkey:
        # variadic positional args ('*args') are compiled as a list
        compargs[varkey] = compilelist(treeargs.pop(varkey))
    if optkey:
        # keyword catch-all ('**opts') is compiled as a dict
        compargs[optkey] = compiledict(treeargs.pop(optkey))
    compargs.update(compiledict(treeargs))
    return compargs
563
567
def buildkeyvaluepair(exp, content):
    # 'key=value' is only meaningful inside function argument lists;
    # reaching this compile method means it appeared elsewhere
    raise error.ParseError(_("can't use a key-value pair in this context"))
566
570
# dict of template built-in functions
funcs = {}

# decorator used below to register built-in functions into 'funcs'
templatefunc = registrar.templatefunc(funcs)
571
575
@templatefunc('date(date[, fmt])')
def date(context, mapping, args):
    """Format a date. See :hg:`help dates` for formatting
    strings. The default is a Unix date format, including the timezone:
    "Mon Sep 04 15:13:13 2006 0700"."""
    if len(args) < 1 or len(args) > 2:
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects one or two arguments"))

    dateinfo = evalfuncarg(context, mapping, args[0])
    fmt = None
    if len(args) == 2:
        fmt = evalstring(context, mapping, args[1])
    try:
        if fmt is None:
            return util.datestr(dateinfo)
        return util.datestr(dateinfo, fmt)
    except (TypeError, ValueError):
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects a date information"))
593
597
@templatefunc('dict([[key=]value...])', argspec='*args **kwargs')
def dict_(context, mapping, args):
    """Construct a dict from key-value pairs. A key may be omitted if
    a value expression can provide an unambiguous name."""
    data = util.sortdict()

    # positional values: infer each key from the expression itself
    for arg in args['args']:
        key = findsymbolicname(arg)
        if not key:
            raise error.ParseError(_('dict key cannot be inferred'))
        if key in data or key in args['kwargs']:
            raise error.ParseError(_("duplicated dict key '%s' inferred") % key)
        data[key] = evalfuncarg(context, mapping, arg)

    # explicit key=value pairs cannot clash (checked above)
    for key, arg in args['kwargs'].iteritems():
        data[key] = evalfuncarg(context, mapping, arg)
    return templatekw.hybriddict(data)
611
615
@templatefunc('diff([includepattern [, excludepattern]])')
def diff(context, mapping, args):
    """Show a diff, optionally
    specifying files to include or exclude."""
    if len(args) > 2:
        # i18n: "diff" is a keyword
        raise error.ParseError(_("diff expects zero, one, or two arguments"))

    def getpatterns(i):
        # a missing or empty argument means "no pattern"
        if i >= len(args):
            return []
        pat = evalstring(context, mapping, args[i]).strip()
        return [pat] if pat else []

    ctx = context.resource(mapping, 'ctx')
    m = ctx.match([], getpatterns(0), getpatterns(1))
    return ''.join(ctx.diff(match=m))
631
635
@templatefunc('extdata(source)', argspec='source')
def extdata(context, mapping, args):
    """Show a text read from the specified extdata source. (EXPERIMENTAL)"""
    if 'source' not in args:
        # i18n: "extdata" is a keyword
        raise error.ParseError(_('extdata expects one argument'))

    source = evalstring(context, mapping, args['source'])
    cache = context.resource(mapping, 'cache').setdefault('extdata', {})
    ctx = context.resource(mapping, 'ctx')
    if source not in cache:
        # each source is read at most once per template rendering
        cache[source] = scmutil.extdatasource(ctx.repo(), source)
    return cache[source].get(ctx.rev(), '')
647
651
@templatefunc('files(pattern)')
def files(context, mapping, args):
    """All files of the current changeset matching the pattern. See
    :hg:`help patterns`."""
    if not len(args) == 1:
        # i18n: "files" is a keyword
        raise error.ParseError(_("files expects one argument"))

    raw = evalstring(context, mapping, args[0])
    ctx = context.resource(mapping, 'ctx')
    m = ctx.match([raw])
    files = list(ctx.matches(m))
    # TODO: pass (context, mapping) pair to keyword function
    # showlist() still expects a flat props dict, so overlay the mapping
    # on top of the engine resources for now
    props = context._resources.copy()
    props.update(mapping)
    return templatekw.showlist("file", files, props)
661
668
@templatefunc('fill(text[, width[, initialident[, hangindent]]])')
def fill(context, mapping, args):
    """Fill many
    paragraphs with optional indentation. See the "fill" filter."""
    if not (1 <= len(args) <= 4):
        # i18n: "fill" is a keyword
        raise error.ParseError(_("fill expects one to four arguments"))

    text = evalstring(context, mapping, args[0])
    # defaults for the optional arguments
    width = 76
    initindent = ''
    hangindent = ''
    if len(args) >= 2:
        width = evalinteger(context, mapping, args[1],
                            # i18n: "fill" is a keyword
                            _("fill expects an integer width"))
    if len(args) >= 3:
        initindent = evalstring(context, mapping, args[2])
    if len(args) >= 4:
        hangindent = evalstring(context, mapping, args[3])

    return templatefilters.fill(text, width, initindent, hangindent)
685
692
@templatefunc('formatnode(node)')
def formatnode(context, mapping, args):
    """Obtain the preferred form of a changeset hash. (DEPRECATED)"""
    if len(args) != 1:
        # i18n: "formatnode" is a keyword
        raise error.ParseError(_("formatnode expects one argument"))

    ui = context.resource(mapping, 'ui')
    node = evalstring(context, mapping, args[0])
    # full hash in debug mode, abbreviated form otherwise
    return node if ui.debugflag else templatefilters.short(node)
698
705
@templatefunc('pad(text, width[, fillchar=\' \'[, left=False]])',
              argspec='text width fillchar left')
def pad(context, mapping, args):
    """Pad text with a
    fill character."""
    if 'text' not in args or 'width' not in args:
        # i18n: "pad" is a keyword
        raise error.ParseError(_("pad() expects two to four arguments"))

    width = evalinteger(context, mapping, args['width'],
                        # i18n: "pad" is a keyword
                        _("pad() expects an integer width"))
    text = evalstring(context, mapping, args['text'])

    fillchar = ' '
    if 'fillchar' in args:
        fillchar = evalstring(context, mapping, args['fillchar'])
        # the fill character may carry color effects but must occupy
        # exactly one display column
        if len(color.stripeffects(fillchar)) != 1:
            # i18n: "pad" is a keyword
            raise error.ParseError(_("pad() expects a single fill character"))
    left = False
    if 'left' in args:
        left = evalboolean(context, mapping, args['left'])

    # measure displayed width, ignoring color escape sequences
    fillwidth = width - encoding.colwidth(color.stripeffects(text))
    if fillwidth <= 0:
        return text
    padding = fillchar * fillwidth
    if left:
        return padding + text
    return text + padding
731
738
@templatefunc('indent(text, indentchars[, firstline])')
def indent(context, mapping, args):
    """Indents all non-empty lines
    with the characters given in the indentchars string. An optional
    third parameter will override the indent for the first line only
    if present."""
    if not (2 <= len(args) <= 3):
        # i18n: "indent" is a keyword
        raise error.ParseError(_("indent() expects two or three arguments"))

    text = evalstring(context, mapping, args[0])
    indent = evalstring(context, mapping, args[1])
    firstline = (evalstring(context, mapping, args[2])
                 if len(args) == 3 else indent)

    # the indent function doesn't indent the first line, so we do it here
    return templatefilters.indent(firstline + text, indent)
752
759
@templatefunc('get(dict, key)')
def get(context, mapping, args):
    """Get an attribute/key from an object. Some keywords
    are complex types. This function allows you to obtain the value of an
    attribute on these types."""
    if len(args) != 2:
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects two arguments"))

    container = evalfuncarg(context, mapping, args[0])
    if not util.safehasattr(container, 'get'):
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects a dict as first argument"))
    key = evalfuncarg(context, mapping, args[1])
    return _getdictitem(container, key)
769
776
770 def _getdictitem(dictarg, key):
777 def _getdictitem(dictarg, key):
771 val = dictarg.get(key)
778 val = dictarg.get(key)
772 if val is None:
779 if val is None:
773 return
780 return
774 return templatekw.wraphybridvalue(dictarg, key, val)
781 return templatekw.wraphybridvalue(dictarg, key, val)
775
782
@templatefunc('if(expr, then[, else])')
def if_(context, mapping, args):
    """Conditionally execute based on the result of
    an expression."""
    if not (2 <= len(args) <= 3):
        # i18n: "if" is a keyword
        raise error.ParseError(_("if expects two or three arguments"))

    if evalboolean(context, mapping, args[0]):
        yield evalrawexp(context, mapping, args[1])
    elif len(args) == 3:
        # optional else-branch
        yield evalrawexp(context, mapping, args[2])
789
796
@templatefunc('ifcontains(needle, haystack, then[, else])')
def ifcontains(context, mapping, args):
    """Conditionally execute based
    on whether the item "needle" is in "haystack"."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifcontains" is a keyword
        raise error.ParseError(_("ifcontains expects three or four arguments"))

    haystack = evalfuncarg(context, mapping, args[1])
    try:
        # coerce the needle to the haystack's key type when it declares one
        needle = evalastype(context, mapping, args[0],
                            getattr(haystack, 'keytype', None) or bytes)
        found = (needle in haystack)
    except error.ParseError:
        # a needle that cannot be coerced can never be contained
        found = False

    if found:
        yield evalrawexp(context, mapping, args[2])
    elif len(args) == 4:
        yield evalrawexp(context, mapping, args[3])
810
817
@templatefunc('ifeq(expr1, expr2, then[, else])')
def ifeq(context, mapping, args):
    """Conditionally execute based on
    whether 2 items are equivalent."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifeq" is a keyword
        raise error.ParseError(_("ifeq expects three or four arguments"))

    lhs = evalstring(context, mapping, args[0])
    rhs = evalstring(context, mapping, args[1])
    if lhs == rhs:
        yield evalrawexp(context, mapping, args[2])
    elif len(args) == 4:
        # optional else-branch
        yield evalrawexp(context, mapping, args[3])
825
832
826 @templatefunc('join(list, sep)')
833 @templatefunc('join(list, sep)')
827 def join(context, mapping, args):
834 def join(context, mapping, args):
828 """Join items in a list with a delimiter."""
835 """Join items in a list with a delimiter."""
829 if not (1 <= len(args) <= 2):
836 if not (1 <= len(args) <= 2):
830 # i18n: "join" is a keyword
837 # i18n: "join" is a keyword
831 raise error.ParseError(_("join expects one or two arguments"))
838 raise error.ParseError(_("join expects one or two arguments"))
832
839
833 # TODO: perhaps this should be evalfuncarg(), but it can't because hgweb
840 # TODO: perhaps this should be evalfuncarg(), but it can't because hgweb
834 # abuses generator as a keyword that returns a list of dicts.
841 # abuses generator as a keyword that returns a list of dicts.
835 joinset = evalrawexp(context, mapping, args[0])
842 joinset = evalrawexp(context, mapping, args[0])
836 joinset = templatekw.unwrapvalue(joinset)
843 joinset = templatekw.unwrapvalue(joinset)
837 joinfmt = getattr(joinset, 'joinfmt', pycompat.identity)
844 joinfmt = getattr(joinset, 'joinfmt', pycompat.identity)
838 joiner = " "
845 joiner = " "
839 if len(args) > 1:
846 if len(args) > 1:
840 joiner = evalstring(context, mapping, args[1])
847 joiner = evalstring(context, mapping, args[1])
841
848
842 first = True
849 first = True
843 for x in joinset:
850 for x in joinset:
844 if first:
851 if first:
845 first = False
852 first = False
846 else:
853 else:
847 yield joiner
854 yield joiner
848 yield joinfmt(x)
855 yield joinfmt(x)
849
856
850 @templatefunc('label(label, expr)')
857 @templatefunc('label(label, expr)')
851 def label(context, mapping, args):
858 def label(context, mapping, args):
852 """Apply a label to generated content. Content with
859 """Apply a label to generated content. Content with
853 a label applied can result in additional post-processing, such as
860 a label applied can result in additional post-processing, such as
854 automatic colorization."""
861 automatic colorization."""
855 if len(args) != 2:
862 if len(args) != 2:
856 # i18n: "label" is a keyword
863 # i18n: "label" is a keyword
857 raise error.ParseError(_("label expects two arguments"))
864 raise error.ParseError(_("label expects two arguments"))
858
865
859 ui = context.resource(mapping, 'ui')
866 ui = context.resource(mapping, 'ui')
860 thing = evalstring(context, mapping, args[1])
867 thing = evalstring(context, mapping, args[1])
861 # preserve unknown symbol as literal so effects like 'red', 'bold',
868 # preserve unknown symbol as literal so effects like 'red', 'bold',
862 # etc. don't need to be quoted
869 # etc. don't need to be quoted
863 label = evalstringliteral(context, mapping, args[0])
870 label = evalstringliteral(context, mapping, args[0])
864
871
865 return ui.label(thing, label)
872 return ui.label(thing, label)
866
873
867 @templatefunc('latesttag([pattern])')
874 @templatefunc('latesttag([pattern])')
868 def latesttag(context, mapping, args):
875 def latesttag(context, mapping, args):
869 """The global tags matching the given pattern on the
876 """The global tags matching the given pattern on the
870 most recent globally tagged ancestor of this changeset.
877 most recent globally tagged ancestor of this changeset.
871 If no such tags exist, the "{tag}" template resolves to
878 If no such tags exist, the "{tag}" template resolves to
872 the string "null"."""
879 the string "null"."""
873 if len(args) > 1:
880 if len(args) > 1:
874 # i18n: "latesttag" is a keyword
881 # i18n: "latesttag" is a keyword
875 raise error.ParseError(_("latesttag expects at most one argument"))
882 raise error.ParseError(_("latesttag expects at most one argument"))
876
883
877 pattern = None
884 pattern = None
878 if len(args) == 1:
885 if len(args) == 1:
879 pattern = evalstring(context, mapping, args[0])
886 pattern = evalstring(context, mapping, args[0])
880
887
881 return templatekw.showlatesttags(pattern, **pycompat.strkwargs(mapping))
888 # TODO: pass (context, mapping) pair to keyword function
889 props = context._resources.copy()
890 props.update(mapping)
891 return templatekw.showlatesttags(pattern, **pycompat.strkwargs(props))
882
892
883 @templatefunc('localdate(date[, tz])')
893 @templatefunc('localdate(date[, tz])')
884 def localdate(context, mapping, args):
894 def localdate(context, mapping, args):
885 """Converts a date to the specified timezone.
895 """Converts a date to the specified timezone.
886 The default is local date."""
896 The default is local date."""
887 if not (1 <= len(args) <= 2):
897 if not (1 <= len(args) <= 2):
888 # i18n: "localdate" is a keyword
898 # i18n: "localdate" is a keyword
889 raise error.ParseError(_("localdate expects one or two arguments"))
899 raise error.ParseError(_("localdate expects one or two arguments"))
890
900
891 date = evalfuncarg(context, mapping, args[0])
901 date = evalfuncarg(context, mapping, args[0])
892 try:
902 try:
893 date = util.parsedate(date)
903 date = util.parsedate(date)
894 except AttributeError: # not str nor date tuple
904 except AttributeError: # not str nor date tuple
895 # i18n: "localdate" is a keyword
905 # i18n: "localdate" is a keyword
896 raise error.ParseError(_("localdate expects a date information"))
906 raise error.ParseError(_("localdate expects a date information"))
897 if len(args) >= 2:
907 if len(args) >= 2:
898 tzoffset = None
908 tzoffset = None
899 tz = evalfuncarg(context, mapping, args[1])
909 tz = evalfuncarg(context, mapping, args[1])
900 if isinstance(tz, str):
910 if isinstance(tz, str):
901 tzoffset, remainder = util.parsetimezone(tz)
911 tzoffset, remainder = util.parsetimezone(tz)
902 if remainder:
912 if remainder:
903 tzoffset = None
913 tzoffset = None
904 if tzoffset is None:
914 if tzoffset is None:
905 try:
915 try:
906 tzoffset = int(tz)
916 tzoffset = int(tz)
907 except (TypeError, ValueError):
917 except (TypeError, ValueError):
908 # i18n: "localdate" is a keyword
918 # i18n: "localdate" is a keyword
909 raise error.ParseError(_("localdate expects a timezone"))
919 raise error.ParseError(_("localdate expects a timezone"))
910 else:
920 else:
911 tzoffset = util.makedate()[1]
921 tzoffset = util.makedate()[1]
912 return (date[0], tzoffset)
922 return (date[0], tzoffset)
913
923
914 @templatefunc('max(iterable)')
924 @templatefunc('max(iterable)')
915 def max_(context, mapping, args, **kwargs):
925 def max_(context, mapping, args, **kwargs):
916 """Return the max of an iterable"""
926 """Return the max of an iterable"""
917 if len(args) != 1:
927 if len(args) != 1:
918 # i18n: "max" is a keyword
928 # i18n: "max" is a keyword
919 raise error.ParseError(_("max expects one argument"))
929 raise error.ParseError(_("max expects one argument"))
920
930
921 iterable = evalfuncarg(context, mapping, args[0])
931 iterable = evalfuncarg(context, mapping, args[0])
922 try:
932 try:
923 x = max(iterable)
933 x = max(iterable)
924 except (TypeError, ValueError):
934 except (TypeError, ValueError):
925 # i18n: "max" is a keyword
935 # i18n: "max" is a keyword
926 raise error.ParseError(_("max first argument should be an iterable"))
936 raise error.ParseError(_("max first argument should be an iterable"))
927 return templatekw.wraphybridvalue(iterable, x, x)
937 return templatekw.wraphybridvalue(iterable, x, x)
928
938
929 @templatefunc('min(iterable)')
939 @templatefunc('min(iterable)')
930 def min_(context, mapping, args, **kwargs):
940 def min_(context, mapping, args, **kwargs):
931 """Return the min of an iterable"""
941 """Return the min of an iterable"""
932 if len(args) != 1:
942 if len(args) != 1:
933 # i18n: "min" is a keyword
943 # i18n: "min" is a keyword
934 raise error.ParseError(_("min expects one argument"))
944 raise error.ParseError(_("min expects one argument"))
935
945
936 iterable = evalfuncarg(context, mapping, args[0])
946 iterable = evalfuncarg(context, mapping, args[0])
937 try:
947 try:
938 x = min(iterable)
948 x = min(iterable)
939 except (TypeError, ValueError):
949 except (TypeError, ValueError):
940 # i18n: "min" is a keyword
950 # i18n: "min" is a keyword
941 raise error.ParseError(_("min first argument should be an iterable"))
951 raise error.ParseError(_("min first argument should be an iterable"))
942 return templatekw.wraphybridvalue(iterable, x, x)
952 return templatekw.wraphybridvalue(iterable, x, x)
943
953
944 @templatefunc('mod(a, b)')
954 @templatefunc('mod(a, b)')
945 def mod(context, mapping, args):
955 def mod(context, mapping, args):
946 """Calculate a mod b such that a / b + a mod b == a"""
956 """Calculate a mod b such that a / b + a mod b == a"""
947 if not len(args) == 2:
957 if not len(args) == 2:
948 # i18n: "mod" is a keyword
958 # i18n: "mod" is a keyword
949 raise error.ParseError(_("mod expects two arguments"))
959 raise error.ParseError(_("mod expects two arguments"))
950
960
951 func = lambda a, b: a % b
961 func = lambda a, b: a % b
952 return runarithmetic(context, mapping, (func, args[0], args[1]))
962 return runarithmetic(context, mapping, (func, args[0], args[1]))
953
963
954 @templatefunc('obsfateoperations(markers)')
964 @templatefunc('obsfateoperations(markers)')
955 def obsfateoperations(context, mapping, args):
965 def obsfateoperations(context, mapping, args):
956 """Compute obsfate related information based on markers (EXPERIMENTAL)"""
966 """Compute obsfate related information based on markers (EXPERIMENTAL)"""
957 if len(args) != 1:
967 if len(args) != 1:
958 # i18n: "obsfateoperations" is a keyword
968 # i18n: "obsfateoperations" is a keyword
959 raise error.ParseError(_("obsfateoperations expects one argument"))
969 raise error.ParseError(_("obsfateoperations expects one argument"))
960
970
961 markers = evalfuncarg(context, mapping, args[0])
971 markers = evalfuncarg(context, mapping, args[0])
962
972
963 try:
973 try:
964 data = obsutil.markersoperations(markers)
974 data = obsutil.markersoperations(markers)
965 return templatekw.hybridlist(data, name='operation')
975 return templatekw.hybridlist(data, name='operation')
966 except (TypeError, KeyError):
976 except (TypeError, KeyError):
967 # i18n: "obsfateoperations" is a keyword
977 # i18n: "obsfateoperations" is a keyword
968 errmsg = _("obsfateoperations first argument should be an iterable")
978 errmsg = _("obsfateoperations first argument should be an iterable")
969 raise error.ParseError(errmsg)
979 raise error.ParseError(errmsg)
970
980
971 @templatefunc('obsfatedate(markers)')
981 @templatefunc('obsfatedate(markers)')
972 def obsfatedate(context, mapping, args):
982 def obsfatedate(context, mapping, args):
973 """Compute obsfate related information based on markers (EXPERIMENTAL)"""
983 """Compute obsfate related information based on markers (EXPERIMENTAL)"""
974 if len(args) != 1:
984 if len(args) != 1:
975 # i18n: "obsfatedate" is a keyword
985 # i18n: "obsfatedate" is a keyword
976 raise error.ParseError(_("obsfatedate expects one argument"))
986 raise error.ParseError(_("obsfatedate expects one argument"))
977
987
978 markers = evalfuncarg(context, mapping, args[0])
988 markers = evalfuncarg(context, mapping, args[0])
979
989
980 try:
990 try:
981 data = obsutil.markersdates(markers)
991 data = obsutil.markersdates(markers)
982 return templatekw.hybridlist(data, name='date', fmt='%d %d')
992 return templatekw.hybridlist(data, name='date', fmt='%d %d')
983 except (TypeError, KeyError):
993 except (TypeError, KeyError):
984 # i18n: "obsfatedate" is a keyword
994 # i18n: "obsfatedate" is a keyword
985 errmsg = _("obsfatedate first argument should be an iterable")
995 errmsg = _("obsfatedate first argument should be an iterable")
986 raise error.ParseError(errmsg)
996 raise error.ParseError(errmsg)
987
997
988 @templatefunc('obsfateusers(markers)')
998 @templatefunc('obsfateusers(markers)')
989 def obsfateusers(context, mapping, args):
999 def obsfateusers(context, mapping, args):
990 """Compute obsfate related information based on markers (EXPERIMENTAL)"""
1000 """Compute obsfate related information based on markers (EXPERIMENTAL)"""
991 if len(args) != 1:
1001 if len(args) != 1:
992 # i18n: "obsfateusers" is a keyword
1002 # i18n: "obsfateusers" is a keyword
993 raise error.ParseError(_("obsfateusers expects one argument"))
1003 raise error.ParseError(_("obsfateusers expects one argument"))
994
1004
995 markers = evalfuncarg(context, mapping, args[0])
1005 markers = evalfuncarg(context, mapping, args[0])
996
1006
997 try:
1007 try:
998 data = obsutil.markersusers(markers)
1008 data = obsutil.markersusers(markers)
999 return templatekw.hybridlist(data, name='user')
1009 return templatekw.hybridlist(data, name='user')
1000 except (TypeError, KeyError, ValueError):
1010 except (TypeError, KeyError, ValueError):
1001 # i18n: "obsfateusers" is a keyword
1011 # i18n: "obsfateusers" is a keyword
1002 msg = _("obsfateusers first argument should be an iterable of "
1012 msg = _("obsfateusers first argument should be an iterable of "
1003 "obsmakers")
1013 "obsmakers")
1004 raise error.ParseError(msg)
1014 raise error.ParseError(msg)
1005
1015
1006 @templatefunc('obsfateverb(successors, markers)')
1016 @templatefunc('obsfateverb(successors, markers)')
1007 def obsfateverb(context, mapping, args):
1017 def obsfateverb(context, mapping, args):
1008 """Compute obsfate related information based on successors (EXPERIMENTAL)"""
1018 """Compute obsfate related information based on successors (EXPERIMENTAL)"""
1009 if len(args) != 2:
1019 if len(args) != 2:
1010 # i18n: "obsfateverb" is a keyword
1020 # i18n: "obsfateverb" is a keyword
1011 raise error.ParseError(_("obsfateverb expects two arguments"))
1021 raise error.ParseError(_("obsfateverb expects two arguments"))
1012
1022
1013 successors = evalfuncarg(context, mapping, args[0])
1023 successors = evalfuncarg(context, mapping, args[0])
1014 markers = evalfuncarg(context, mapping, args[1])
1024 markers = evalfuncarg(context, mapping, args[1])
1015
1025
1016 try:
1026 try:
1017 return obsutil.obsfateverb(successors, markers)
1027 return obsutil.obsfateverb(successors, markers)
1018 except TypeError:
1028 except TypeError:
1019 # i18n: "obsfateverb" is a keyword
1029 # i18n: "obsfateverb" is a keyword
1020 errmsg = _("obsfateverb first argument should be countable")
1030 errmsg = _("obsfateverb first argument should be countable")
1021 raise error.ParseError(errmsg)
1031 raise error.ParseError(errmsg)
1022
1032
1023 @templatefunc('relpath(path)')
1033 @templatefunc('relpath(path)')
1024 def relpath(context, mapping, args):
1034 def relpath(context, mapping, args):
1025 """Convert a repository-absolute path into a filesystem path relative to
1035 """Convert a repository-absolute path into a filesystem path relative to
1026 the current working directory."""
1036 the current working directory."""
1027 if len(args) != 1:
1037 if len(args) != 1:
1028 # i18n: "relpath" is a keyword
1038 # i18n: "relpath" is a keyword
1029 raise error.ParseError(_("relpath expects one argument"))
1039 raise error.ParseError(_("relpath expects one argument"))
1030
1040
1031 repo = context.resource(mapping, 'ctx').repo()
1041 repo = context.resource(mapping, 'ctx').repo()
1032 path = evalstring(context, mapping, args[0])
1042 path = evalstring(context, mapping, args[0])
1033 return repo.pathto(path)
1043 return repo.pathto(path)
1034
1044
1035 @templatefunc('revset(query[, formatargs...])')
1045 @templatefunc('revset(query[, formatargs...])')
1036 def revset(context, mapping, args):
1046 def revset(context, mapping, args):
1037 """Execute a revision set query. See
1047 """Execute a revision set query. See
1038 :hg:`help revset`."""
1048 :hg:`help revset`."""
1039 if not len(args) > 0:
1049 if not len(args) > 0:
1040 # i18n: "revset" is a keyword
1050 # i18n: "revset" is a keyword
1041 raise error.ParseError(_("revset expects one or more arguments"))
1051 raise error.ParseError(_("revset expects one or more arguments"))
1042
1052
1043 raw = evalstring(context, mapping, args[0])
1053 raw = evalstring(context, mapping, args[0])
1044 ctx = context.resource(mapping, 'ctx')
1054 ctx = context.resource(mapping, 'ctx')
1045 repo = ctx.repo()
1055 repo = ctx.repo()
1046
1056
1047 def query(expr):
1057 def query(expr):
1048 m = revsetmod.match(repo.ui, expr, repo=repo)
1058 m = revsetmod.match(repo.ui, expr, repo=repo)
1049 return m(repo)
1059 return m(repo)
1050
1060
1051 if len(args) > 1:
1061 if len(args) > 1:
1052 formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]]
1062 formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]]
1053 revs = query(revsetlang.formatspec(raw, *formatargs))
1063 revs = query(revsetlang.formatspec(raw, *formatargs))
1054 revs = list(revs)
1064 revs = list(revs)
1055 else:
1065 else:
1056 cache = context.resource(mapping, 'cache')
1066 cache = context.resource(mapping, 'cache')
1057 revsetcache = cache.setdefault("revsetcache", {})
1067 revsetcache = cache.setdefault("revsetcache", {})
1058 if raw in revsetcache:
1068 if raw in revsetcache:
1059 revs = revsetcache[raw]
1069 revs = revsetcache[raw]
1060 else:
1070 else:
1061 revs = query(raw)
1071 revs = query(raw)
1062 revs = list(revs)
1072 revs = list(revs)
1063 revsetcache[raw] = revs
1073 revsetcache[raw] = revs
1064
1074
1075 # TODO: pass (context, mapping) pair to keyword function
1076 props = context._resources.copy()
1077 props.update(mapping)
1065 return templatekw.showrevslist("revision", revs,
1078 return templatekw.showrevslist("revision", revs,
1066 **pycompat.strkwargs(mapping))
1079 **pycompat.strkwargs(props))
1067
1080
1068 @templatefunc('rstdoc(text, style)')
1081 @templatefunc('rstdoc(text, style)')
1069 def rstdoc(context, mapping, args):
1082 def rstdoc(context, mapping, args):
1070 """Format reStructuredText."""
1083 """Format reStructuredText."""
1071 if len(args) != 2:
1084 if len(args) != 2:
1072 # i18n: "rstdoc" is a keyword
1085 # i18n: "rstdoc" is a keyword
1073 raise error.ParseError(_("rstdoc expects two arguments"))
1086 raise error.ParseError(_("rstdoc expects two arguments"))
1074
1087
1075 text = evalstring(context, mapping, args[0])
1088 text = evalstring(context, mapping, args[0])
1076 style = evalstring(context, mapping, args[1])
1089 style = evalstring(context, mapping, args[1])
1077
1090
1078 return minirst.format(text, style=style, keep=['verbose'])
1091 return minirst.format(text, style=style, keep=['verbose'])
1079
1092
1080 @templatefunc('separate(sep, args)', argspec='sep *args')
1093 @templatefunc('separate(sep, args)', argspec='sep *args')
1081 def separate(context, mapping, args):
1094 def separate(context, mapping, args):
1082 """Add a separator between non-empty arguments."""
1095 """Add a separator between non-empty arguments."""
1083 if 'sep' not in args:
1096 if 'sep' not in args:
1084 # i18n: "separate" is a keyword
1097 # i18n: "separate" is a keyword
1085 raise error.ParseError(_("separate expects at least one argument"))
1098 raise error.ParseError(_("separate expects at least one argument"))
1086
1099
1087 sep = evalstring(context, mapping, args['sep'])
1100 sep = evalstring(context, mapping, args['sep'])
1088 first = True
1101 first = True
1089 for arg in args['args']:
1102 for arg in args['args']:
1090 argstr = evalstring(context, mapping, arg)
1103 argstr = evalstring(context, mapping, arg)
1091 if not argstr:
1104 if not argstr:
1092 continue
1105 continue
1093 if first:
1106 if first:
1094 first = False
1107 first = False
1095 else:
1108 else:
1096 yield sep
1109 yield sep
1097 yield argstr
1110 yield argstr
1098
1111
1099 @templatefunc('shortest(node, minlength=4)')
1112 @templatefunc('shortest(node, minlength=4)')
1100 def shortest(context, mapping, args):
1113 def shortest(context, mapping, args):
1101 """Obtain the shortest representation of
1114 """Obtain the shortest representation of
1102 a node."""
1115 a node."""
1103 if not (1 <= len(args) <= 2):
1116 if not (1 <= len(args) <= 2):
1104 # i18n: "shortest" is a keyword
1117 # i18n: "shortest" is a keyword
1105 raise error.ParseError(_("shortest() expects one or two arguments"))
1118 raise error.ParseError(_("shortest() expects one or two arguments"))
1106
1119
1107 node = evalstring(context, mapping, args[0])
1120 node = evalstring(context, mapping, args[0])
1108
1121
1109 minlength = 4
1122 minlength = 4
1110 if len(args) > 1:
1123 if len(args) > 1:
1111 minlength = evalinteger(context, mapping, args[1],
1124 minlength = evalinteger(context, mapping, args[1],
1112 # i18n: "shortest" is a keyword
1125 # i18n: "shortest" is a keyword
1113 _("shortest() expects an integer minlength"))
1126 _("shortest() expects an integer minlength"))
1114
1127
1115 # _partialmatch() of filtered changelog could take O(len(repo)) time,
1128 # _partialmatch() of filtered changelog could take O(len(repo)) time,
1116 # which would be unacceptably slow. so we look for hash collision in
1129 # which would be unacceptably slow. so we look for hash collision in
1117 # unfiltered space, which means some hashes may be slightly longer.
1130 # unfiltered space, which means some hashes may be slightly longer.
1118 cl = context.resource(mapping, 'ctx')._repo.unfiltered().changelog
1131 cl = context.resource(mapping, 'ctx')._repo.unfiltered().changelog
1119 return cl.shortest(node, minlength)
1132 return cl.shortest(node, minlength)
1120
1133
1121 @templatefunc('strip(text[, chars])')
1134 @templatefunc('strip(text[, chars])')
1122 def strip(context, mapping, args):
1135 def strip(context, mapping, args):
1123 """Strip characters from a string. By default,
1136 """Strip characters from a string. By default,
1124 strips all leading and trailing whitespace."""
1137 strips all leading and trailing whitespace."""
1125 if not (1 <= len(args) <= 2):
1138 if not (1 <= len(args) <= 2):
1126 # i18n: "strip" is a keyword
1139 # i18n: "strip" is a keyword
1127 raise error.ParseError(_("strip expects one or two arguments"))
1140 raise error.ParseError(_("strip expects one or two arguments"))
1128
1141
1129 text = evalstring(context, mapping, args[0])
1142 text = evalstring(context, mapping, args[0])
1130 if len(args) == 2:
1143 if len(args) == 2:
1131 chars = evalstring(context, mapping, args[1])
1144 chars = evalstring(context, mapping, args[1])
1132 return text.strip(chars)
1145 return text.strip(chars)
1133 return text.strip()
1146 return text.strip()
1134
1147
1135 @templatefunc('sub(pattern, replacement, expression)')
1148 @templatefunc('sub(pattern, replacement, expression)')
1136 def sub(context, mapping, args):
1149 def sub(context, mapping, args):
1137 """Perform text substitution
1150 """Perform text substitution
1138 using regular expressions."""
1151 using regular expressions."""
1139 if len(args) != 3:
1152 if len(args) != 3:
1140 # i18n: "sub" is a keyword
1153 # i18n: "sub" is a keyword
1141 raise error.ParseError(_("sub expects three arguments"))
1154 raise error.ParseError(_("sub expects three arguments"))
1142
1155
1143 pat = evalstring(context, mapping, args[0])
1156 pat = evalstring(context, mapping, args[0])
1144 rpl = evalstring(context, mapping, args[1])
1157 rpl = evalstring(context, mapping, args[1])
1145 src = evalstring(context, mapping, args[2])
1158 src = evalstring(context, mapping, args[2])
1146 try:
1159 try:
1147 patre = re.compile(pat)
1160 patre = re.compile(pat)
1148 except re.error:
1161 except re.error:
1149 # i18n: "sub" is a keyword
1162 # i18n: "sub" is a keyword
1150 raise error.ParseError(_("sub got an invalid pattern: %s") % pat)
1163 raise error.ParseError(_("sub got an invalid pattern: %s") % pat)
1151 try:
1164 try:
1152 yield patre.sub(rpl, src)
1165 yield patre.sub(rpl, src)
1153 except re.error:
1166 except re.error:
1154 # i18n: "sub" is a keyword
1167 # i18n: "sub" is a keyword
1155 raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)
1168 raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)
1156
1169
1157 @templatefunc('startswith(pattern, text)')
1170 @templatefunc('startswith(pattern, text)')
1158 def startswith(context, mapping, args):
1171 def startswith(context, mapping, args):
1159 """Returns the value from the "text" argument
1172 """Returns the value from the "text" argument
1160 if it begins with the content from the "pattern" argument."""
1173 if it begins with the content from the "pattern" argument."""
1161 if len(args) != 2:
1174 if len(args) != 2:
1162 # i18n: "startswith" is a keyword
1175 # i18n: "startswith" is a keyword
1163 raise error.ParseError(_("startswith expects two arguments"))
1176 raise error.ParseError(_("startswith expects two arguments"))
1164
1177
1165 patn = evalstring(context, mapping, args[0])
1178 patn = evalstring(context, mapping, args[0])
1166 text = evalstring(context, mapping, args[1])
1179 text = evalstring(context, mapping, args[1])
1167 if text.startswith(patn):
1180 if text.startswith(patn):
1168 return text
1181 return text
1169 return ''
1182 return ''
1170
1183
1171 @templatefunc('word(number, text[, separator])')
1184 @templatefunc('word(number, text[, separator])')
1172 def word(context, mapping, args):
1185 def word(context, mapping, args):
1173 """Return the nth word from a string."""
1186 """Return the nth word from a string."""
1174 if not (2 <= len(args) <= 3):
1187 if not (2 <= len(args) <= 3):
1175 # i18n: "word" is a keyword
1188 # i18n: "word" is a keyword
1176 raise error.ParseError(_("word expects two or three arguments, got %d")
1189 raise error.ParseError(_("word expects two or three arguments, got %d")
1177 % len(args))
1190 % len(args))
1178
1191
1179 num = evalinteger(context, mapping, args[0],
1192 num = evalinteger(context, mapping, args[0],
1180 # i18n: "word" is a keyword
1193 # i18n: "word" is a keyword
1181 _("word expects an integer index"))
1194 _("word expects an integer index"))
1182 text = evalstring(context, mapping, args[1])
1195 text = evalstring(context, mapping, args[1])
1183 if len(args) == 3:
1196 if len(args) == 3:
1184 splitter = evalstring(context, mapping, args[2])
1197 splitter = evalstring(context, mapping, args[2])
1185 else:
1198 else:
1186 splitter = None
1199 splitter = None
1187
1200
1188 tokens = text.split(splitter)
1201 tokens = text.split(splitter)
1189 if num >= len(tokens) or num < -len(tokens):
1202 if num >= len(tokens) or num < -len(tokens):
1190 return ''
1203 return ''
1191 else:
1204 else:
1192 return tokens[num]
1205 return tokens[num]
1193
1206
1194 # methods to interpret function arguments or inner expressions (e.g. {_(x)})
1207 # methods to interpret function arguments or inner expressions (e.g. {_(x)})
1195 exprmethods = {
1208 exprmethods = {
1196 "integer": lambda e, c: (runinteger, e[1]),
1209 "integer": lambda e, c: (runinteger, e[1]),
1197 "string": lambda e, c: (runstring, e[1]),
1210 "string": lambda e, c: (runstring, e[1]),
1198 "symbol": lambda e, c: (runsymbol, e[1]),
1211 "symbol": lambda e, c: (runsymbol, e[1]),
1199 "template": buildtemplate,
1212 "template": buildtemplate,
1200 "group": lambda e, c: compileexp(e[1], c, exprmethods),
1213 "group": lambda e, c: compileexp(e[1], c, exprmethods),
1201 ".": buildmember,
1214 ".": buildmember,
1202 "|": buildfilter,
1215 "|": buildfilter,
1203 "%": buildmap,
1216 "%": buildmap,
1204 "func": buildfunc,
1217 "func": buildfunc,
1205 "keyvalue": buildkeyvaluepair,
1218 "keyvalue": buildkeyvaluepair,
1206 "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
1219 "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
1207 "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
1220 "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
1208 "negate": buildnegate,
1221 "negate": buildnegate,
1209 "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
1222 "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
1210 "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
1223 "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
1211 }
1224 }
1212
1225
1213 # methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
1226 # methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
1214 methods = exprmethods.copy()
1227 methods = exprmethods.copy()
1215 methods["integer"] = exprmethods["symbol"] # '{1}' as variable
1228 methods["integer"] = exprmethods["symbol"] # '{1}' as variable
1216
1229
1217 class _aliasrules(parser.basealiasrules):
1230 class _aliasrules(parser.basealiasrules):
1218 """Parsing and expansion rule set of template aliases"""
1231 """Parsing and expansion rule set of template aliases"""
1219 _section = _('template alias')
1232 _section = _('template alias')
1220 _parse = staticmethod(_parseexpr)
1233 _parse = staticmethod(_parseexpr)
1221
1234
1222 @staticmethod
1235 @staticmethod
1223 def _trygetfunc(tree):
1236 def _trygetfunc(tree):
1224 """Return (name, args) if tree is func(...) or ...|filter; otherwise
1237 """Return (name, args) if tree is func(...) or ...|filter; otherwise
1225 None"""
1238 None"""
1226 if tree[0] == 'func' and tree[1][0] == 'symbol':
1239 if tree[0] == 'func' and tree[1][0] == 'symbol':
1227 return tree[1][1], getlist(tree[2])
1240 return tree[1][1], getlist(tree[2])
1228 if tree[0] == '|' and tree[2][0] == 'symbol':
1241 if tree[0] == '|' and tree[2][0] == 'symbol':
1229 return tree[2][1], [tree[1]]
1242 return tree[2][1], [tree[1]]
1230
1243
1231 def expandaliases(tree, aliases):
1244 def expandaliases(tree, aliases):
1232 """Return new tree of aliases are expanded"""
1245 """Return new tree of aliases are expanded"""
1233 aliasmap = _aliasrules.buildmap(aliases)
1246 aliasmap = _aliasrules.buildmap(aliases)
1234 return _aliasrules.expand(aliasmap, tree)
1247 return _aliasrules.expand(aliasmap, tree)
1235
1248
1236 # template engine
1249 # template engine
1237
1250
1238 stringify = templatefilters.stringify
1251 stringify = templatefilters.stringify
1239
1252
1240 def _flatten(thing):
1253 def _flatten(thing):
1241 '''yield a single stream from a possibly nested set of iterators'''
1254 '''yield a single stream from a possibly nested set of iterators'''
1242 thing = templatekw.unwraphybrid(thing)
1255 thing = templatekw.unwraphybrid(thing)
1243 if isinstance(thing, bytes):
1256 if isinstance(thing, bytes):
1244 yield thing
1257 yield thing
1245 elif isinstance(thing, str):
1258 elif isinstance(thing, str):
1246 # We can only hit this on Python 3, and it's here to guard
1259 # We can only hit this on Python 3, and it's here to guard
1247 # against infinite recursion.
1260 # against infinite recursion.
1248 raise error.ProgrammingError('Mercurial IO including templates is done'
1261 raise error.ProgrammingError('Mercurial IO including templates is done'
1249 ' with bytes, not strings')
1262 ' with bytes, not strings')
1250 elif thing is None:
1263 elif thing is None:
1251 pass
1264 pass
1252 elif not util.safehasattr(thing, '__iter__'):
1265 elif not util.safehasattr(thing, '__iter__'):
1253 yield pycompat.bytestr(thing)
1266 yield pycompat.bytestr(thing)
1254 else:
1267 else:
1255 for i in thing:
1268 for i in thing:
1256 i = templatekw.unwraphybrid(i)
1269 i = templatekw.unwraphybrid(i)
1257 if isinstance(i, bytes):
1270 if isinstance(i, bytes):
1258 yield i
1271 yield i
1259 elif i is None:
1272 elif i is None:
1260 pass
1273 pass
1261 elif not util.safehasattr(i, '__iter__'):
1274 elif not util.safehasattr(i, '__iter__'):
1262 yield pycompat.bytestr(i)
1275 yield pycompat.bytestr(i)
1263 else:
1276 else:
1264 for j in _flatten(i):
1277 for j in _flatten(i):
1265 yield j
1278 yield j
1266
1279
1267 def unquotestring(s):
1280 def unquotestring(s):
1268 '''unwrap quotes if any; otherwise returns unmodified string'''
1281 '''unwrap quotes if any; otherwise returns unmodified string'''
1269 if len(s) < 2 or s[0] not in "'\"" or s[0] != s[-1]:
1282 if len(s) < 2 or s[0] not in "'\"" or s[0] != s[-1]:
1270 return s
1283 return s
1271 return s[1:-1]
1284 return s[1:-1]
1272
1285
1273 class engine(object):
1286 class engine(object):
1274 '''template expansion engine.
1287 '''template expansion engine.
1275
1288
1276 template expansion works like this. a map file contains key=value
1289 template expansion works like this. a map file contains key=value
1277 pairs. if value is quoted, it is treated as string. otherwise, it
1290 pairs. if value is quoted, it is treated as string. otherwise, it
1278 is treated as name of template file.
1291 is treated as name of template file.
1279
1292
1280 templater is asked to expand a key in map. it looks up key, and
1293 templater is asked to expand a key in map. it looks up key, and
1281 looks for strings like this: {foo}. it expands {foo} by looking up
1294 looks for strings like this: {foo}. it expands {foo} by looking up
1282 foo in map, and substituting it. expansion is recursive: it stops
1295 foo in map, and substituting it. expansion is recursive: it stops
1283 when there is no more {foo} to replace.
1296 when there is no more {foo} to replace.
1284
1297
1285 expansion also allows formatting and filtering.
1298 expansion also allows formatting and filtering.
1286
1299
1287 format uses key to expand each item in list. syntax is
1300 format uses key to expand each item in list. syntax is
1288 {key%format}.
1301 {key%format}.
1289
1302
1290 filter uses function to transform value. syntax is
1303 filter uses function to transform value. syntax is
1291 {key|filter1|filter2|...}.'''
1304 {key|filter1|filter2|...}.'''
1292
1305
1293 def __init__(self, loader, filters=None, defaults=None, aliases=()):
1306 def __init__(self, loader, filters=None, defaults=None, resources=None,
1307 aliases=()):
1294 self._loader = loader
1308 self._loader = loader
1295 if filters is None:
1309 if filters is None:
1296 filters = {}
1310 filters = {}
1297 self._filters = filters
1311 self._filters = filters
1298 if defaults is None:
1312 if defaults is None:
1299 defaults = {}
1313 defaults = {}
1314 if resources is None:
1315 resources = {}
1300 self._defaults = defaults
1316 self._defaults = defaults
1317 self._resources = resources
1301 self._aliasmap = _aliasrules.buildmap(aliases)
1318 self._aliasmap = _aliasrules.buildmap(aliases)
1302 self._cache = {} # key: (func, data)
1319 self._cache = {} # key: (func, data)
1303
1320
1304 def symbol(self, mapping, key):
1321 def symbol(self, mapping, key):
1305 """Resolve symbol to value or function; None if nothing found"""
1322 """Resolve symbol to value or function; None if nothing found"""
1306 v = mapping.get(key)
1323 v = mapping.get(key)
1307 if v is None:
1324 if v is None:
1308 v = self._defaults.get(key)
1325 v = self._defaults.get(key)
1309 return v
1326 return v
1310
1327
1311 def resource(self, mapping, key):
1328 def resource(self, mapping, key):
1312 """Return internal data (e.g. cache) used for keyword/function
1329 """Return internal data (e.g. cache) used for keyword/function
1313 evaluation"""
1330 evaluation"""
1314 return mapping[key]
1331 v = mapping.get(key)
1332 if v is None:
1333 v = self._resources.get(key)
1334 if v is None:
1335 raise KeyError
1336 return v
1315
1337
1316 def _load(self, t):
1338 def _load(self, t):
1317 '''load, parse, and cache a template'''
1339 '''load, parse, and cache a template'''
1318 if t not in self._cache:
1340 if t not in self._cache:
1319 # put poison to cut recursion while compiling 't'
1341 # put poison to cut recursion while compiling 't'
1320 self._cache[t] = (_runrecursivesymbol, t)
1342 self._cache[t] = (_runrecursivesymbol, t)
1321 try:
1343 try:
1322 x = parse(self._loader(t))
1344 x = parse(self._loader(t))
1323 if self._aliasmap:
1345 if self._aliasmap:
1324 x = _aliasrules.expand(self._aliasmap, x)
1346 x = _aliasrules.expand(self._aliasmap, x)
1325 self._cache[t] = compileexp(x, self, methods)
1347 self._cache[t] = compileexp(x, self, methods)
1326 except: # re-raises
1348 except: # re-raises
1327 del self._cache[t]
1349 del self._cache[t]
1328 raise
1350 raise
1329 return self._cache[t]
1351 return self._cache[t]
1330
1352
1331 def process(self, t, mapping):
1353 def process(self, t, mapping):
1332 '''Perform expansion. t is name of map element to expand.
1354 '''Perform expansion. t is name of map element to expand.
1333 mapping contains added elements for use during expansion. Is a
1355 mapping contains added elements for use during expansion. Is a
1334 generator.'''
1356 generator.'''
1335 func, data = self._load(t)
1357 func, data = self._load(t)
1336 return _flatten(func(self, mapping, data))
1358 return _flatten(func(self, mapping, data))
1337
1359
1338 engines = {'default': engine}
1360 engines = {'default': engine}
1339
1361
1340 def stylelist():
1362 def stylelist():
1341 paths = templatepaths()
1363 paths = templatepaths()
1342 if not paths:
1364 if not paths:
1343 return _('no templates found, try `hg debuginstall` for more info')
1365 return _('no templates found, try `hg debuginstall` for more info')
1344 dirlist = os.listdir(paths[0])
1366 dirlist = os.listdir(paths[0])
1345 stylelist = []
1367 stylelist = []
1346 for file in dirlist:
1368 for file in dirlist:
1347 split = file.split(".")
1369 split = file.split(".")
1348 if split[-1] in ('orig', 'rej'):
1370 if split[-1] in ('orig', 'rej'):
1349 continue
1371 continue
1350 if split[0] == "map-cmdline":
1372 if split[0] == "map-cmdline":
1351 stylelist.append(split[1])
1373 stylelist.append(split[1])
1352 return ", ".join(sorted(stylelist))
1374 return ", ".join(sorted(stylelist))
1353
1375
1354 def _readmapfile(mapfile):
1376 def _readmapfile(mapfile):
1355 """Load template elements from the given map file"""
1377 """Load template elements from the given map file"""
1356 if not os.path.exists(mapfile):
1378 if not os.path.exists(mapfile):
1357 raise error.Abort(_("style '%s' not found") % mapfile,
1379 raise error.Abort(_("style '%s' not found") % mapfile,
1358 hint=_("available styles: %s") % stylelist())
1380 hint=_("available styles: %s") % stylelist())
1359
1381
1360 base = os.path.dirname(mapfile)
1382 base = os.path.dirname(mapfile)
1361 conf = config.config(includepaths=templatepaths())
1383 conf = config.config(includepaths=templatepaths())
1362 conf.read(mapfile, remap={'': 'templates'})
1384 conf.read(mapfile, remap={'': 'templates'})
1363
1385
1364 cache = {}
1386 cache = {}
1365 tmap = {}
1387 tmap = {}
1366 aliases = []
1388 aliases = []
1367
1389
1368 val = conf.get('templates', '__base__')
1390 val = conf.get('templates', '__base__')
1369 if val and val[0] not in "'\"":
1391 if val and val[0] not in "'\"":
1370 # treat as a pointer to a base class for this style
1392 # treat as a pointer to a base class for this style
1371 path = util.normpath(os.path.join(base, val))
1393 path = util.normpath(os.path.join(base, val))
1372
1394
1373 # fallback check in template paths
1395 # fallback check in template paths
1374 if not os.path.exists(path):
1396 if not os.path.exists(path):
1375 for p in templatepaths():
1397 for p in templatepaths():
1376 p2 = util.normpath(os.path.join(p, val))
1398 p2 = util.normpath(os.path.join(p, val))
1377 if os.path.isfile(p2):
1399 if os.path.isfile(p2):
1378 path = p2
1400 path = p2
1379 break
1401 break
1380 p3 = util.normpath(os.path.join(p2, "map"))
1402 p3 = util.normpath(os.path.join(p2, "map"))
1381 if os.path.isfile(p3):
1403 if os.path.isfile(p3):
1382 path = p3
1404 path = p3
1383 break
1405 break
1384
1406
1385 cache, tmap, aliases = _readmapfile(path)
1407 cache, tmap, aliases = _readmapfile(path)
1386
1408
1387 for key, val in conf['templates'].items():
1409 for key, val in conf['templates'].items():
1388 if not val:
1410 if not val:
1389 raise error.ParseError(_('missing value'),
1411 raise error.ParseError(_('missing value'),
1390 conf.source('templates', key))
1412 conf.source('templates', key))
1391 if val[0] in "'\"":
1413 if val[0] in "'\"":
1392 if val[0] != val[-1]:
1414 if val[0] != val[-1]:
1393 raise error.ParseError(_('unmatched quotes'),
1415 raise error.ParseError(_('unmatched quotes'),
1394 conf.source('templates', key))
1416 conf.source('templates', key))
1395 cache[key] = unquotestring(val)
1417 cache[key] = unquotestring(val)
1396 elif key != '__base__':
1418 elif key != '__base__':
1397 val = 'default', val
1419 val = 'default', val
1398 if ':' in val[1]:
1420 if ':' in val[1]:
1399 val = val[1].split(':', 1)
1421 val = val[1].split(':', 1)
1400 tmap[key] = val[0], os.path.join(base, val[1])
1422 tmap[key] = val[0], os.path.join(base, val[1])
1401 aliases.extend(conf['templatealias'].items())
1423 aliases.extend(conf['templatealias'].items())
1402 return cache, tmap, aliases
1424 return cache, tmap, aliases
1403
1425
1404 class TemplateNotFound(error.Abort):
1426 class TemplateNotFound(error.Abort):
1405 pass
1427 pass
1406
1428
1407 class templater(object):
1429 class templater(object):
1408
1430
1409 def __init__(self, filters=None, defaults=None, cache=None, aliases=(),
1431 def __init__(self, filters=None, defaults=None, resources=None,
1410 minchunk=1024, maxchunk=65536):
1432 cache=None, aliases=(), minchunk=1024, maxchunk=65536):
1411 '''set up template engine.
1433 '''set up template engine.
1412 filters is dict of functions. each transforms a value into another.
1434 filters is dict of functions. each transforms a value into another.
1413 defaults is dict of default map definitions.
1435 defaults is dict of default map definitions.
1436 resources is dict of internal data (e.g. cache), which are inaccessible
1437 from user template.
1414 aliases is list of alias (name, replacement) pairs.
1438 aliases is list of alias (name, replacement) pairs.
1415 '''
1439 '''
1416 if filters is None:
1440 if filters is None:
1417 filters = {}
1441 filters = {}
1418 if defaults is None:
1442 if defaults is None:
1419 defaults = {}
1443 defaults = {}
1444 if resources is None:
1445 resources = {}
1420 if cache is None:
1446 if cache is None:
1421 cache = {}
1447 cache = {}
1422 self.cache = cache.copy()
1448 self.cache = cache.copy()
1423 self.map = {}
1449 self.map = {}
1424 self.filters = templatefilters.filters.copy()
1450 self.filters = templatefilters.filters.copy()
1425 self.filters.update(filters)
1451 self.filters.update(filters)
1426 self.defaults = defaults
1452 self.defaults = defaults
1453 self._resources = {'templ': self}
1454 self._resources.update(resources)
1427 self._aliases = aliases
1455 self._aliases = aliases
1428 self.minchunk, self.maxchunk = minchunk, maxchunk
1456 self.minchunk, self.maxchunk = minchunk, maxchunk
1429 self.ecache = {}
1457 self.ecache = {}
1430
1458
1431 @classmethod
1459 @classmethod
1432 def frommapfile(cls, mapfile, filters=None, defaults=None, cache=None,
1460 def frommapfile(cls, mapfile, filters=None, defaults=None, resources=None,
1433 minchunk=1024, maxchunk=65536):
1461 cache=None, minchunk=1024, maxchunk=65536):
1434 """Create templater from the specified map file"""
1462 """Create templater from the specified map file"""
1435 t = cls(filters, defaults, cache, [], minchunk, maxchunk)
1463 t = cls(filters, defaults, resources, cache, [], minchunk, maxchunk)
1436 cache, tmap, aliases = _readmapfile(mapfile)
1464 cache, tmap, aliases = _readmapfile(mapfile)
1437 t.cache.update(cache)
1465 t.cache.update(cache)
1438 t.map = tmap
1466 t.map = tmap
1439 t._aliases = aliases
1467 t._aliases = aliases
1440 return t
1468 return t
1441
1469
1442 def __contains__(self, key):
1470 def __contains__(self, key):
1443 return key in self.cache or key in self.map
1471 return key in self.cache or key in self.map
1444
1472
1445 def load(self, t):
1473 def load(self, t):
1446 '''Get the template for the given template name. Use a local cache.'''
1474 '''Get the template for the given template name. Use a local cache.'''
1447 if t not in self.cache:
1475 if t not in self.cache:
1448 try:
1476 try:
1449 self.cache[t] = util.readfile(self.map[t][1])
1477 self.cache[t] = util.readfile(self.map[t][1])
1450 except KeyError as inst:
1478 except KeyError as inst:
1451 raise TemplateNotFound(_('"%s" not in template map') %
1479 raise TemplateNotFound(_('"%s" not in template map') %
1452 inst.args[0])
1480 inst.args[0])
1453 except IOError as inst:
1481 except IOError as inst:
1454 raise IOError(inst.args[0], _('template file %s: %s') %
1482 raise IOError(inst.args[0], _('template file %s: %s') %
1455 (self.map[t][1], inst.args[1]))
1483 (self.map[t][1], inst.args[1]))
1456 return self.cache[t]
1484 return self.cache[t]
1457
1485
1458 def render(self, mapping):
1486 def render(self, mapping):
1459 """Render the default unnamed template and return result as string"""
1487 """Render the default unnamed template and return result as string"""
1460 mapping = pycompat.strkwargs(mapping)
1488 mapping = pycompat.strkwargs(mapping)
1461 return stringify(self('', **mapping))
1489 return stringify(self('', **mapping))
1462
1490
1463 def __call__(self, t, **mapping):
1491 def __call__(self, t, **mapping):
1464 mapping = pycompat.byteskwargs(mapping)
1492 mapping = pycompat.byteskwargs(mapping)
1465 ttype = t in self.map and self.map[t][0] or 'default'
1493 ttype = t in self.map and self.map[t][0] or 'default'
1466 if ttype not in self.ecache:
1494 if ttype not in self.ecache:
1467 try:
1495 try:
1468 ecls = engines[ttype]
1496 ecls = engines[ttype]
1469 except KeyError:
1497 except KeyError:
1470 raise error.Abort(_('invalid template engine: %s') % ttype)
1498 raise error.Abort(_('invalid template engine: %s') % ttype)
1471 self.ecache[ttype] = ecls(self.load, self.filters, self.defaults,
1499 self.ecache[ttype] = ecls(self.load, self.filters, self.defaults,
1472 self._aliases)
1500 self._resources, self._aliases)
1473 proc = self.ecache[ttype]
1501 proc = self.ecache[ttype]
1474
1502
1475 stream = proc.process(t, mapping)
1503 stream = proc.process(t, mapping)
1476 if self.minchunk:
1504 if self.minchunk:
1477 stream = util.increasingchunks(stream, min=self.minchunk,
1505 stream = util.increasingchunks(stream, min=self.minchunk,
1478 max=self.maxchunk)
1506 max=self.maxchunk)
1479 return stream
1507 return stream
1480
1508
1481 def templatepaths():
1509 def templatepaths():
1482 '''return locations used for template files.'''
1510 '''return locations used for template files.'''
1483 pathsrel = ['templates']
1511 pathsrel = ['templates']
1484 paths = [os.path.normpath(os.path.join(util.datapath, f))
1512 paths = [os.path.normpath(os.path.join(util.datapath, f))
1485 for f in pathsrel]
1513 for f in pathsrel]
1486 return [p for p in paths if os.path.isdir(p)]
1514 return [p for p in paths if os.path.isdir(p)]
1487
1515
1488 def templatepath(name):
1516 def templatepath(name):
1489 '''return location of template file. returns None if not found.'''
1517 '''return location of template file. returns None if not found.'''
1490 for p in templatepaths():
1518 for p in templatepaths():
1491 f = os.path.join(p, name)
1519 f = os.path.join(p, name)
1492 if os.path.exists(f):
1520 if os.path.exists(f):
1493 return f
1521 return f
1494 return None
1522 return None
1495
1523
1496 def stylemap(styles, paths=None):
1524 def stylemap(styles, paths=None):
1497 """Return path to mapfile for a given style.
1525 """Return path to mapfile for a given style.
1498
1526
1499 Searches mapfile in the following locations:
1527 Searches mapfile in the following locations:
1500 1. templatepath/style/map
1528 1. templatepath/style/map
1501 2. templatepath/map-style
1529 2. templatepath/map-style
1502 3. templatepath/map
1530 3. templatepath/map
1503 """
1531 """
1504
1532
1505 if paths is None:
1533 if paths is None:
1506 paths = templatepaths()
1534 paths = templatepaths()
1507 elif isinstance(paths, str):
1535 elif isinstance(paths, str):
1508 paths = [paths]
1536 paths = [paths]
1509
1537
1510 if isinstance(styles, str):
1538 if isinstance(styles, str):
1511 styles = [styles]
1539 styles = [styles]
1512
1540
1513 for style in styles:
1541 for style in styles:
1514 # only plain name is allowed to honor template paths
1542 # only plain name is allowed to honor template paths
1515 if (not style
1543 if (not style
1516 or style in (os.curdir, os.pardir)
1544 or style in (os.curdir, os.pardir)
1517 or pycompat.ossep in style
1545 or pycompat.ossep in style
1518 or pycompat.osaltsep and pycompat.osaltsep in style):
1546 or pycompat.osaltsep and pycompat.osaltsep in style):
1519 continue
1547 continue
1520 locations = [os.path.join(style, 'map'), 'map-' + style]
1548 locations = [os.path.join(style, 'map'), 'map-' + style]
1521 locations.append('map')
1549 locations.append('map')
1522
1550
1523 for path in paths:
1551 for path in paths:
1524 for location in locations:
1552 for location in locations:
1525 mapfile = os.path.join(path, location)
1553 mapfile = os.path.join(path, location)
1526 if os.path.isfile(mapfile):
1554 if os.path.isfile(mapfile):
1527 return style, mapfile
1555 return style, mapfile
1528
1556
1529 raise RuntimeError("No hgweb templates found in %r" % paths)
1557 raise RuntimeError("No hgweb templates found in %r" % paths)
1530
1558
1531 def loadfunction(ui, extname, registrarobj):
1559 def loadfunction(ui, extname, registrarobj):
1532 """Load template function from specified registrarobj
1560 """Load template function from specified registrarobj
1533 """
1561 """
1534 for name, func in registrarobj._table.iteritems():
1562 for name, func in registrarobj._table.iteritems():
1535 funcs[name] = func
1563 funcs[name] = func
1536
1564
1537 # tell hggettext to extract docstrings from these functions:
1565 # tell hggettext to extract docstrings from these functions:
1538 i18nfunctions = funcs.values()
1566 i18nfunctions = funcs.values()
@@ -1,54 +1,57 b''
1
1
2 $ cat > engine.py << EOF
2 $ cat > engine.py << EOF
3 >
3 >
4 > from mercurial import templater
4 > from mercurial import templater
5 >
5 >
6 > class mytemplater(object):
6 > class mytemplater(object):
7 > def __init__(self, loader, filters, defaults, aliases):
7 > def __init__(self, loader, filters, defaults, resources, aliases):
8 > self.loader = loader
8 > self.loader = loader
9 > self._resources = resources
9 >
10 >
10 > def process(self, t, map):
11 > def process(self, t, map):
11 > tmpl = self.loader(t)
12 > tmpl = self.loader(t)
12 > for k, v in map.iteritems():
13 > for k, v in map.iteritems():
13 > if k in ('templ', 'ctx', 'repo', 'revcache', 'cache', 'troubles'):
14 > if k in ('templ', 'ctx', 'repo', 'revcache', 'cache', 'troubles'):
14 > continue
15 > continue
15 > if hasattr(v, '__call__'):
16 > if hasattr(v, '__call__'):
16 > v = v(**map)
17 > props = self._resources.copy()
18 > props.update(map)
19 > v = v(**props)
17 > v = templater.stringify(v)
20 > v = templater.stringify(v)
18 > tmpl = tmpl.replace('{{%s}}' % k, v)
21 > tmpl = tmpl.replace('{{%s}}' % k, v)
19 > yield tmpl
22 > yield tmpl
20 >
23 >
21 > templater.engines['my'] = mytemplater
24 > templater.engines['my'] = mytemplater
22 > EOF
25 > EOF
23 $ hg init test
26 $ hg init test
24 $ echo '[extensions]' > test/.hg/hgrc
27 $ echo '[extensions]' > test/.hg/hgrc
25 $ echo "engine = `pwd`/engine.py" >> test/.hg/hgrc
28 $ echo "engine = `pwd`/engine.py" >> test/.hg/hgrc
26 $ cd test
29 $ cd test
27 $ cat > mymap << EOF
30 $ cat > mymap << EOF
28 > changeset = my:changeset.txt
31 > changeset = my:changeset.txt
29 > EOF
32 > EOF
30 $ cat > changeset.txt << EOF
33 $ cat > changeset.txt << EOF
31 > {{rev}} {{node}} {{author}}
34 > {{rev}} {{node}} {{author}}
32 > EOF
35 > EOF
33 $ hg ci -Ama
36 $ hg ci -Ama
34 adding changeset.txt
37 adding changeset.txt
35 adding mymap
38 adding mymap
36 $ hg log --style=./mymap
39 $ hg log --style=./mymap
37 0 97e5f848f0936960273bbf75be6388cd0350a32b test
40 0 97e5f848f0936960273bbf75be6388cd0350a32b test
38
41
39 $ cat > changeset.txt << EOF
42 $ cat > changeset.txt << EOF
40 > {{p1rev}} {{p1node}} {{p2rev}} {{p2node}}
43 > {{p1rev}} {{p1node}} {{p2rev}} {{p2node}}
41 > EOF
44 > EOF
42 $ hg ci -Ama
45 $ hg ci -Ama
43 $ hg log --style=./mymap
46 $ hg log --style=./mymap
44 0 97e5f848f0936960273bbf75be6388cd0350a32b -1 0000000000000000000000000000000000000000
47 0 97e5f848f0936960273bbf75be6388cd0350a32b -1 0000000000000000000000000000000000000000
45 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000
48 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000
46
49
47 invalid engine type:
50 invalid engine type:
48
51
49 $ echo 'changeset = unknown:changeset.txt' > unknownenginemap
52 $ echo 'changeset = unknown:changeset.txt' > unknownenginemap
50 $ hg log --style=./unknownenginemap
53 $ hg log --style=./unknownenginemap
51 abort: invalid template engine: unknown
54 abort: invalid template engine: unknown
52 [255]
55 [255]
53
56
54 $ cd ..
57 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now