##// END OF EJS Templates
templater: port formatnode filter from changeset_templater...
Yuya Nishihara -
r31169:48a8b2e5 default
parent child Browse files
Show More
@@ -1,3478 +1,3475
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import tempfile
13 import tempfile
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 bin,
17 bin,
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23
23
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 changelog,
26 changelog,
27 copies,
27 copies,
28 crecord as crecordmod,
28 crecord as crecordmod,
29 encoding,
29 encoding,
30 error,
30 error,
31 formatter,
31 formatter,
32 graphmod,
32 graphmod,
33 lock as lockmod,
33 lock as lockmod,
34 match as matchmod,
34 match as matchmod,
35 obsolete,
35 obsolete,
36 patch,
36 patch,
37 pathutil,
37 pathutil,
38 phases,
38 phases,
39 pycompat,
39 pycompat,
40 repair,
40 repair,
41 revlog,
41 revlog,
42 revset,
42 revset,
43 scmutil,
43 scmutil,
44 smartset,
44 smartset,
45 templatekw,
45 templatekw,
46 templater,
46 templater,
47 util,
47 util,
48 )
48 )
stringio = util.stringio

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
54
54
def ishunk(x):
    """Report whether x is a record hunk (curses or plain-patch flavor)."""
    hunkclasses = (crecordmod.uihunk, patch.recordhunk)
    return isinstance(x, hunkclasses)
58
58
def newandmodified(chunks, originalchunks):
    """Return the set of filenames introduced as new files whose hunks were
    also modified interactively (i.e. hunks not present in originalchunks).

    Such files need special backup handling because the working directory
    has no pristine version of them to revert to.
    """
    newlyaddedandmodifiedfiles = set()
    for chunk in chunks:
        if (ishunk(chunk) and chunk.header.isnewfile()
            and chunk not in originalchunks):
            newlyaddedandmodifiedfiles.add(chunk.header.filename())
    return newlyaddedandmodifiedfiles
66
66
def parsealiases(cmd):
    """Split a command-table key into its list of alias names.

    The leading '^' marker (meaning "show in short help") is discarded and
    the remaining '|'-separated names are returned in their original order.
    """
    names = cmd.lstrip("^")
    return names.split("|")
69
69
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original ui.write so the caller can restore it afterwards
    (see recordfilter for the restore in a finally block).
    """
    def wrapwrite(orig, *args, **kw):
        label = kw.pop('label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    setattr(ui, 'write', wrap)
    return oldwrite
82
82
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user pick hunks, via the curses UI or the plain-text prompt.

    When *usecurses* is set, *testfile* (if any) drives the curses chunk
    selector non-interactively for tests.  Returns whatever the underlying
    filterpatch implementation returns (selected chunks plus options).
    """
    if usecurses:
        if testfile:
            recordfn = crecordmod.testdecorator(testfile,
                                                crecordmod.testchunkselector)
        else:
            recordfn = crecordmod.chunkselector

        return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)

    else:
        return patch.filterpatch(ui, originalhunks, operation)
95
95
def recordfilter(ui, originalhunks, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used to build ui messages to indicate the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest', None)
    # ui.write is wrapped so the interactive diff display is colorized;
    # it must be restored even if chunk selection raises.
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, operation)
    finally:
        ui.write = oldwrite
    return newchunks, newopts
112
112
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes and commit them via *commitfunc*.

    *filterfn* filters the hunks (e.g. recordfilter); *cmdsuggest* is the
    command name to suggest when run non-interactively; *backupall* forces
    backing up every changed file instead of only the ones being recorded.
    Raises error.Abort when the ui is not interactive.
    """
    from . import merge as mergemod
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # header-less chunks have no files() method
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in
                        newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        extra={"suffix": ".diff"},
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this is racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup; never mask the original outcome
                pass

    def recordinwlock(ui, repo, message, match, opts):
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
294
294
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.

    With strict=False, a unique prefix of an alias also matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for e in keys:
        aliases = parsealiases(e)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            # prefix match against each alias
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    # only fall back to debug commands when nothing else matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
332
332
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string.

    Raises error.AmbiguousCommand when several commands match and
    error.UnknownCommand when none do.
    """
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        # sorted() rather than keys().sort(): dict views (Python 3) have
        # no sort() method, and sorted() is equivalent on Python 2
        clist = sorted(choice)
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        # next(iter(...)) rather than values()[0]: views are not indexable
        return next(iter(choice.values()))

    raise error.UnknownCommand(cmd, allcmds)
349
349
def findrepo(p):
    """Walk up from directory *p* looking for a repository root.

    Returns the first ancestor (including *p* itself) that contains a
    '.hg' directory, or None when the filesystem root is reached without
    finding one.
    """
    while True:
        if os.path.isdir(os.path.join(p, ".hg")):
            return p
        parent = os.path.dirname(p)
        if parent == p:
            # reached the filesystem root
            return None
        p = parent
357
357
def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.

    Subrepositories are checked recursively.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise error.Abort(_('uncommitted changes'), hint=hint)
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)
375
375
def logmessage(ui, opts):
    """ get the log message according to -m and -l option

    Returns the message string, or None when neither option was given.
    Aborts when both are given, or when the logfile cannot be read.
    """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                # '-' means read the message from stdin
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, inst.strerror))
    return message
394
394
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a changectx: it is a merge when it has two parents
        ismerge = len(ctxorbool.parents()) > 1
    suffix = ".merge" if ismerge else ".normal"
    return baseformname + suffix
411
411
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        # force the editor: either the user asked, or MQ-specific hooks
        # (finishdesc/extramsg) require it
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor
442
442
def loglimit(opts):
    """get the log limit according to option -l/--limit

    Returns the limit as a positive int, or None when no limit was given.
    Aborts on a non-integer or non-positive value.
    """
    limit = opts.get('limit')
    if not limit:
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise error.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_('limit must be positive'))
    return limit
456
456
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand '%'-escapes in the output-filename pattern *pat*.

    Supported escapes: %% literal '%', %b repo basename, and — when the
    corresponding argument is provided — %H/%h/%R/%r (node hashes/revs),
    %m (sanitized desc), %N/%n (total/seqno), %s/%d/%p (pathname parts).
    Aborts on an escape that is not applicable with the given arguments.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', str(desc))
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
            # %r is zero-padded to revwidth, unlike %R
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad seqno to the width of the total count
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
502
502
503 class _unclosablefile(object):
503 class _unclosablefile(object):
504 def __init__(self, fp):
504 def __init__(self, fp):
505 self._fp = fp
505 self._fp = fp
506
506
507 def close(self):
507 def close(self):
508 pass
508 pass
509
509
510 def __iter__(self):
510 def __iter__(self):
511 return iter(self._fp)
511 return iter(self._fp)
512
512
513 def __getattr__(self, attr):
513 def __getattr__(self, attr):
514 return getattr(self._fp, attr)
514 return getattr(self._fp, attr)
515
515
516 def __enter__(self):
516 def __enter__(self):
517 return self
517 return self
518
518
519 def __exit__(self, exc_type, exc_value, exc_tb):
519 def __exit__(self, exc_type, exc_value, exc_tb):
520 pass
520 pass
521
521
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open the file described by the output pattern *pat*.

    '-' (or an empty pat) maps to the ui's stdout/stdin wrapped so that
    close() is a no-op; a file-like pat is returned as-is when compatible
    with *mode*; otherwise pat is expanded via makefilename and opened.
    *modemap*, when given, tracks per-filename modes so that repeated
    writes to the same expanded name append instead of truncating.
    """

    writable = mode not in ('r', 'rb')

    if not pat or pat == '-':
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        # never close the ui streams
        return _unclosablefile(fp)
    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # first write truncates, subsequent writes append
            modemap[fn] = 'ab'
    return open(fn, mode)
544
544
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    # --changelog, --manifest and --dir are mutually constrained options;
    # validate the combination before touching the repository.
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            # unfiltered so hidden/obsolete revisions stay reachable
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            # an empty dirlog means the directory has no manifest revlog;
            # fall through to the error path below
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        # fall back to opening a raw revlog directly from disk; this works
        # even without a repository. file_ is expected to name the revlog
        # file itself (its last two characters are replaced with ".i").
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        r = revlog.revlog(scmutil.opener(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
589
589
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or rename, when ``rename`` is True) working directory files.

    ``pats`` holds the source patterns followed by the destination; the
    last element is popped off as the destination. Returns True when at
    least one file could not be copied, False otherwise.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}  # abstarget -> abssrc, used for collision detection
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # Expand one source pattern into (abs, rel, exact) triples,
        # warning about (and skipping) unmanaged ('?') and removed ('r')
        # files; removed files are acceptable sources with --after.
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # Perform one copy/rename. Returns True on failure (counted by the
        # caller), None otherwise.
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                # case-only rename (a => A): same file after normalization
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            # actually copy/move the file on disk
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # case-only rename: go through a temporary name so the
                    # rename works on case-insensitive filesystems
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            # NOTE: srcexists is only bound on the actual-copy path above;
            # short-circuit on 'not after' keeps this safe with --after
            if not after and srcexists and not samefile:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # score how many sources already exist under dest when
                    # stripped at 'striplen'; higher score wins below
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
835
835
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
856
856
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it a simple code translation from
    the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a (message, node, rejects) tuple; node is None when nothing
    was committed, rejects is True when --partial left reject files.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    # command-line options take precedence over patch header metadata
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    # --similarity is given as a percentage; patch.patch() wants a ratio
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        # patch.extract() found no patch content
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # apply the patch to the working directory
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except patch.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                if partial:
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
                extra = {}
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                try:
                    if partial:
                        # a partial import may apply no hunks at all; still
                        # record a commit so rejects can be fixed up
                        repo.ui.setconfig('ui', 'allowemptycommit', True)
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                    for idfunc in extrapostimport:
                        extrapostimportmap[idfunc](repo[n])
                finally:
                    repo.ui.restoreconfig(allowemptyback)
        else:
            # --bypass: commit directly from a filestore, without touching
            # the working directory
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except patch.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.makememctx(repo, (p1.node(), p2.node()),
                                            message,
                                            user,
                                            date,
                                            branch, files, store,
                                            editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        os.unlink(tmpname)
1026
1026
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1034
1034
1035 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1035 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1036 opts=None, match=None):
1036 opts=None, match=None):
1037 '''export changesets as hg patches.'''
1037 '''export changesets as hg patches.'''
1038
1038
1039 total = len(revs)
1039 total = len(revs)
1040 revwidth = max([len(str(rev)) for rev in revs])
1040 revwidth = max([len(str(rev)) for rev in revs])
1041 filemode = {}
1041 filemode = {}
1042
1042
1043 def single(rev, seqno, fp):
1043 def single(rev, seqno, fp):
1044 ctx = repo[rev]
1044 ctx = repo[rev]
1045 node = ctx.node()
1045 node = ctx.node()
1046 parents = [p.node() for p in ctx.parents() if p]
1046 parents = [p.node() for p in ctx.parents() if p]
1047 branch = ctx.branch()
1047 branch = ctx.branch()
1048 if switch_parent:
1048 if switch_parent:
1049 parents.reverse()
1049 parents.reverse()
1050
1050
1051 if parents:
1051 if parents:
1052 prev = parents[0]
1052 prev = parents[0]
1053 else:
1053 else:
1054 prev = nullid
1054 prev = nullid
1055
1055
1056 shouldclose = False
1056 shouldclose = False
1057 if not fp and len(template) > 0:
1057 if not fp and len(template) > 0:
1058 desc_lines = ctx.description().rstrip().split('\n')
1058 desc_lines = ctx.description().rstrip().split('\n')
1059 desc = desc_lines[0] #Commit always has a first line.
1059 desc = desc_lines[0] #Commit always has a first line.
1060 fp = makefileobj(repo, template, node, desc=desc, total=total,
1060 fp = makefileobj(repo, template, node, desc=desc, total=total,
1061 seqno=seqno, revwidth=revwidth, mode='wb',
1061 seqno=seqno, revwidth=revwidth, mode='wb',
1062 modemap=filemode)
1062 modemap=filemode)
1063 shouldclose = True
1063 shouldclose = True
1064 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1064 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1065 repo.ui.note("%s\n" % fp.name)
1065 repo.ui.note("%s\n" % fp.name)
1066
1066
1067 if not fp:
1067 if not fp:
1068 write = repo.ui.write
1068 write = repo.ui.write
1069 else:
1069 else:
1070 def write(s, **kw):
1070 def write(s, **kw):
1071 fp.write(s)
1071 fp.write(s)
1072
1072
1073 write("# HG changeset patch\n")
1073 write("# HG changeset patch\n")
1074 write("# User %s\n" % ctx.user())
1074 write("# User %s\n" % ctx.user())
1075 write("# Date %d %d\n" % ctx.date())
1075 write("# Date %d %d\n" % ctx.date())
1076 write("# %s\n" % util.datestr(ctx.date()))
1076 write("# %s\n" % util.datestr(ctx.date()))
1077 if branch and branch != 'default':
1077 if branch and branch != 'default':
1078 write("# Branch %s\n" % branch)
1078 write("# Branch %s\n" % branch)
1079 write("# Node ID %s\n" % hex(node))
1079 write("# Node ID %s\n" % hex(node))
1080 write("# Parent %s\n" % hex(prev))
1080 write("# Parent %s\n" % hex(prev))
1081 if len(parents) > 1:
1081 if len(parents) > 1:
1082 write("# Parent %s\n" % hex(parents[1]))
1082 write("# Parent %s\n" % hex(parents[1]))
1083
1083
1084 for headerid in extraexport:
1084 for headerid in extraexport:
1085 header = extraexportmap[headerid](seqno, ctx)
1085 header = extraexportmap[headerid](seqno, ctx)
1086 if header is not None:
1086 if header is not None:
1087 write('# %s\n' % header)
1087 write('# %s\n' % header)
1088 write(ctx.description().rstrip())
1088 write(ctx.description().rstrip())
1089 write("\n\n")
1089 write("\n\n")
1090
1090
1091 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1091 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1092 write(chunk, label=label)
1092 write(chunk, label=label)
1093
1093
1094 if shouldclose:
1094 if shouldclose:
1095 fp.close()
1095 fp.close()
1096
1096
1097 for seqno, rev in enumerate(revs):
1097 for seqno, rev in enumerate(revs):
1098 single(rev, seqno + 1, fp)
1098 single(rev, seqno + 1, fp)
1099
1099
1100 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1100 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1101 changes=None, stat=False, fp=None, prefix='',
1101 changes=None, stat=False, fp=None, prefix='',
1102 root='', listsubrepos=False):
1102 root='', listsubrepos=False):
1103 '''show diff or diffstat.'''
1103 '''show diff or diffstat.'''
1104 if fp is None:
1104 if fp is None:
1105 write = ui.write
1105 write = ui.write
1106 else:
1106 else:
1107 def write(s, **kw):
1107 def write(s, **kw):
1108 fp.write(s)
1108 fp.write(s)
1109
1109
1110 if root:
1110 if root:
1111 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1111 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1112 else:
1112 else:
1113 relroot = ''
1113 relroot = ''
1114 if relroot != '':
1114 if relroot != '':
1115 # XXX relative roots currently don't work if the root is within a
1115 # XXX relative roots currently don't work if the root is within a
1116 # subrepo
1116 # subrepo
1117 uirelroot = match.uipath(relroot)
1117 uirelroot = match.uipath(relroot)
1118 relroot += '/'
1118 relroot += '/'
1119 for matchroot in match.files():
1119 for matchroot in match.files():
1120 if not matchroot.startswith(relroot):
1120 if not matchroot.startswith(relroot):
1121 ui.warn(_('warning: %s not inside relative root %s\n') % (
1121 ui.warn(_('warning: %s not inside relative root %s\n') % (
1122 match.uipath(matchroot), uirelroot))
1122 match.uipath(matchroot), uirelroot))
1123
1123
1124 if stat:
1124 if stat:
1125 diffopts = diffopts.copy(context=0)
1125 diffopts = diffopts.copy(context=0)
1126 width = 80
1126 width = 80
1127 if not ui.plain():
1127 if not ui.plain():
1128 width = ui.termwidth()
1128 width = ui.termwidth()
1129 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1129 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1130 prefix=prefix, relroot=relroot)
1130 prefix=prefix, relroot=relroot)
1131 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1131 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1132 width=width):
1132 width=width):
1133 write(chunk, label=label)
1133 write(chunk, label=label)
1134 else:
1134 else:
1135 for chunk, label in patch.diffui(repo, node1, node2, match,
1135 for chunk, label in patch.diffui(repo, node1, node2, match,
1136 changes, diffopts, prefix=prefix,
1136 changes, diffopts, prefix=prefix,
1137 relroot=relroot):
1137 relroot=relroot):
1138 write(chunk, label=label)
1138 write(chunk, label=label)
1139
1139
1140 if listsubrepos:
1140 if listsubrepos:
1141 ctx1 = repo[node1]
1141 ctx1 = repo[node1]
1142 ctx2 = repo[node2]
1142 ctx2 = repo[node2]
1143 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1143 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1144 tempnode2 = node2
1144 tempnode2 = node2
1145 try:
1145 try:
1146 if node2 is not None:
1146 if node2 is not None:
1147 tempnode2 = ctx2.substate[subpath][1]
1147 tempnode2 = ctx2.substate[subpath][1]
1148 except KeyError:
1148 except KeyError:
1149 # A subrepo that existed in node1 was deleted between node1 and
1149 # A subrepo that existed in node1 was deleted between node1 and
1150 # node2 (inclusive). Thus, ctx2's substate won't contain that
1150 # node2 (inclusive). Thus, ctx2's substate won't contain that
1151 # subpath. The best we can do is to ignore it.
1151 # subpath. The best we can do is to ignore it.
1152 tempnode2 = None
1152 tempnode2 = None
1153 submatch = matchmod.subdirmatcher(subpath, match)
1153 submatch = matchmod.subdirmatcher(subpath, match)
1154 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1154 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1155 stat=stat, fp=fp, prefix=prefix)
1155 stat=stat, fp=fp, prefix=prefix)
1156
1156
1157 def _changesetlabels(ctx):
1157 def _changesetlabels(ctx):
1158 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1158 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1159 if ctx.troubled():
1159 if ctx.troubled():
1160 labels.append('changeset.troubled')
1160 labels.append('changeset.troubled')
1161 for trouble in ctx.troubles():
1161 for trouble in ctx.troubles():
1162 labels.append('trouble.%s' % trouble)
1162 labels.append('trouble.%s' % trouble)
1163 return ' '.join(labels)
1163 return ' '.join(labels)
1164
1164
1165 class changeset_printer(object):
1165 class changeset_printer(object):
1166 '''show changeset information when templating not requested.'''
1166 '''show changeset information when templating not requested.'''
1167
1167
1168 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1168 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1169 self.ui = ui
1169 self.ui = ui
1170 self.repo = repo
1170 self.repo = repo
1171 self.buffered = buffered
1171 self.buffered = buffered
1172 self.matchfn = matchfn
1172 self.matchfn = matchfn
1173 self.diffopts = diffopts
1173 self.diffopts = diffopts
1174 self.header = {}
1174 self.header = {}
1175 self.hunk = {}
1175 self.hunk = {}
1176 self.lastheader = None
1176 self.lastheader = None
1177 self.footer = None
1177 self.footer = None
1178
1178
1179 def flush(self, ctx):
1179 def flush(self, ctx):
1180 rev = ctx.rev()
1180 rev = ctx.rev()
1181 if rev in self.header:
1181 if rev in self.header:
1182 h = self.header[rev]
1182 h = self.header[rev]
1183 if h != self.lastheader:
1183 if h != self.lastheader:
1184 self.lastheader = h
1184 self.lastheader = h
1185 self.ui.write(h)
1185 self.ui.write(h)
1186 del self.header[rev]
1186 del self.header[rev]
1187 if rev in self.hunk:
1187 if rev in self.hunk:
1188 self.ui.write(self.hunk[rev])
1188 self.ui.write(self.hunk[rev])
1189 del self.hunk[rev]
1189 del self.hunk[rev]
1190 return 1
1190 return 1
1191 return 0
1191 return 0
1192
1192
1193 def close(self):
1193 def close(self):
1194 if self.footer:
1194 if self.footer:
1195 self.ui.write(self.footer)
1195 self.ui.write(self.footer)
1196
1196
1197 def show(self, ctx, copies=None, matchfn=None, **props):
1197 def show(self, ctx, copies=None, matchfn=None, **props):
1198 if self.buffered:
1198 if self.buffered:
1199 self.ui.pushbuffer(labeled=True)
1199 self.ui.pushbuffer(labeled=True)
1200 self._show(ctx, copies, matchfn, props)
1200 self._show(ctx, copies, matchfn, props)
1201 self.hunk[ctx.rev()] = self.ui.popbuffer()
1201 self.hunk[ctx.rev()] = self.ui.popbuffer()
1202 else:
1202 else:
1203 self._show(ctx, copies, matchfn, props)
1203 self._show(ctx, copies, matchfn, props)
1204
1204
1205 def _show(self, ctx, copies, matchfn, props):
1205 def _show(self, ctx, copies, matchfn, props):
1206 '''show a single changeset or file revision'''
1206 '''show a single changeset or file revision'''
1207 changenode = ctx.node()
1207 changenode = ctx.node()
1208 rev = ctx.rev()
1208 rev = ctx.rev()
1209 if self.ui.debugflag:
1209 if self.ui.debugflag:
1210 hexfunc = hex
1210 hexfunc = hex
1211 else:
1211 else:
1212 hexfunc = short
1212 hexfunc = short
1213 # as of now, wctx.node() and wctx.rev() return None, but we want to
1213 # as of now, wctx.node() and wctx.rev() return None, but we want to
1214 # show the same values as {node} and {rev} templatekw
1214 # show the same values as {node} and {rev} templatekw
1215 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1215 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1216
1216
1217 if self.ui.quiet:
1217 if self.ui.quiet:
1218 self.ui.write("%d:%s\n" % revnode, label='log.node')
1218 self.ui.write("%d:%s\n" % revnode, label='log.node')
1219 return
1219 return
1220
1220
1221 date = util.datestr(ctx.date())
1221 date = util.datestr(ctx.date())
1222
1222
1223 # i18n: column positioning for "hg log"
1223 # i18n: column positioning for "hg log"
1224 self.ui.write(_("changeset: %d:%s\n") % revnode,
1224 self.ui.write(_("changeset: %d:%s\n") % revnode,
1225 label=_changesetlabels(ctx))
1225 label=_changesetlabels(ctx))
1226
1226
1227 # branches are shown first before any other names due to backwards
1227 # branches are shown first before any other names due to backwards
1228 # compatibility
1228 # compatibility
1229 branch = ctx.branch()
1229 branch = ctx.branch()
1230 # don't show the default branch name
1230 # don't show the default branch name
1231 if branch != 'default':
1231 if branch != 'default':
1232 # i18n: column positioning for "hg log"
1232 # i18n: column positioning for "hg log"
1233 self.ui.write(_("branch: %s\n") % branch,
1233 self.ui.write(_("branch: %s\n") % branch,
1234 label='log.branch')
1234 label='log.branch')
1235
1235
1236 for nsname, ns in self.repo.names.iteritems():
1236 for nsname, ns in self.repo.names.iteritems():
1237 # branches has special logic already handled above, so here we just
1237 # branches has special logic already handled above, so here we just
1238 # skip it
1238 # skip it
1239 if nsname == 'branches':
1239 if nsname == 'branches':
1240 continue
1240 continue
1241 # we will use the templatename as the color name since those two
1241 # we will use the templatename as the color name since those two
1242 # should be the same
1242 # should be the same
1243 for name in ns.names(self.repo, changenode):
1243 for name in ns.names(self.repo, changenode):
1244 self.ui.write(ns.logfmt % name,
1244 self.ui.write(ns.logfmt % name,
1245 label='log.%s' % ns.colorname)
1245 label='log.%s' % ns.colorname)
1246 if self.ui.debugflag:
1246 if self.ui.debugflag:
1247 # i18n: column positioning for "hg log"
1247 # i18n: column positioning for "hg log"
1248 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1248 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1249 label='log.phase')
1249 label='log.phase')
1250 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1250 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1251 label = 'log.parent changeset.%s' % pctx.phasestr()
1251 label = 'log.parent changeset.%s' % pctx.phasestr()
1252 # i18n: column positioning for "hg log"
1252 # i18n: column positioning for "hg log"
1253 self.ui.write(_("parent: %d:%s\n")
1253 self.ui.write(_("parent: %d:%s\n")
1254 % (pctx.rev(), hexfunc(pctx.node())),
1254 % (pctx.rev(), hexfunc(pctx.node())),
1255 label=label)
1255 label=label)
1256
1256
1257 if self.ui.debugflag and rev is not None:
1257 if self.ui.debugflag and rev is not None:
1258 mnode = ctx.manifestnode()
1258 mnode = ctx.manifestnode()
1259 # i18n: column positioning for "hg log"
1259 # i18n: column positioning for "hg log"
1260 self.ui.write(_("manifest: %d:%s\n") %
1260 self.ui.write(_("manifest: %d:%s\n") %
1261 (self.repo.manifestlog._revlog.rev(mnode),
1261 (self.repo.manifestlog._revlog.rev(mnode),
1262 hex(mnode)),
1262 hex(mnode)),
1263 label='ui.debug log.manifest')
1263 label='ui.debug log.manifest')
1264 # i18n: column positioning for "hg log"
1264 # i18n: column positioning for "hg log"
1265 self.ui.write(_("user: %s\n") % ctx.user(),
1265 self.ui.write(_("user: %s\n") % ctx.user(),
1266 label='log.user')
1266 label='log.user')
1267 # i18n: column positioning for "hg log"
1267 # i18n: column positioning for "hg log"
1268 self.ui.write(_("date: %s\n") % date,
1268 self.ui.write(_("date: %s\n") % date,
1269 label='log.date')
1269 label='log.date')
1270
1270
1271 if ctx.troubled():
1271 if ctx.troubled():
1272 # i18n: column positioning for "hg log"
1272 # i18n: column positioning for "hg log"
1273 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1273 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1274 label='log.trouble')
1274 label='log.trouble')
1275
1275
1276 if self.ui.debugflag:
1276 if self.ui.debugflag:
1277 files = ctx.p1().status(ctx)[:3]
1277 files = ctx.p1().status(ctx)[:3]
1278 for key, value in zip([# i18n: column positioning for "hg log"
1278 for key, value in zip([# i18n: column positioning for "hg log"
1279 _("files:"),
1279 _("files:"),
1280 # i18n: column positioning for "hg log"
1280 # i18n: column positioning for "hg log"
1281 _("files+:"),
1281 _("files+:"),
1282 # i18n: column positioning for "hg log"
1282 # i18n: column positioning for "hg log"
1283 _("files-:")], files):
1283 _("files-:")], files):
1284 if value:
1284 if value:
1285 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1285 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1286 label='ui.debug log.files')
1286 label='ui.debug log.files')
1287 elif ctx.files() and self.ui.verbose:
1287 elif ctx.files() and self.ui.verbose:
1288 # i18n: column positioning for "hg log"
1288 # i18n: column positioning for "hg log"
1289 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1289 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1290 label='ui.note log.files')
1290 label='ui.note log.files')
1291 if copies and self.ui.verbose:
1291 if copies and self.ui.verbose:
1292 copies = ['%s (%s)' % c for c in copies]
1292 copies = ['%s (%s)' % c for c in copies]
1293 # i18n: column positioning for "hg log"
1293 # i18n: column positioning for "hg log"
1294 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1294 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1295 label='ui.note log.copies')
1295 label='ui.note log.copies')
1296
1296
1297 extra = ctx.extra()
1297 extra = ctx.extra()
1298 if extra and self.ui.debugflag:
1298 if extra and self.ui.debugflag:
1299 for key, value in sorted(extra.items()):
1299 for key, value in sorted(extra.items()):
1300 # i18n: column positioning for "hg log"
1300 # i18n: column positioning for "hg log"
1301 self.ui.write(_("extra: %s=%s\n")
1301 self.ui.write(_("extra: %s=%s\n")
1302 % (key, value.encode('string_escape')),
1302 % (key, value.encode('string_escape')),
1303 label='ui.debug log.extra')
1303 label='ui.debug log.extra')
1304
1304
1305 description = ctx.description().strip()
1305 description = ctx.description().strip()
1306 if description:
1306 if description:
1307 if self.ui.verbose:
1307 if self.ui.verbose:
1308 self.ui.write(_("description:\n"),
1308 self.ui.write(_("description:\n"),
1309 label='ui.note log.description')
1309 label='ui.note log.description')
1310 self.ui.write(description,
1310 self.ui.write(description,
1311 label='ui.note log.description')
1311 label='ui.note log.description')
1312 self.ui.write("\n\n")
1312 self.ui.write("\n\n")
1313 else:
1313 else:
1314 # i18n: column positioning for "hg log"
1314 # i18n: column positioning for "hg log"
1315 self.ui.write(_("summary: %s\n") %
1315 self.ui.write(_("summary: %s\n") %
1316 description.splitlines()[0],
1316 description.splitlines()[0],
1317 label='log.summary')
1317 label='log.summary')
1318 self.ui.write("\n")
1318 self.ui.write("\n")
1319
1319
1320 self.showpatch(ctx, matchfn)
1320 self.showpatch(ctx, matchfn)
1321
1321
1322 def showpatch(self, ctx, matchfn):
1322 def showpatch(self, ctx, matchfn):
1323 if not matchfn:
1323 if not matchfn:
1324 matchfn = self.matchfn
1324 matchfn = self.matchfn
1325 if matchfn:
1325 if matchfn:
1326 stat = self.diffopts.get('stat')
1326 stat = self.diffopts.get('stat')
1327 diff = self.diffopts.get('patch')
1327 diff = self.diffopts.get('patch')
1328 diffopts = patch.diffallopts(self.ui, self.diffopts)
1328 diffopts = patch.diffallopts(self.ui, self.diffopts)
1329 node = ctx.node()
1329 node = ctx.node()
1330 prev = ctx.p1().node()
1330 prev = ctx.p1().node()
1331 if stat:
1331 if stat:
1332 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1332 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1333 match=matchfn, stat=True)
1333 match=matchfn, stat=True)
1334 if diff:
1334 if diff:
1335 if stat:
1335 if stat:
1336 self.ui.write("\n")
1336 self.ui.write("\n")
1337 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1337 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1338 match=matchfn, stat=False)
1338 match=matchfn, stat=False)
1339 self.ui.write("\n")
1339 self.ui.write("\n")
1340
1340
1341 class jsonchangeset(changeset_printer):
1341 class jsonchangeset(changeset_printer):
1342 '''format changeset information.'''
1342 '''format changeset information.'''
1343
1343
1344 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1344 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1345 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1345 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1346 self.cache = {}
1346 self.cache = {}
1347 self._first = True
1347 self._first = True
1348
1348
1349 def close(self):
1349 def close(self):
1350 if not self._first:
1350 if not self._first:
1351 self.ui.write("\n]\n")
1351 self.ui.write("\n]\n")
1352 else:
1352 else:
1353 self.ui.write("[]\n")
1353 self.ui.write("[]\n")
1354
1354
1355 def _show(self, ctx, copies, matchfn, props):
1355 def _show(self, ctx, copies, matchfn, props):
1356 '''show a single changeset or file revision'''
1356 '''show a single changeset or file revision'''
1357 rev = ctx.rev()
1357 rev = ctx.rev()
1358 if rev is None:
1358 if rev is None:
1359 jrev = jnode = 'null'
1359 jrev = jnode = 'null'
1360 else:
1360 else:
1361 jrev = str(rev)
1361 jrev = str(rev)
1362 jnode = '"%s"' % hex(ctx.node())
1362 jnode = '"%s"' % hex(ctx.node())
1363 j = encoding.jsonescape
1363 j = encoding.jsonescape
1364
1364
1365 if self._first:
1365 if self._first:
1366 self.ui.write("[\n {")
1366 self.ui.write("[\n {")
1367 self._first = False
1367 self._first = False
1368 else:
1368 else:
1369 self.ui.write(",\n {")
1369 self.ui.write(",\n {")
1370
1370
1371 if self.ui.quiet:
1371 if self.ui.quiet:
1372 self.ui.write(('\n "rev": %s') % jrev)
1372 self.ui.write(('\n "rev": %s') % jrev)
1373 self.ui.write((',\n "node": %s') % jnode)
1373 self.ui.write((',\n "node": %s') % jnode)
1374 self.ui.write('\n }')
1374 self.ui.write('\n }')
1375 return
1375 return
1376
1376
1377 self.ui.write(('\n "rev": %s') % jrev)
1377 self.ui.write(('\n "rev": %s') % jrev)
1378 self.ui.write((',\n "node": %s') % jnode)
1378 self.ui.write((',\n "node": %s') % jnode)
1379 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1379 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1380 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1380 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1381 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1381 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1382 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1382 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1383 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1383 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1384
1384
1385 self.ui.write((',\n "bookmarks": [%s]') %
1385 self.ui.write((',\n "bookmarks": [%s]') %
1386 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1386 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1387 self.ui.write((',\n "tags": [%s]') %
1387 self.ui.write((',\n "tags": [%s]') %
1388 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1388 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1389 self.ui.write((',\n "parents": [%s]') %
1389 self.ui.write((',\n "parents": [%s]') %
1390 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1390 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1391
1391
1392 if self.ui.debugflag:
1392 if self.ui.debugflag:
1393 if rev is None:
1393 if rev is None:
1394 jmanifestnode = 'null'
1394 jmanifestnode = 'null'
1395 else:
1395 else:
1396 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1396 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1397 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1397 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1398
1398
1399 self.ui.write((',\n "extra": {%s}') %
1399 self.ui.write((',\n "extra": {%s}') %
1400 ", ".join('"%s": "%s"' % (j(k), j(v))
1400 ", ".join('"%s": "%s"' % (j(k), j(v))
1401 for k, v in ctx.extra().items()))
1401 for k, v in ctx.extra().items()))
1402
1402
1403 files = ctx.p1().status(ctx)
1403 files = ctx.p1().status(ctx)
1404 self.ui.write((',\n "modified": [%s]') %
1404 self.ui.write((',\n "modified": [%s]') %
1405 ", ".join('"%s"' % j(f) for f in files[0]))
1405 ", ".join('"%s"' % j(f) for f in files[0]))
1406 self.ui.write((',\n "added": [%s]') %
1406 self.ui.write((',\n "added": [%s]') %
1407 ", ".join('"%s"' % j(f) for f in files[1]))
1407 ", ".join('"%s"' % j(f) for f in files[1]))
1408 self.ui.write((',\n "removed": [%s]') %
1408 self.ui.write((',\n "removed": [%s]') %
1409 ", ".join('"%s"' % j(f) for f in files[2]))
1409 ", ".join('"%s"' % j(f) for f in files[2]))
1410
1410
1411 elif self.ui.verbose:
1411 elif self.ui.verbose:
1412 self.ui.write((',\n "files": [%s]') %
1412 self.ui.write((',\n "files": [%s]') %
1413 ", ".join('"%s"' % j(f) for f in ctx.files()))
1413 ", ".join('"%s"' % j(f) for f in ctx.files()))
1414
1414
1415 if copies:
1415 if copies:
1416 self.ui.write((',\n "copies": {%s}') %
1416 self.ui.write((',\n "copies": {%s}') %
1417 ", ".join('"%s": "%s"' % (j(k), j(v))
1417 ", ".join('"%s": "%s"' % (j(k), j(v))
1418 for k, v in copies))
1418 for k, v in copies))
1419
1419
1420 matchfn = self.matchfn
1420 matchfn = self.matchfn
1421 if matchfn:
1421 if matchfn:
1422 stat = self.diffopts.get('stat')
1422 stat = self.diffopts.get('stat')
1423 diff = self.diffopts.get('patch')
1423 diff = self.diffopts.get('patch')
1424 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1424 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1425 node, prev = ctx.node(), ctx.p1().node()
1425 node, prev = ctx.node(), ctx.p1().node()
1426 if stat:
1426 if stat:
1427 self.ui.pushbuffer()
1427 self.ui.pushbuffer()
1428 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1428 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1429 match=matchfn, stat=True)
1429 match=matchfn, stat=True)
1430 self.ui.write((',\n "diffstat": "%s"')
1430 self.ui.write((',\n "diffstat": "%s"')
1431 % j(self.ui.popbuffer()))
1431 % j(self.ui.popbuffer()))
1432 if diff:
1432 if diff:
1433 self.ui.pushbuffer()
1433 self.ui.pushbuffer()
1434 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1434 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1435 match=matchfn, stat=False)
1435 match=matchfn, stat=False)
1436 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1436 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1437
1437
1438 self.ui.write("\n }")
1438 self.ui.write("\n }")
1439
1439
1440 class changeset_templater(changeset_printer):
1440 class changeset_templater(changeset_printer):
1441 '''format changeset information.'''
1441 '''format changeset information.'''
1442
1442
1443 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1443 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1444 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1444 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1445 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1446 filters = {'formatnode': formatnode}
1447 defaulttempl = {
1445 defaulttempl = {
1448 'parent': '{rev}:{node|formatnode} ',
1446 'parent': '{rev}:{node|formatnode} ',
1449 'manifest': '{rev}:{node|formatnode}',
1447 'manifest': '{rev}:{node|formatnode}',
1450 'file_copy': '{name} ({source})',
1448 'file_copy': '{name} ({source})',
1451 'envvar': '{key}={value}',
1449 'envvar': '{key}={value}',
1452 'extra': '{key}={value|stringescape}'
1450 'extra': '{key}={value|stringescape}'
1453 }
1451 }
1454 # filecopy is preserved for compatibility reasons
1452 # filecopy is preserved for compatibility reasons
1455 defaulttempl['filecopy'] = defaulttempl['file_copy']
1453 defaulttempl['filecopy'] = defaulttempl['file_copy']
1456 assert not (tmpl and mapfile)
1454 assert not (tmpl and mapfile)
1457 if mapfile:
1455 if mapfile:
1458 self.t = templater.templater.frommapfile(mapfile, filters=filters,
1456 self.t = templater.templater.frommapfile(mapfile,
1459 cache=defaulttempl)
1457 cache=defaulttempl)
1460 else:
1458 else:
1461 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1459 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1462 filters=filters,
1463 cache=defaulttempl)
1460 cache=defaulttempl)
1464
1461
1465 self.cache = {}
1462 self.cache = {}
1466
1463
1467 # find correct templates for current mode
1464 # find correct templates for current mode
1468 tmplmodes = [
1465 tmplmodes = [
1469 (True, None),
1466 (True, None),
1470 (self.ui.verbose, 'verbose'),
1467 (self.ui.verbose, 'verbose'),
1471 (self.ui.quiet, 'quiet'),
1468 (self.ui.quiet, 'quiet'),
1472 (self.ui.debugflag, 'debug'),
1469 (self.ui.debugflag, 'debug'),
1473 ]
1470 ]
1474
1471
1475 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1472 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1476 'docheader': '', 'docfooter': ''}
1473 'docheader': '', 'docfooter': ''}
1477 for mode, postfix in tmplmodes:
1474 for mode, postfix in tmplmodes:
1478 for t in self._parts:
1475 for t in self._parts:
1479 cur = t
1476 cur = t
1480 if postfix:
1477 if postfix:
1481 cur += "_" + postfix
1478 cur += "_" + postfix
1482 if mode and cur in self.t:
1479 if mode and cur in self.t:
1483 self._parts[t] = cur
1480 self._parts[t] = cur
1484
1481
1485 if self._parts['docheader']:
1482 if self._parts['docheader']:
1486 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1483 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1487
1484
1488 def close(self):
1485 def close(self):
1489 if self._parts['docfooter']:
1486 if self._parts['docfooter']:
1490 if not self.footer:
1487 if not self.footer:
1491 self.footer = ""
1488 self.footer = ""
1492 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1489 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1493 return super(changeset_templater, self).close()
1490 return super(changeset_templater, self).close()
1494
1491
1495 def _show(self, ctx, copies, matchfn, props):
1492 def _show(self, ctx, copies, matchfn, props):
1496 '''show a single changeset or file revision'''
1493 '''show a single changeset or file revision'''
1497 props = props.copy()
1494 props = props.copy()
1498 props.update(templatekw.keywords)
1495 props.update(templatekw.keywords)
1499 props['templ'] = self.t
1496 props['templ'] = self.t
1500 props['ctx'] = ctx
1497 props['ctx'] = ctx
1501 props['repo'] = self.repo
1498 props['repo'] = self.repo
1502 props['ui'] = self.repo.ui
1499 props['ui'] = self.repo.ui
1503 props['revcache'] = {'copies': copies}
1500 props['revcache'] = {'copies': copies}
1504 props['cache'] = self.cache
1501 props['cache'] = self.cache
1505
1502
1506 # write header
1503 # write header
1507 if self._parts['header']:
1504 if self._parts['header']:
1508 h = templater.stringify(self.t(self._parts['header'], **props))
1505 h = templater.stringify(self.t(self._parts['header'], **props))
1509 if self.buffered:
1506 if self.buffered:
1510 self.header[ctx.rev()] = h
1507 self.header[ctx.rev()] = h
1511 else:
1508 else:
1512 if self.lastheader != h:
1509 if self.lastheader != h:
1513 self.lastheader = h
1510 self.lastheader = h
1514 self.ui.write(h)
1511 self.ui.write(h)
1515
1512
1516 # write changeset metadata, then patch if requested
1513 # write changeset metadata, then patch if requested
1517 key = self._parts['changeset']
1514 key = self._parts['changeset']
1518 self.ui.write(templater.stringify(self.t(key, **props)))
1515 self.ui.write(templater.stringify(self.t(key, **props)))
1519 self.showpatch(ctx, matchfn)
1516 self.showpatch(ctx, matchfn)
1520
1517
1521 if self._parts['footer']:
1518 if self._parts['footer']:
1522 if not self.footer:
1519 if not self.footer:
1523 self.footer = templater.stringify(
1520 self.footer = templater.stringify(
1524 self.t(self._parts['footer'], **props))
1521 self.t(self._parts['footer'], **props))
1525
1522
1526 def gettemplate(ui, tmpl, style):
1523 def gettemplate(ui, tmpl, style):
1527 """
1524 """
1528 Find the template matching the given template spec or style.
1525 Find the template matching the given template spec or style.
1529 """
1526 """
1530
1527
1531 # ui settings
1528 # ui settings
1532 if not tmpl and not style: # template are stronger than style
1529 if not tmpl and not style: # template are stronger than style
1533 tmpl = ui.config('ui', 'logtemplate')
1530 tmpl = ui.config('ui', 'logtemplate')
1534 if tmpl:
1531 if tmpl:
1535 return templater.unquotestring(tmpl), None
1532 return templater.unquotestring(tmpl), None
1536 else:
1533 else:
1537 style = util.expandpath(ui.config('ui', 'style', ''))
1534 style = util.expandpath(ui.config('ui', 'style', ''))
1538
1535
1539 if not tmpl and style:
1536 if not tmpl and style:
1540 mapfile = style
1537 mapfile = style
1541 if not os.path.split(mapfile)[0]:
1538 if not os.path.split(mapfile)[0]:
1542 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1539 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1543 or templater.templatepath(mapfile))
1540 or templater.templatepath(mapfile))
1544 if mapname:
1541 if mapname:
1545 mapfile = mapname
1542 mapfile = mapname
1546 return None, mapfile
1543 return None, mapfile
1547
1544
1548 if not tmpl:
1545 if not tmpl:
1549 return None, None
1546 return None, None
1550
1547
1551 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1548 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1552
1549
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # a matcher is only needed when diffs or diffstats will be rendered
    if opts.get('patch') or opts.get('stat'):
        matchfn = scmutil.matchall(repo)
    else:
        matchfn = None

    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
    if tmpl or mapfile:
        return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
                                   buffered)
    return changeset_printer(ui, repo, matchfn, opts, buffered)
1578
1575
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('precnode', '%s ', hex(marker.precnode()))
    successors = marker.succnodes()
    fm.condwrite(successors, 'succnodes', '%s ',
                 fm.formatlist([hex(s) for s in successors], name='node'))
    fm.write('flag', '%X ', marker.flags())
    parentnodes = marker.parentnodes()
    if parentnodes is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist([hex(p) for p in parentnodes],
                               name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # the date is rendered separately above, so drop it from the
    # metadata dump (copy first: do not mutate the marker's own dict)
    metadata = marker.metadata().copy()
    metadata.pop('date', None)
    fm.write('metadata', '{%s}',
             fm.formatdict(metadata, fmt='%r: %r', sep=', '))
    fm.plain('\n')
1599
1596
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    matcher = scmutil.matchall(repo)
    matched = {}

    def record(ctx, fns):
        # remember the date of every changeset satisfying the spec
        d = ctx.date()
        if datematch(d[0]):
            matched[ctx.rev()] = d

    # walkchangerevs() calls record() on each window before yielding,
    # so the first yielded rev present in 'matched' is the tipmost hit
    for ctx in walkchangerevs(repo, matcher, {'rev': None}, record):
        rev = ctx.rev()
        if rev in matched:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(matched[rev])))
            return str(rev)

    raise error.Abort(_("revision matching date not found"))
1620
1617
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield exponentially growing window sizes.

    Starts at *windowsize* and doubles after each yield for as long as
    the current size is still below *sizelimit*; from then on the same
    size is yielded forever.
    """
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size *= 2
1626
1623
class FileWalkError(Exception):
    """Raised when file history cannot be walked using filelogs alone.

    walkchangerevs() catches this to fall back to its slow,
    changelog-scanning path.
    """
    pass
1629
1626
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    'fncache' is mutated in place: each wanted rev is mapped to the
    list of matched files it touches.  With 'follow', copy/rename
    sources discovered along the way are walked as well.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        # NB: deliberately shadows the enclosing 'revs' argument
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None) pairs for the matched
        # files first, then for any copy sources queued in 'copies'
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1726
1723
class _followfilter(object):
    """Incremental predicate matching revs related to a starting rev.

    The first revision passed to match() becomes the anchor.  Later
    calls classify a rev as related either walking forward (descendants
    of the anchor) or backward (ancestors of the anchor), maintaining
    the current frontier in 'self.roots'.
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            parents = self.repo.changelog.parentrevs(rev)
            if self.onlyfirst:
                # follow only the first parent of merges
                return parents[0:1]
            return [p for p in parents if p != nullrev]

        if self.startrev == nullrev:
            # first call: anchor on this rev and accept it
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: rev is a descendant iff one of its parents
            # is already known to be related
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
            return False

        # backwards: rev is an ancestor iff it sits on the frontier;
        # replace it there by its own parents
        if not self.roots:
            self.roots.update(realparents(self.startrev))
        if rev in self.roots:
            self.roots.remove(rev)
            self.roots.update(realparents(rev))
            return True
        return False
1764
1761
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # patterns (or exact/prefix matches combined with --removed) cannot
    # be resolved through filelogs and force a changelog scan
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns.  Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): subtracting a list like this only works
                # when 'wanted' supports '-' (the smartset assigned in
                # the match.always() case); a plain set or lazywantedset
                # would not -- confirm prune is only reachable then
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # first pass, in forward order: let the caller gather data
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # second pass, in the requested order: yield the contexts
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
1902
1899
1903 def _makefollowlogfilematcher(repo, files, followfirst):
1900 def _makefollowlogfilematcher(repo, files, followfirst):
1904 # When displaying a revision with --patch --follow FILE, we have
1901 # When displaying a revision with --patch --follow FILE, we have
1905 # to know which file of the revision must be diffed. With
1902 # to know which file of the revision must be diffed. With
1906 # --follow, we want the names of the ancestors of FILE in the
1903 # --follow, we want the names of the ancestors of FILE in the
1907 # revision, stored in "fcache". "fcache" is populated by
1904 # revision, stored in "fcache". "fcache" is populated by
1908 # reproducing the graph traversal already done by --follow revset
1905 # reproducing the graph traversal already done by --follow revset
1909 # and relating revs to file names (which is not "correct" but
1906 # and relating revs to file names (which is not "correct" but
1910 # good enough).
1907 # good enough).
1911 fcache = {}
1908 fcache = {}
1912 fcacheready = [False]
1909 fcacheready = [False]
1913 pctx = repo['.']
1910 pctx = repo['.']
1914
1911
1915 def populate():
1912 def populate():
1916 for fn in files:
1913 for fn in files:
1917 fctx = pctx[fn]
1914 fctx = pctx[fn]
1918 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
1915 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
1919 for c in fctx.ancestors(followfirst=followfirst):
1916 for c in fctx.ancestors(followfirst=followfirst):
1920 fcache.setdefault(c.rev(), set()).add(c.path())
1917 fcache.setdefault(c.rev(), set()).add(c.path())
1921
1918
1922 def filematcher(rev):
1919 def filematcher(rev):
1923 if not fcacheready[0]:
1920 if not fcacheready[0]:
1924 # Lazy initialization
1921 # Lazy initialization
1925 fcacheready[0] = True
1922 fcacheready[0] = True
1926 populate()
1923 populate()
1927 return scmutil.matchfiles(repo, fcache.get(rev, []))
1924 return scmutil.matchfiles(repo, fcache.get(rev, []))
1928
1925
1929 return filematcher
1926 return filematcher
1930
1927
1931 def _makenofollowlogfilematcher(repo, pats, opts):
1928 def _makenofollowlogfilematcher(repo, pats, opts):
1932 '''hook for extensions to override the filematcher for non-follow cases'''
1929 '''hook for extensions to override the filematcher for non-follow cases'''
1933 return None
1930 return None
1934
1931
1935 def _makelogrevset(repo, pats, opts, revs):
1932 def _makelogrevset(repo, pats, opts, revs):
1936 """Return (expr, filematcher) where expr is a revset string built
1933 """Return (expr, filematcher) where expr is a revset string built
1937 from log options and file patterns or None. If --stat or --patch
1934 from log options and file patterns or None. If --stat or --patch
1938 are not passed filematcher is None. Otherwise it is a callable
1935 are not passed filematcher is None. Otherwise it is a callable
1939 taking a revision number and returning a match objects filtering
1936 taking a revision number and returning a match objects filtering
1940 the files to be detailed when displaying the revision.
1937 the files to be detailed when displaying the revision.
1941 """
1938 """
1942 opt2revset = {
1939 opt2revset = {
1943 'no_merges': ('not merge()', None),
1940 'no_merges': ('not merge()', None),
1944 'only_merges': ('merge()', None),
1941 'only_merges': ('merge()', None),
1945 '_ancestors': ('ancestors(%(val)s)', None),
1942 '_ancestors': ('ancestors(%(val)s)', None),
1946 '_fancestors': ('_firstancestors(%(val)s)', None),
1943 '_fancestors': ('_firstancestors(%(val)s)', None),
1947 '_descendants': ('descendants(%(val)s)', None),
1944 '_descendants': ('descendants(%(val)s)', None),
1948 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1945 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1949 '_matchfiles': ('_matchfiles(%(val)s)', None),
1946 '_matchfiles': ('_matchfiles(%(val)s)', None),
1950 'date': ('date(%(val)r)', None),
1947 'date': ('date(%(val)r)', None),
1951 'branch': ('branch(%(val)r)', ' or '),
1948 'branch': ('branch(%(val)r)', ' or '),
1952 '_patslog': ('filelog(%(val)r)', ' or '),
1949 '_patslog': ('filelog(%(val)r)', ' or '),
1953 '_patsfollow': ('follow(%(val)r)', ' or '),
1950 '_patsfollow': ('follow(%(val)r)', ' or '),
1954 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1951 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1955 'keyword': ('keyword(%(val)r)', ' or '),
1952 'keyword': ('keyword(%(val)r)', ' or '),
1956 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1953 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1957 'user': ('user(%(val)r)', ' or '),
1954 'user': ('user(%(val)r)', ' or '),
1958 }
1955 }
1959
1956
1960 opts = dict(opts)
1957 opts = dict(opts)
1961 # follow or not follow?
1958 # follow or not follow?
1962 follow = opts.get('follow') or opts.get('follow_first')
1959 follow = opts.get('follow') or opts.get('follow_first')
1963 if opts.get('follow_first'):
1960 if opts.get('follow_first'):
1964 followfirst = 1
1961 followfirst = 1
1965 else:
1962 else:
1966 followfirst = 0
1963 followfirst = 0
1967 # --follow with FILE behavior depends on revs...
1964 # --follow with FILE behavior depends on revs...
1968 it = iter(revs)
1965 it = iter(revs)
1969 startrev = next(it)
1966 startrev = next(it)
1970 followdescendants = startrev < next(it, startrev)
1967 followdescendants = startrev < next(it, startrev)
1971
1968
1972 # branch and only_branch are really aliases and must be handled at
1969 # branch and only_branch are really aliases and must be handled at
1973 # the same time
1970 # the same time
1974 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1971 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1975 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1972 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1976 # pats/include/exclude are passed to match.match() directly in
1973 # pats/include/exclude are passed to match.match() directly in
1977 # _matchfiles() revset but walkchangerevs() builds its matcher with
1974 # _matchfiles() revset but walkchangerevs() builds its matcher with
1978 # scmutil.match(). The difference is input pats are globbed on
1975 # scmutil.match(). The difference is input pats are globbed on
1979 # platforms without shell expansion (windows).
1976 # platforms without shell expansion (windows).
1980 wctx = repo[None]
1977 wctx = repo[None]
1981 match, pats = scmutil.matchandpats(wctx, pats, opts)
1978 match, pats = scmutil.matchandpats(wctx, pats, opts)
1982 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1979 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1983 opts.get('removed'))
1980 opts.get('removed'))
1984 if not slowpath:
1981 if not slowpath:
1985 for f in match.files():
1982 for f in match.files():
1986 if follow and f not in wctx:
1983 if follow and f not in wctx:
1987 # If the file exists, it may be a directory, so let it
1984 # If the file exists, it may be a directory, so let it
1988 # take the slow path.
1985 # take the slow path.
1989 if os.path.exists(repo.wjoin(f)):
1986 if os.path.exists(repo.wjoin(f)):
1990 slowpath = True
1987 slowpath = True
1991 continue
1988 continue
1992 else:
1989 else:
1993 raise error.Abort(_('cannot follow file not in parent '
1990 raise error.Abort(_('cannot follow file not in parent '
1994 'revision: "%s"') % f)
1991 'revision: "%s"') % f)
1995 filelog = repo.file(f)
1992 filelog = repo.file(f)
1996 if not filelog:
1993 if not filelog:
1997 # A zero count may be a directory or deleted file, so
1994 # A zero count may be a directory or deleted file, so
1998 # try to find matching entries on the slow path.
1995 # try to find matching entries on the slow path.
1999 if follow:
1996 if follow:
2000 raise error.Abort(
1997 raise error.Abort(
2001 _('cannot follow nonexistent file: "%s"') % f)
1998 _('cannot follow nonexistent file: "%s"') % f)
2002 slowpath = True
1999 slowpath = True
2003
2000
2004 # We decided to fall back to the slowpath because at least one
2001 # We decided to fall back to the slowpath because at least one
2005 # of the paths was not a file. Check to see if at least one of them
2002 # of the paths was not a file. Check to see if at least one of them
2006 # existed in history - in that case, we'll continue down the
2003 # existed in history - in that case, we'll continue down the
2007 # slowpath; otherwise, we can turn off the slowpath
2004 # slowpath; otherwise, we can turn off the slowpath
2008 if slowpath:
2005 if slowpath:
2009 for path in match.files():
2006 for path in match.files():
2010 if path == '.' or path in repo.store:
2007 if path == '.' or path in repo.store:
2011 break
2008 break
2012 else:
2009 else:
2013 slowpath = False
2010 slowpath = False
2014
2011
2015 fpats = ('_patsfollow', '_patsfollowfirst')
2012 fpats = ('_patsfollow', '_patsfollowfirst')
2016 fnopats = (('_ancestors', '_fancestors'),
2013 fnopats = (('_ancestors', '_fancestors'),
2017 ('_descendants', '_fdescendants'))
2014 ('_descendants', '_fdescendants'))
2018 if slowpath:
2015 if slowpath:
2019 # See walkchangerevs() slow path.
2016 # See walkchangerevs() slow path.
2020 #
2017 #
2021 # pats/include/exclude cannot be represented as separate
2018 # pats/include/exclude cannot be represented as separate
2022 # revset expressions as their filtering logic applies at file
2019 # revset expressions as their filtering logic applies at file
2023 # level. For instance "-I a -X a" matches a revision touching
2020 # level. For instance "-I a -X a" matches a revision touching
2024 # "a" and "b" while "file(a) and not file(b)" does
2021 # "a" and "b" while "file(a) and not file(b)" does
2025 # not. Besides, filesets are evaluated against the working
2022 # not. Besides, filesets are evaluated against the working
2026 # directory.
2023 # directory.
2027 matchargs = ['r:', 'd:relpath']
2024 matchargs = ['r:', 'd:relpath']
2028 for p in pats:
2025 for p in pats:
2029 matchargs.append('p:' + p)
2026 matchargs.append('p:' + p)
2030 for p in opts.get('include', []):
2027 for p in opts.get('include', []):
2031 matchargs.append('i:' + p)
2028 matchargs.append('i:' + p)
2032 for p in opts.get('exclude', []):
2029 for p in opts.get('exclude', []):
2033 matchargs.append('x:' + p)
2030 matchargs.append('x:' + p)
2034 matchargs = ','.join(('%r' % p) for p in matchargs)
2031 matchargs = ','.join(('%r' % p) for p in matchargs)
2035 opts['_matchfiles'] = matchargs
2032 opts['_matchfiles'] = matchargs
2036 if follow:
2033 if follow:
2037 opts[fnopats[0][followfirst]] = '.'
2034 opts[fnopats[0][followfirst]] = '.'
2038 else:
2035 else:
2039 if follow:
2036 if follow:
2040 if pats:
2037 if pats:
2041 # follow() revset interprets its file argument as a
2038 # follow() revset interprets its file argument as a
2042 # manifest entry, so use match.files(), not pats.
2039 # manifest entry, so use match.files(), not pats.
2043 opts[fpats[followfirst]] = list(match.files())
2040 opts[fpats[followfirst]] = list(match.files())
2044 else:
2041 else:
2045 op = fnopats[followdescendants][followfirst]
2042 op = fnopats[followdescendants][followfirst]
2046 opts[op] = 'rev(%d)' % startrev
2043 opts[op] = 'rev(%d)' % startrev
2047 else:
2044 else:
2048 opts['_patslog'] = list(pats)
2045 opts['_patslog'] = list(pats)
2049
2046
2050 filematcher = None
2047 filematcher = None
2051 if opts.get('patch') or opts.get('stat'):
2048 if opts.get('patch') or opts.get('stat'):
2052 # When following files, track renames via a special matcher.
2049 # When following files, track renames via a special matcher.
2053 # If we're forced to take the slowpath it means we're following
2050 # If we're forced to take the slowpath it means we're following
2054 # at least one pattern/directory, so don't bother with rename tracking.
2051 # at least one pattern/directory, so don't bother with rename tracking.
2055 if follow and not match.always() and not slowpath:
2052 if follow and not match.always() and not slowpath:
2056 # _makefollowlogfilematcher expects its files argument to be
2053 # _makefollowlogfilematcher expects its files argument to be
2057 # relative to the repo root, so use match.files(), not pats.
2054 # relative to the repo root, so use match.files(), not pats.
2058 filematcher = _makefollowlogfilematcher(repo, match.files(),
2055 filematcher = _makefollowlogfilematcher(repo, match.files(),
2059 followfirst)
2056 followfirst)
2060 else:
2057 else:
2061 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2058 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2062 if filematcher is None:
2059 if filematcher is None:
2063 filematcher = lambda rev: match
2060 filematcher = lambda rev: match
2064
2061
2065 expr = []
2062 expr = []
2066 for op, val in sorted(opts.iteritems()):
2063 for op, val in sorted(opts.iteritems()):
2067 if not val:
2064 if not val:
2068 continue
2065 continue
2069 if op not in opt2revset:
2066 if op not in opt2revset:
2070 continue
2067 continue
2071 revop, andor = opt2revset[op]
2068 revop, andor = opt2revset[op]
2072 if '%(val)' not in revop:
2069 if '%(val)' not in revop:
2073 expr.append(revop)
2070 expr.append(revop)
2074 else:
2071 else:
2075 if not isinstance(val, list):
2072 if not isinstance(val, list):
2076 e = revop % {'val': val}
2073 e = revop % {'val': val}
2077 else:
2074 else:
2078 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2075 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2079 expr.append(e)
2076 expr.append(e)
2080
2077
2081 if expr:
2078 if expr:
2082 expr = '(' + ' and '.join(expr) + ')'
2079 expr = '(' + ' and '.join(expr) + ')'
2083 else:
2080 else:
2084 expr = None
2081 expr = None
2085 return expr, filematcher
2082 return expr, filematcher
2086
2083
2087 def _logrevs(repo, opts):
2084 def _logrevs(repo, opts):
2088 # Default --rev value depends on --follow but --follow behavior
2085 # Default --rev value depends on --follow but --follow behavior
2089 # depends on revisions resolved from --rev...
2086 # depends on revisions resolved from --rev...
2090 follow = opts.get('follow') or opts.get('follow_first')
2087 follow = opts.get('follow') or opts.get('follow_first')
2091 if opts.get('rev'):
2088 if opts.get('rev'):
2092 revs = scmutil.revrange(repo, opts['rev'])
2089 revs = scmutil.revrange(repo, opts['rev'])
2093 elif follow and repo.dirstate.p1() == nullid:
2090 elif follow and repo.dirstate.p1() == nullid:
2094 revs = smartset.baseset()
2091 revs = smartset.baseset()
2095 elif follow:
2092 elif follow:
2096 revs = repo.revs('reverse(:.)')
2093 revs = repo.revs('reverse(:.)')
2097 else:
2094 else:
2098 revs = smartset.spanset(repo)
2095 revs = smartset.spanset(repo)
2099 revs.reverse()
2096 revs.reverse()
2100 return revs
2097 return revs
2101
2098
2102 def getgraphlogrevs(repo, pats, opts):
2099 def getgraphlogrevs(repo, pats, opts):
2103 """Return (revs, expr, filematcher) where revs is an iterable of
2100 """Return (revs, expr, filematcher) where revs is an iterable of
2104 revision numbers, expr is a revset string built from log options
2101 revision numbers, expr is a revset string built from log options
2105 and file patterns or None, and used to filter 'revs'. If --stat or
2102 and file patterns or None, and used to filter 'revs'. If --stat or
2106 --patch are not passed filematcher is None. Otherwise it is a
2103 --patch are not passed filematcher is None. Otherwise it is a
2107 callable taking a revision number and returning a match objects
2104 callable taking a revision number and returning a match objects
2108 filtering the files to be detailed when displaying the revision.
2105 filtering the files to be detailed when displaying the revision.
2109 """
2106 """
2110 limit = loglimit(opts)
2107 limit = loglimit(opts)
2111 revs = _logrevs(repo, opts)
2108 revs = _logrevs(repo, opts)
2112 if not revs:
2109 if not revs:
2113 return smartset.baseset(), None, None
2110 return smartset.baseset(), None, None
2114 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2111 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2115 if opts.get('rev'):
2112 if opts.get('rev'):
2116 # User-specified revs might be unsorted, but don't sort before
2113 # User-specified revs might be unsorted, but don't sort before
2117 # _makelogrevset because it might depend on the order of revs
2114 # _makelogrevset because it might depend on the order of revs
2118 if not (revs.isdescending() or revs.istopo()):
2115 if not (revs.isdescending() or revs.istopo()):
2119 revs.sort(reverse=True)
2116 revs.sort(reverse=True)
2120 if expr:
2117 if expr:
2121 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2118 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2122 revs = matcher(repo, revs)
2119 revs = matcher(repo, revs)
2123 if limit is not None:
2120 if limit is not None:
2124 limitedrevs = []
2121 limitedrevs = []
2125 for idx, rev in enumerate(revs):
2122 for idx, rev in enumerate(revs):
2126 if idx >= limit:
2123 if idx >= limit:
2127 break
2124 break
2128 limitedrevs.append(rev)
2125 limitedrevs.append(rev)
2129 revs = smartset.baseset(limitedrevs)
2126 revs = smartset.baseset(limitedrevs)
2130
2127
2131 return revs, expr, filematcher
2128 return revs, expr, filematcher
2132
2129
2133 def getlogrevs(repo, pats, opts):
2130 def getlogrevs(repo, pats, opts):
2134 """Return (revs, expr, filematcher) where revs is an iterable of
2131 """Return (revs, expr, filematcher) where revs is an iterable of
2135 revision numbers, expr is a revset string built from log options
2132 revision numbers, expr is a revset string built from log options
2136 and file patterns or None, and used to filter 'revs'. If --stat or
2133 and file patterns or None, and used to filter 'revs'. If --stat or
2137 --patch are not passed filematcher is None. Otherwise it is a
2134 --patch are not passed filematcher is None. Otherwise it is a
2138 callable taking a revision number and returning a match objects
2135 callable taking a revision number and returning a match objects
2139 filtering the files to be detailed when displaying the revision.
2136 filtering the files to be detailed when displaying the revision.
2140 """
2137 """
2141 limit = loglimit(opts)
2138 limit = loglimit(opts)
2142 revs = _logrevs(repo, opts)
2139 revs = _logrevs(repo, opts)
2143 if not revs:
2140 if not revs:
2144 return smartset.baseset([]), None, None
2141 return smartset.baseset([]), None, None
2145 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2142 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2146 if expr:
2143 if expr:
2147 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2144 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2148 revs = matcher(repo, revs)
2145 revs = matcher(repo, revs)
2149 if limit is not None:
2146 if limit is not None:
2150 limitedrevs = []
2147 limitedrevs = []
2151 for idx, r in enumerate(revs):
2148 for idx, r in enumerate(revs):
2152 if limit <= idx:
2149 if limit <= idx:
2153 break
2150 break
2154 limitedrevs.append(r)
2151 limitedrevs.append(r)
2155 revs = smartset.baseset(limitedrevs)
2152 revs = smartset.baseset(limitedrevs)
2156
2153
2157 return revs, expr, filematcher
2154 return revs, expr, filematcher
2158
2155
2159 def _graphnodeformatter(ui, displayer):
2156 def _graphnodeformatter(ui, displayer):
2160 spec = ui.config('ui', 'graphnodetemplate')
2157 spec = ui.config('ui', 'graphnodetemplate')
2161 if not spec:
2158 if not spec:
2162 return templatekw.showgraphnode # fast path for "{graphnode}"
2159 return templatekw.showgraphnode # fast path for "{graphnode}"
2163
2160
2164 templ = formatter.gettemplater(ui, 'graphnode', spec)
2161 templ = formatter.gettemplater(ui, 'graphnode', spec)
2165 cache = {}
2162 cache = {}
2166 if isinstance(displayer, changeset_templater):
2163 if isinstance(displayer, changeset_templater):
2167 cache = displayer.cache # reuse cache of slow templates
2164 cache = displayer.cache # reuse cache of slow templates
2168 props = templatekw.keywords.copy()
2165 props = templatekw.keywords.copy()
2169 props['templ'] = templ
2166 props['templ'] = templ
2170 props['cache'] = cache
2167 props['cache'] = cache
2171 def formatnode(repo, ctx):
2168 def formatnode(repo, ctx):
2172 props['ctx'] = ctx
2169 props['ctx'] = ctx
2173 props['repo'] = repo
2170 props['repo'] = repo
2174 props['ui'] = repo.ui
2171 props['ui'] = repo.ui
2175 props['revcache'] = {}
2172 props['revcache'] = {}
2176 return templater.stringify(templ('graphnode', **props))
2173 return templater.stringify(templ('graphnode', **props))
2177 return formatnode
2174 return formatnode
2178
2175
2179 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2176 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2180 filematcher=None):
2177 filematcher=None):
2181 formatnode = _graphnodeformatter(ui, displayer)
2178 formatnode = _graphnodeformatter(ui, displayer)
2182 state = graphmod.asciistate()
2179 state = graphmod.asciistate()
2183 styles = state['styles']
2180 styles = state['styles']
2184
2181
2185 # only set graph styling if HGPLAIN is not set.
2182 # only set graph styling if HGPLAIN is not set.
2186 if ui.plain('graph'):
2183 if ui.plain('graph'):
2187 # set all edge styles to |, the default pre-3.8 behaviour
2184 # set all edge styles to |, the default pre-3.8 behaviour
2188 styles.update(dict.fromkeys(styles, '|'))
2185 styles.update(dict.fromkeys(styles, '|'))
2189 else:
2186 else:
2190 edgetypes = {
2187 edgetypes = {
2191 'parent': graphmod.PARENT,
2188 'parent': graphmod.PARENT,
2192 'grandparent': graphmod.GRANDPARENT,
2189 'grandparent': graphmod.GRANDPARENT,
2193 'missing': graphmod.MISSINGPARENT
2190 'missing': graphmod.MISSINGPARENT
2194 }
2191 }
2195 for name, key in edgetypes.items():
2192 for name, key in edgetypes.items():
2196 # experimental config: experimental.graphstyle.*
2193 # experimental config: experimental.graphstyle.*
2197 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2194 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2198 styles[key])
2195 styles[key])
2199 if not styles[key]:
2196 if not styles[key]:
2200 styles[key] = None
2197 styles[key] = None
2201
2198
2202 # experimental config: experimental.graphshorten
2199 # experimental config: experimental.graphshorten
2203 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2200 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2204
2201
2205 for rev, type, ctx, parents in dag:
2202 for rev, type, ctx, parents in dag:
2206 char = formatnode(repo, ctx)
2203 char = formatnode(repo, ctx)
2207 copies = None
2204 copies = None
2208 if getrenamed and ctx.rev():
2205 if getrenamed and ctx.rev():
2209 copies = []
2206 copies = []
2210 for fn in ctx.files():
2207 for fn in ctx.files():
2211 rename = getrenamed(fn, ctx.rev())
2208 rename = getrenamed(fn, ctx.rev())
2212 if rename:
2209 if rename:
2213 copies.append((fn, rename[0]))
2210 copies.append((fn, rename[0]))
2214 revmatchfn = None
2211 revmatchfn = None
2215 if filematcher is not None:
2212 if filematcher is not None:
2216 revmatchfn = filematcher(ctx.rev())
2213 revmatchfn = filematcher(ctx.rev())
2217 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2214 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2218 lines = displayer.hunk.pop(rev).split('\n')
2215 lines = displayer.hunk.pop(rev).split('\n')
2219 if not lines[-1]:
2216 if not lines[-1]:
2220 del lines[-1]
2217 del lines[-1]
2221 displayer.flush(ctx)
2218 displayer.flush(ctx)
2222 edges = edgefn(type, char, lines, state, rev, parents)
2219 edges = edgefn(type, char, lines, state, rev, parents)
2223 for type, char, lines, coldata in edges:
2220 for type, char, lines, coldata in edges:
2224 graphmod.ascii(ui, state, type, char, lines, coldata)
2221 graphmod.ascii(ui, state, type, char, lines, coldata)
2225 displayer.close()
2222 displayer.close()
2226
2223
2227 def graphlog(ui, repo, *pats, **opts):
2224 def graphlog(ui, repo, *pats, **opts):
2228 # Parameters are identical to log command ones
2225 # Parameters are identical to log command ones
2229 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2226 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2230 revdag = graphmod.dagwalker(repo, revs)
2227 revdag = graphmod.dagwalker(repo, revs)
2231
2228
2232 getrenamed = None
2229 getrenamed = None
2233 if opts.get('copies'):
2230 if opts.get('copies'):
2234 endrev = None
2231 endrev = None
2235 if opts.get('rev'):
2232 if opts.get('rev'):
2236 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2233 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2237 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2234 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2238
2235
2239 ui.pager('log')
2236 ui.pager('log')
2240 displayer = show_changeset(ui, repo, opts, buffered=True)
2237 displayer = show_changeset(ui, repo, opts, buffered=True)
2241 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2238 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2242 filematcher)
2239 filematcher)
2243
2240
2244 def checkunsupportedgraphflags(pats, opts):
2241 def checkunsupportedgraphflags(pats, opts):
2245 for op in ["newest_first"]:
2242 for op in ["newest_first"]:
2246 if op in opts and opts[op]:
2243 if op in opts and opts[op]:
2247 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2244 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2248 % op.replace("_", "-"))
2245 % op.replace("_", "-"))
2249
2246
2250 def graphrevs(repo, nodes, opts):
2247 def graphrevs(repo, nodes, opts):
2251 limit = loglimit(opts)
2248 limit = loglimit(opts)
2252 nodes.reverse()
2249 nodes.reverse()
2253 if limit is not None:
2250 if limit is not None:
2254 nodes = nodes[:limit]
2251 nodes = nodes[:limit]
2255 return graphmod.nodes(repo, nodes)
2252 return graphmod.nodes(repo, nodes)
2256
2253
2257 def add(ui, repo, match, prefix, explicitonly, **opts):
2254 def add(ui, repo, match, prefix, explicitonly, **opts):
2258 join = lambda f: os.path.join(prefix, f)
2255 join = lambda f: os.path.join(prefix, f)
2259 bad = []
2256 bad = []
2260
2257
2261 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2258 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2262 names = []
2259 names = []
2263 wctx = repo[None]
2260 wctx = repo[None]
2264 cca = None
2261 cca = None
2265 abort, warn = scmutil.checkportabilityalert(ui)
2262 abort, warn = scmutil.checkportabilityalert(ui)
2266 if abort or warn:
2263 if abort or warn:
2267 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2264 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2268
2265
2269 badmatch = matchmod.badmatch(match, badfn)
2266 badmatch = matchmod.badmatch(match, badfn)
2270 dirstate = repo.dirstate
2267 dirstate = repo.dirstate
2271 # We don't want to just call wctx.walk here, since it would return a lot of
2268 # We don't want to just call wctx.walk here, since it would return a lot of
2272 # clean files, which we aren't interested in and takes time.
2269 # clean files, which we aren't interested in and takes time.
2273 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2270 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2274 True, False, full=False)):
2271 True, False, full=False)):
2275 exact = match.exact(f)
2272 exact = match.exact(f)
2276 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2273 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2277 if cca:
2274 if cca:
2278 cca(f)
2275 cca(f)
2279 names.append(f)
2276 names.append(f)
2280 if ui.verbose or not exact:
2277 if ui.verbose or not exact:
2281 ui.status(_('adding %s\n') % match.rel(f))
2278 ui.status(_('adding %s\n') % match.rel(f))
2282
2279
2283 for subpath in sorted(wctx.substate):
2280 for subpath in sorted(wctx.substate):
2284 sub = wctx.sub(subpath)
2281 sub = wctx.sub(subpath)
2285 try:
2282 try:
2286 submatch = matchmod.subdirmatcher(subpath, match)
2283 submatch = matchmod.subdirmatcher(subpath, match)
2287 if opts.get('subrepos'):
2284 if opts.get('subrepos'):
2288 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2285 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2289 else:
2286 else:
2290 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2287 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2291 except error.LookupError:
2288 except error.LookupError:
2292 ui.status(_("skipping missing subrepository: %s\n")
2289 ui.status(_("skipping missing subrepository: %s\n")
2293 % join(subpath))
2290 % join(subpath))
2294
2291
2295 if not opts.get('dry_run'):
2292 if not opts.get('dry_run'):
2296 rejected = wctx.add(names, prefix)
2293 rejected = wctx.add(names, prefix)
2297 bad.extend(f for f in rejected if f in match.files())
2294 bad.extend(f for f in rejected if f in match.files())
2298 return bad
2295 return bad
2299
2296
2300 def forget(ui, repo, match, prefix, explicitonly):
2297 def forget(ui, repo, match, prefix, explicitonly):
2301 join = lambda f: os.path.join(prefix, f)
2298 join = lambda f: os.path.join(prefix, f)
2302 bad = []
2299 bad = []
2303 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2300 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2304 wctx = repo[None]
2301 wctx = repo[None]
2305 forgot = []
2302 forgot = []
2306
2303
2307 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2304 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2308 forget = sorted(s[0] + s[1] + s[3] + s[6])
2305 forget = sorted(s[0] + s[1] + s[3] + s[6])
2309 if explicitonly:
2306 if explicitonly:
2310 forget = [f for f in forget if match.exact(f)]
2307 forget = [f for f in forget if match.exact(f)]
2311
2308
2312 for subpath in sorted(wctx.substate):
2309 for subpath in sorted(wctx.substate):
2313 sub = wctx.sub(subpath)
2310 sub = wctx.sub(subpath)
2314 try:
2311 try:
2315 submatch = matchmod.subdirmatcher(subpath, match)
2312 submatch = matchmod.subdirmatcher(subpath, match)
2316 subbad, subforgot = sub.forget(submatch, prefix)
2313 subbad, subforgot = sub.forget(submatch, prefix)
2317 bad.extend([subpath + '/' + f for f in subbad])
2314 bad.extend([subpath + '/' + f for f in subbad])
2318 forgot.extend([subpath + '/' + f for f in subforgot])
2315 forgot.extend([subpath + '/' + f for f in subforgot])
2319 except error.LookupError:
2316 except error.LookupError:
2320 ui.status(_("skipping missing subrepository: %s\n")
2317 ui.status(_("skipping missing subrepository: %s\n")
2321 % join(subpath))
2318 % join(subpath))
2322
2319
2323 if not explicitonly:
2320 if not explicitonly:
2324 for f in match.files():
2321 for f in match.files():
2325 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2322 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2326 if f not in forgot:
2323 if f not in forgot:
2327 if repo.wvfs.exists(f):
2324 if repo.wvfs.exists(f):
2328 # Don't complain if the exact case match wasn't given.
2325 # Don't complain if the exact case match wasn't given.
2329 # But don't do this until after checking 'forgot', so
2326 # But don't do this until after checking 'forgot', so
2330 # that subrepo files aren't normalized, and this op is
2327 # that subrepo files aren't normalized, and this op is
2331 # purely from data cached by the status walk above.
2328 # purely from data cached by the status walk above.
2332 if repo.dirstate.normalize(f) in repo.dirstate:
2329 if repo.dirstate.normalize(f) in repo.dirstate:
2333 continue
2330 continue
2334 ui.warn(_('not removing %s: '
2331 ui.warn(_('not removing %s: '
2335 'file is already untracked\n')
2332 'file is already untracked\n')
2336 % match.rel(f))
2333 % match.rel(f))
2337 bad.append(f)
2334 bad.append(f)
2338
2335
2339 for f in forget:
2336 for f in forget:
2340 if ui.verbose or not match.exact(f):
2337 if ui.verbose or not match.exact(f):
2341 ui.status(_('removing %s\n') % match.rel(f))
2338 ui.status(_('removing %s\n') % match.rel(f))
2342
2339
2343 rejected = wctx.forget(forget, prefix)
2340 rejected = wctx.forget(forget, prefix)
2344 bad.extend(f for f in rejected if f in match.files())
2341 bad.extend(f for f in rejected if f in match.files())
2345 forgot.extend(f for f in forget if f not in rejected)
2342 forgot.extend(f for f in forget if f not in rejected)
2346 return bad, forgot
2343 return bad, forgot
2347
2344
2348 def files(ui, ctx, m, fm, fmt, subrepos):
2345 def files(ui, ctx, m, fm, fmt, subrepos):
2349 rev = ctx.rev()
2346 rev = ctx.rev()
2350 ret = 1
2347 ret = 1
2351 ds = ctx.repo().dirstate
2348 ds = ctx.repo().dirstate
2352
2349
2353 for f in ctx.matches(m):
2350 for f in ctx.matches(m):
2354 if rev is None and ds[f] == 'r':
2351 if rev is None and ds[f] == 'r':
2355 continue
2352 continue
2356 fm.startitem()
2353 fm.startitem()
2357 if ui.verbose:
2354 if ui.verbose:
2358 fc = ctx[f]
2355 fc = ctx[f]
2359 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2356 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2360 fm.data(abspath=f)
2357 fm.data(abspath=f)
2361 fm.write('path', fmt, m.rel(f))
2358 fm.write('path', fmt, m.rel(f))
2362 ret = 0
2359 ret = 0
2363
2360
2364 for subpath in sorted(ctx.substate):
2361 for subpath in sorted(ctx.substate):
2365 submatch = matchmod.subdirmatcher(subpath, m)
2362 submatch = matchmod.subdirmatcher(subpath, m)
2366 if (subrepos or m.exact(subpath) or any(submatch.files())):
2363 if (subrepos or m.exact(subpath) or any(submatch.files())):
2367 sub = ctx.sub(subpath)
2364 sub = ctx.sub(subpath)
2368 try:
2365 try:
2369 recurse = m.exact(subpath) or subrepos
2366 recurse = m.exact(subpath) or subrepos
2370 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2367 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2371 ret = 0
2368 ret = 0
2372 except error.LookupError:
2369 except error.LookupError:
2373 ui.status(_("skipping missing subrepository: %s\n")
2370 ui.status(_("skipping missing subrepository: %s\n")
2374 % m.abs(subpath))
2371 % m.abs(subpath))
2375
2372
2376 return ret
2373 return ret
2377
2374
2378 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2375 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2379 join = lambda f: os.path.join(prefix, f)
2376 join = lambda f: os.path.join(prefix, f)
2380 ret = 0
2377 ret = 0
2381 s = repo.status(match=m, clean=True)
2378 s = repo.status(match=m, clean=True)
2382 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2379 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2383
2380
2384 wctx = repo[None]
2381 wctx = repo[None]
2385
2382
2386 if warnings is None:
2383 if warnings is None:
2387 warnings = []
2384 warnings = []
2388 warn = True
2385 warn = True
2389 else:
2386 else:
2390 warn = False
2387 warn = False
2391
2388
2392 subs = sorted(wctx.substate)
2389 subs = sorted(wctx.substate)
2393 total = len(subs)
2390 total = len(subs)
2394 count = 0
2391 count = 0
2395 for subpath in subs:
2392 for subpath in subs:
2396 count += 1
2393 count += 1
2397 submatch = matchmod.subdirmatcher(subpath, m)
2394 submatch = matchmod.subdirmatcher(subpath, m)
2398 if subrepos or m.exact(subpath) or any(submatch.files()):
2395 if subrepos or m.exact(subpath) or any(submatch.files()):
2399 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2396 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2400 sub = wctx.sub(subpath)
2397 sub = wctx.sub(subpath)
2401 try:
2398 try:
2402 if sub.removefiles(submatch, prefix, after, force, subrepos,
2399 if sub.removefiles(submatch, prefix, after, force, subrepos,
2403 warnings):
2400 warnings):
2404 ret = 1
2401 ret = 1
2405 except error.LookupError:
2402 except error.LookupError:
2406 warnings.append(_("skipping missing subrepository: %s\n")
2403 warnings.append(_("skipping missing subrepository: %s\n")
2407 % join(subpath))
2404 % join(subpath))
2408 ui.progress(_('searching'), None)
2405 ui.progress(_('searching'), None)
2409
2406
2410 # warn about failure to delete explicit files/dirs
2407 # warn about failure to delete explicit files/dirs
2411 deleteddirs = util.dirs(deleted)
2408 deleteddirs = util.dirs(deleted)
2412 files = m.files()
2409 files = m.files()
2413 total = len(files)
2410 total = len(files)
2414 count = 0
2411 count = 0
2415 for f in files:
2412 for f in files:
2416 def insubrepo():
2413 def insubrepo():
2417 for subpath in wctx.substate:
2414 for subpath in wctx.substate:
2418 if f.startswith(subpath + '/'):
2415 if f.startswith(subpath + '/'):
2419 return True
2416 return True
2420 return False
2417 return False
2421
2418
2422 count += 1
2419 count += 1
2423 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2420 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2424 isdir = f in deleteddirs or wctx.hasdir(f)
2421 isdir = f in deleteddirs or wctx.hasdir(f)
2425 if (f in repo.dirstate or isdir or f == '.'
2422 if (f in repo.dirstate or isdir or f == '.'
2426 or insubrepo() or f in subs):
2423 or insubrepo() or f in subs):
2427 continue
2424 continue
2428
2425
2429 if repo.wvfs.exists(f):
2426 if repo.wvfs.exists(f):
2430 if repo.wvfs.isdir(f):
2427 if repo.wvfs.isdir(f):
2431 warnings.append(_('not removing %s: no tracked files\n')
2428 warnings.append(_('not removing %s: no tracked files\n')
2432 % m.rel(f))
2429 % m.rel(f))
2433 else:
2430 else:
2434 warnings.append(_('not removing %s: file is untracked\n')
2431 warnings.append(_('not removing %s: file is untracked\n')
2435 % m.rel(f))
2432 % m.rel(f))
2436 # missing files will generate a warning elsewhere
2433 # missing files will generate a warning elsewhere
2437 ret = 1
2434 ret = 1
2438 ui.progress(_('deleting'), None)
2435 ui.progress(_('deleting'), None)
2439
2436
2440 if force:
2437 if force:
2441 list = modified + deleted + clean + added
2438 list = modified + deleted + clean + added
2442 elif after:
2439 elif after:
2443 list = deleted
2440 list = deleted
2444 remaining = modified + added + clean
2441 remaining = modified + added + clean
2445 total = len(remaining)
2442 total = len(remaining)
2446 count = 0
2443 count = 0
2447 for f in remaining:
2444 for f in remaining:
2448 count += 1
2445 count += 1
2449 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2446 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2450 warnings.append(_('not removing %s: file still exists\n')
2447 warnings.append(_('not removing %s: file still exists\n')
2451 % m.rel(f))
2448 % m.rel(f))
2452 ret = 1
2449 ret = 1
2453 ui.progress(_('skipping'), None)
2450 ui.progress(_('skipping'), None)
2454 else:
2451 else:
2455 list = deleted + clean
2452 list = deleted + clean
2456 total = len(modified) + len(added)
2453 total = len(modified) + len(added)
2457 count = 0
2454 count = 0
2458 for f in modified:
2455 for f in modified:
2459 count += 1
2456 count += 1
2460 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2457 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2461 warnings.append(_('not removing %s: file is modified (use -f'
2458 warnings.append(_('not removing %s: file is modified (use -f'
2462 ' to force removal)\n') % m.rel(f))
2459 ' to force removal)\n') % m.rel(f))
2463 ret = 1
2460 ret = 1
2464 for f in added:
2461 for f in added:
2465 count += 1
2462 count += 1
2466 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2463 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2467 warnings.append(_("not removing %s: file has been marked for add"
2464 warnings.append(_("not removing %s: file has been marked for add"
2468 " (use 'hg forget' to undo add)\n") % m.rel(f))
2465 " (use 'hg forget' to undo add)\n") % m.rel(f))
2469 ret = 1
2466 ret = 1
2470 ui.progress(_('skipping'), None)
2467 ui.progress(_('skipping'), None)
2471
2468
2472 list = sorted(list)
2469 list = sorted(list)
2473 total = len(list)
2470 total = len(list)
2474 count = 0
2471 count = 0
2475 for f in list:
2472 for f in list:
2476 count += 1
2473 count += 1
2477 if ui.verbose or not m.exact(f):
2474 if ui.verbose or not m.exact(f):
2478 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2475 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2479 ui.status(_('removing %s\n') % m.rel(f))
2476 ui.status(_('removing %s\n') % m.rel(f))
2480 ui.progress(_('deleting'), None)
2477 ui.progress(_('deleting'), None)
2481
2478
2482 with repo.wlock():
2479 with repo.wlock():
2483 if not after:
2480 if not after:
2484 for f in list:
2481 for f in list:
2485 if f in added:
2482 if f in added:
2486 continue # we never unlink added files on remove
2483 continue # we never unlink added files on remove
2487 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2484 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2488 repo[None].forget(list)
2485 repo[None].forget(list)
2489
2486
2490 if warn:
2487 if warn:
2491 for warning in warnings:
2488 for warning in warnings:
2492 ui.warn(warning)
2489 ui.warn(warning)
2493
2490
2494 return ret
2491 return ret
2495
2492
def cat(ui, repo, ctx, matcher, prefix, **opts):
    """Dump the contents of the files in ctx selected by matcher.

    prefix is prepended to each path when naming the output destination;
    matching subrepositories are descended into recursively.  Returns 0
    when at least one file was written, 1 otherwise.
    """
    err = 1

    def write(path):
        # destination stream honors the 'output' option template, if any
        dest = makefileobj(repo, opts.get('output'), ctx.node(),
                           pathname=os.path.join(prefix, path))
        filedata = ctx[path].data()
        if opts.get('decode'):
            filedata = repo.wwritedata(path, filedata)
        dest.write(filedata)
        dest.close()

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        filename = matcher.files()[0]
        mflog = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mflog[mfnode].find(filename)[0]:
                write(filename)
                return 0
        except KeyError:
            pass

    for absname in ctx.walk(matcher):
        write(absname)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)
            subprefix = os.path.join(prefix, sub._path)
            if not sub.cat(submatch, subprefix, **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2538
2535
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove') and scmutil.addremove(repo, matcher, "",
                                                   opts) != 0:
        raise error.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2555
2552
def samefile(f, ctx1, ctx2):
    """Return True if file f is identical in ctx1 and ctx2.

    Identical means: present in both with equal content and equal flags,
    or absent from both.  Present in only one of the two contexts means
    not the same.
    """
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if not in1:
        # same only when the file is missing on both sides
        return not in2
    if not in2:
        return False
    fctx1 = ctx1.filectx(f)
    fctx2 = ctx2.filectx(f)
    # cmp() returns True when contents differ
    return not fctx1.cmp(fctx2) and fctx1.flags() == fctx2.flags()
2567
2564
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Rewrite changeset `old` to also contain the current working-directory
    changes and/or a new commit message/user/date.

    Strategy: first make a temporary commit of the working directory on top
    of `old` (hooks and active bookmark suppressed), then build a single
    memctx on top of `old`'s parent that merges both, commit it, and finally
    either obsolete or strip the replaced changesets.  Returns the node of
    the amended changeset (or old.node() when nothing changed).
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                # deactivate the bookmark so the temporary commit does not
                # move it; restored (and recorded) in the finally below
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                # NOTE(review): `old.p2` is a bound method and therefore
                # always truthy; this branch always runs.  Presumably
                # `old.p2()` was intended -- confirm before changing, since
                # pathcopies against a null p2 may simply be a no-op.
                if old.p2:
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())
                files = [f for f in files if not samefile(f, ctx, base)]

                def filectxfn(repo, ctx_, path):
                    # fetch file data from the intermediate commit; None
                    # tells memctx the file was removed
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

                user = opts.get('user') or old.user()
                date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                # no -m/-l given: force the editor open, pre-filled with the
                # old description
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            # snapshot of extra before the amend_source marker is added,
            # used below for the "nothing changed" comparison
            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This not what we expect from amend.
                return old.node()

            # commit the new changeset in the same phase as the old one
            # (or secret when --secret was passed)
            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
            #commit the whole amend process
            if createmarkers:
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs)
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, wlock)
    return newid
2750
2747
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's description, opening the editor only when it is empty."""
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2756
2753
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Open the user's editor to obtain a commit message for ctx.

    The editor buffer is seeded either from the most specific matching
    [committemplate] template or from the default buildcommittext() text.
    HG: comment lines and everything below the special diff marker are
    stripped from the result.

    Raises error.Abort when the final message is empty or, with
    unchangedmessagedetection, when the templated text was left untouched.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # look up the most specific [committemplate] entry, walking from
    # 'changeset.<editform parts>' back down to plain 'changeset'
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)
    try:
        # make in-memory changes visible to external process
        tr = repo.currenttransaction()
        repo.dirstate.write(tr)
        pending = tr and tr.writepending() and repo.root

        editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                                  editform=editform, pending=pending,
                                  repopath=repo.path)
    finally:
        # restore the working directory even when the editor (or the
        # dirstate write) raises; previously an exception here left the
        # process chdir'd into the repository root
        os.chdir(olddir)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    text = re.sub("(?m)^HG:.*(\n|$)", "", text)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2806
2803
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the [committemplate] template tmpl for ctx and return the
    resulting editor text."""
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    templ = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)

    # make every other [committemplate] key available to the engine
    for key, value in repo.ui.configitems('committemplate'):
        if key == 'changeset':
            continue
        templ.t.cache[key] = value

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    templ.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2823
2820
def hgprefix(msg):
    """Prefix every non-empty line of msg with 'HG: ', dropping empty ones."""
    prefixed = []
    for line in msg.split("\n"):
        if line:
            prefixed.append("HG: %s" % line)
    return "\n".join(prefixed)
2826
2823
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default (non-templated) commit editor text for ctx."""
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    lines = []
    if ctx.description():
        lines.append(ctx.description())
    lines.append("")
    lines.append("") # Empty line between message and comments.
    lines.append(hgprefix(_("Enter commit message."
                            " Lines beginning with 'HG:' are removed.")))
    lines.append(hgprefix(extramsg))
    lines.append("HG: --")
    lines.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        lines.append(hgprefix(_("branch merge")))
    if ctx.branch():
        lines.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        lines.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    for sub in subs:
        lines.append(hgprefix(_("subrepo %s") % sub))
    for f in added:
        lines.append(hgprefix(_("added %s") % f))
    for f in modified:
        lines.append(hgprefix(_("changed %s") % f))
    for f in removed:
        lines.append(hgprefix(_("removed %s") % f))
    if not (added or modified or removed):
        lines.append(hgprefix(_("no files changed")))
    lines.append("")

    return "\n".join(lines)
2854
2851
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Report status messages after a commit of `node` on `branch`.

    Prints 'created new head' when the commit adds a head to the branch,
    notes reopened closed branch heads, and echoes the committed changeset
    id in verbose/debug mode.  bheads is the list of branch head nodes
    before the commit (may be None/empty).
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2902
2899
def postcommitstatus(repo, pats, opts):
    """Return the working-directory status limited to files matching
    pats/opts."""
    matcher = scmutil.match(repo[None], pats, opts)
    return repo.status(match=matcher)
2905
2902
2906 def revert(ui, repo, ctx, parents, *pats, **opts):
2903 def revert(ui, repo, ctx, parents, *pats, **opts):
2907 parent, p2 = parents
2904 parent, p2 = parents
2908 node = ctx.node()
2905 node = ctx.node()
2909
2906
2910 mf = ctx.manifest()
2907 mf = ctx.manifest()
2911 if node == p2:
2908 if node == p2:
2912 parent = p2
2909 parent = p2
2913
2910
2914 # need all matching names in dirstate and manifest of target rev,
2911 # need all matching names in dirstate and manifest of target rev,
2915 # so have to walk both. do not print errors if files exist in one
2912 # so have to walk both. do not print errors if files exist in one
2916 # but not other. in both cases, filesets should be evaluated against
2913 # but not other. in both cases, filesets should be evaluated against
2917 # workingctx to get consistent result (issue4497). this means 'set:**'
2914 # workingctx to get consistent result (issue4497). this means 'set:**'
2918 # cannot be used to select missing files from target rev.
2915 # cannot be used to select missing files from target rev.
2919
2916
2920 # `names` is a mapping for all elements in working copy and target revision
2917 # `names` is a mapping for all elements in working copy and target revision
2921 # The mapping is in the form:
2918 # The mapping is in the form:
2922 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2919 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2923 names = {}
2920 names = {}
2924
2921
2925 with repo.wlock():
2922 with repo.wlock():
2926 ## filling of the `names` mapping
2923 ## filling of the `names` mapping
2927 # walk dirstate to fill `names`
2924 # walk dirstate to fill `names`
2928
2925
2929 interactive = opts.get('interactive', False)
2926 interactive = opts.get('interactive', False)
2930 wctx = repo[None]
2927 wctx = repo[None]
2931 m = scmutil.match(wctx, pats, opts)
2928 m = scmutil.match(wctx, pats, opts)
2932
2929
2933 # we'll need this later
2930 # we'll need this later
2934 targetsubs = sorted(s for s in wctx.substate if m(s))
2931 targetsubs = sorted(s for s in wctx.substate if m(s))
2935
2932
2936 if not m.always():
2933 if not m.always():
2937 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2934 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2938 names[abs] = m.rel(abs), m.exact(abs)
2935 names[abs] = m.rel(abs), m.exact(abs)
2939
2936
2940 # walk target manifest to fill `names`
2937 # walk target manifest to fill `names`
2941
2938
2942 def badfn(path, msg):
2939 def badfn(path, msg):
2943 if path in names:
2940 if path in names:
2944 return
2941 return
2945 if path in ctx.substate:
2942 if path in ctx.substate:
2946 return
2943 return
2947 path_ = path + '/'
2944 path_ = path + '/'
2948 for f in names:
2945 for f in names:
2949 if f.startswith(path_):
2946 if f.startswith(path_):
2950 return
2947 return
2951 ui.warn("%s: %s\n" % (m.rel(path), msg))
2948 ui.warn("%s: %s\n" % (m.rel(path), msg))
2952
2949
2953 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2950 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2954 if abs not in names:
2951 if abs not in names:
2955 names[abs] = m.rel(abs), m.exact(abs)
2952 names[abs] = m.rel(abs), m.exact(abs)
2956
2953
2957 # Find status of all file in `names`.
2954 # Find status of all file in `names`.
2958 m = scmutil.matchfiles(repo, names)
2955 m = scmutil.matchfiles(repo, names)
2959
2956
2960 changes = repo.status(node1=node, match=m,
2957 changes = repo.status(node1=node, match=m,
2961 unknown=True, ignored=True, clean=True)
2958 unknown=True, ignored=True, clean=True)
2962 else:
2959 else:
2963 changes = repo.status(node1=node, match=m)
2960 changes = repo.status(node1=node, match=m)
2964 for kind in changes:
2961 for kind in changes:
2965 for abs in kind:
2962 for abs in kind:
2966 names[abs] = m.rel(abs), m.exact(abs)
2963 names[abs] = m.rel(abs), m.exact(abs)
2967
2964
2968 m = scmutil.matchfiles(repo, names)
2965 m = scmutil.matchfiles(repo, names)
2969
2966
2970 modified = set(changes.modified)
2967 modified = set(changes.modified)
2971 added = set(changes.added)
2968 added = set(changes.added)
2972 removed = set(changes.removed)
2969 removed = set(changes.removed)
2973 _deleted = set(changes.deleted)
2970 _deleted = set(changes.deleted)
2974 unknown = set(changes.unknown)
2971 unknown = set(changes.unknown)
2975 unknown.update(changes.ignored)
2972 unknown.update(changes.ignored)
2976 clean = set(changes.clean)
2973 clean = set(changes.clean)
2977 modadded = set()
2974 modadded = set()
2978
2975
2979 # We need to account for the state of the file in the dirstate,
2976 # We need to account for the state of the file in the dirstate,
2980 # even when we revert against something else than parent. This will
2977 # even when we revert against something else than parent. This will
2981 # slightly alter the behavior of revert (doing back up or not, delete
2978 # slightly alter the behavior of revert (doing back up or not, delete
2982 # or just forget etc).
2979 # or just forget etc).
2983 if parent == node:
2980 if parent == node:
2984 dsmodified = modified
2981 dsmodified = modified
2985 dsadded = added
2982 dsadded = added
2986 dsremoved = removed
2983 dsremoved = removed
2987 # store all local modifications, useful later for rename detection
2984 # store all local modifications, useful later for rename detection
2988 localchanges = dsmodified | dsadded
2985 localchanges = dsmodified | dsadded
2989 modified, added, removed = set(), set(), set()
2986 modified, added, removed = set(), set(), set()
2990 else:
2987 else:
2991 changes = repo.status(node1=parent, match=m)
2988 changes = repo.status(node1=parent, match=m)
2992 dsmodified = set(changes.modified)
2989 dsmodified = set(changes.modified)
2993 dsadded = set(changes.added)
2990 dsadded = set(changes.added)
2994 dsremoved = set(changes.removed)
2991 dsremoved = set(changes.removed)
2995 # store all local modifications, useful later for rename detection
2992 # store all local modifications, useful later for rename detection
2996 localchanges = dsmodified | dsadded
2993 localchanges = dsmodified | dsadded
2997
2994
2998 # only take into account for removes between wc and target
2995 # only take into account for removes between wc and target
2999 clean |= dsremoved - removed
2996 clean |= dsremoved - removed
3000 dsremoved &= removed
2997 dsremoved &= removed
3001 # distinct between dirstate remove and other
2998 # distinct between dirstate remove and other
3002 removed -= dsremoved
2999 removed -= dsremoved
3003
3000
3004 modadded = added & dsmodified
3001 modadded = added & dsmodified
3005 added -= modadded
3002 added -= modadded
3006
3003
3007 # tell newly modified apart.
3004 # tell newly modified apart.
3008 dsmodified &= modified
3005 dsmodified &= modified
3009 dsmodified |= modified & dsadded # dirstate added may need backup
3006 dsmodified |= modified & dsadded # dirstate added may need backup
3010 modified -= dsmodified
3007 modified -= dsmodified
3011
3008
3012 # We need to wait for some post-processing to update this set
3009 # We need to wait for some post-processing to update this set
3013 # before making the distinction. The dirstate will be used for
3010 # before making the distinction. The dirstate will be used for
3014 # that purpose.
3011 # that purpose.
3015 dsadded = added
3012 dsadded = added
3016
3013
3017 # in case of merge, files that are actually added can be reported as
3014 # in case of merge, files that are actually added can be reported as
3018 # modified, we need to post process the result
3015 # modified, we need to post process the result
3019 if p2 != nullid:
3016 if p2 != nullid:
3020 mergeadd = set(dsmodified)
3017 mergeadd = set(dsmodified)
3021 for path in dsmodified:
3018 for path in dsmodified:
3022 if path in mf:
3019 if path in mf:
3023 mergeadd.remove(path)
3020 mergeadd.remove(path)
3024 dsadded |= mergeadd
3021 dsadded |= mergeadd
3025 dsmodified -= mergeadd
3022 dsmodified -= mergeadd
3026
3023
3027 # if f is a rename, update `names` to also revert the source
3024 # if f is a rename, update `names` to also revert the source
3028 cwd = repo.getcwd()
3025 cwd = repo.getcwd()
3029 for f in localchanges:
3026 for f in localchanges:
3030 src = repo.dirstate.copied(f)
3027 src = repo.dirstate.copied(f)
3031 # XXX should we check for rename down to target node?
3028 # XXX should we check for rename down to target node?
3032 if src and src not in names and repo.dirstate[src] == 'r':
3029 if src and src not in names and repo.dirstate[src] == 'r':
3033 dsremoved.add(src)
3030 dsremoved.add(src)
3034 names[src] = (repo.pathto(src, cwd), True)
3031 names[src] = (repo.pathto(src, cwd), True)
3035
3032
3036 # determine the exact nature of the deleted changesets
3033 # determine the exact nature of the deleted changesets
3037 deladded = set(_deleted)
3034 deladded = set(_deleted)
3038 for path in _deleted:
3035 for path in _deleted:
3039 if path in mf:
3036 if path in mf:
3040 deladded.remove(path)
3037 deladded.remove(path)
3041 deleted = _deleted - deladded
3038 deleted = _deleted - deladded
3042
3039
3043 # distinguish between file to forget and the other
3040 # distinguish between file to forget and the other
3044 added = set()
3041 added = set()
3045 for abs in dsadded:
3042 for abs in dsadded:
3046 if repo.dirstate[abs] != 'a':
3043 if repo.dirstate[abs] != 'a':
3047 added.add(abs)
3044 added.add(abs)
3048 dsadded -= added
3045 dsadded -= added
3049
3046
3050 for abs in deladded:
3047 for abs in deladded:
3051 if repo.dirstate[abs] == 'a':
3048 if repo.dirstate[abs] == 'a':
3052 dsadded.add(abs)
3049 dsadded.add(abs)
3053 deladded -= dsadded
3050 deladded -= dsadded
3054
3051
3055 # For files marked as removed, we check if an unknown file is present at
3052 # For files marked as removed, we check if an unknown file is present at
3056 # the same path. If a such file exists it may need to be backed up.
3053 # the same path. If a such file exists it may need to be backed up.
3057 # Making the distinction at this stage helps have simpler backup
3054 # Making the distinction at this stage helps have simpler backup
3058 # logic.
3055 # logic.
3059 removunk = set()
3056 removunk = set()
3060 for abs in removed:
3057 for abs in removed:
3061 target = repo.wjoin(abs)
3058 target = repo.wjoin(abs)
3062 if os.path.lexists(target):
3059 if os.path.lexists(target):
3063 removunk.add(abs)
3060 removunk.add(abs)
3064 removed -= removunk
3061 removed -= removunk
3065
3062
3066 dsremovunk = set()
3063 dsremovunk = set()
3067 for abs in dsremoved:
3064 for abs in dsremoved:
3068 target = repo.wjoin(abs)
3065 target = repo.wjoin(abs)
3069 if os.path.lexists(target):
3066 if os.path.lexists(target):
3070 dsremovunk.add(abs)
3067 dsremovunk.add(abs)
3071 dsremoved -= dsremovunk
3068 dsremoved -= dsremovunk
3072
3069
3073 # action to be actually performed by revert
3070 # action to be actually performed by revert
3074 # (<list of file>, message>) tuple
3071 # (<list of file>, message>) tuple
3075 actions = {'revert': ([], _('reverting %s\n')),
3072 actions = {'revert': ([], _('reverting %s\n')),
3076 'add': ([], _('adding %s\n')),
3073 'add': ([], _('adding %s\n')),
3077 'remove': ([], _('removing %s\n')),
3074 'remove': ([], _('removing %s\n')),
3078 'drop': ([], _('removing %s\n')),
3075 'drop': ([], _('removing %s\n')),
3079 'forget': ([], _('forgetting %s\n')),
3076 'forget': ([], _('forgetting %s\n')),
3080 'undelete': ([], _('undeleting %s\n')),
3077 'undelete': ([], _('undeleting %s\n')),
3081 'noop': (None, _('no changes needed to %s\n')),
3078 'noop': (None, _('no changes needed to %s\n')),
3082 'unknown': (None, _('file not managed: %s\n')),
3079 'unknown': (None, _('file not managed: %s\n')),
3083 }
3080 }
3084
3081
3085 # "constant" that convey the backup strategy.
3082 # "constant" that convey the backup strategy.
3086 # All set to `discard` if `no-backup` is set do avoid checking
3083 # All set to `discard` if `no-backup` is set do avoid checking
3087 # no_backup lower in the code.
3084 # no_backup lower in the code.
3088 # These values are ordered for comparison purposes
3085 # These values are ordered for comparison purposes
3089 backupinteractive = 3 # do backup if interactively modified
3086 backupinteractive = 3 # do backup if interactively modified
3090 backup = 2 # unconditionally do backup
3087 backup = 2 # unconditionally do backup
3091 check = 1 # check if the existing file differs from target
3088 check = 1 # check if the existing file differs from target
3092 discard = 0 # never do backup
3089 discard = 0 # never do backup
3093 if opts.get('no_backup'):
3090 if opts.get('no_backup'):
3094 backupinteractive = backup = check = discard
3091 backupinteractive = backup = check = discard
3095 if interactive:
3092 if interactive:
3096 dsmodifiedbackup = backupinteractive
3093 dsmodifiedbackup = backupinteractive
3097 else:
3094 else:
3098 dsmodifiedbackup = backup
3095 dsmodifiedbackup = backup
3099 tobackup = set()
3096 tobackup = set()
3100
3097
3101 backupanddel = actions['remove']
3098 backupanddel = actions['remove']
3102 if not opts.get('no_backup'):
3099 if not opts.get('no_backup'):
3103 backupanddel = actions['drop']
3100 backupanddel = actions['drop']
3104
3101
3105 disptable = (
3102 disptable = (
3106 # dispatch table:
3103 # dispatch table:
3107 # file state
3104 # file state
3108 # action
3105 # action
3109 # make backup
3106 # make backup
3110
3107
3111 ## Sets that results that will change file on disk
3108 ## Sets that results that will change file on disk
3112 # Modified compared to target, no local change
3109 # Modified compared to target, no local change
3113 (modified, actions['revert'], discard),
3110 (modified, actions['revert'], discard),
3114 # Modified compared to target, but local file is deleted
3111 # Modified compared to target, but local file is deleted
3115 (deleted, actions['revert'], discard),
3112 (deleted, actions['revert'], discard),
3116 # Modified compared to target, local change
3113 # Modified compared to target, local change
3117 (dsmodified, actions['revert'], dsmodifiedbackup),
3114 (dsmodified, actions['revert'], dsmodifiedbackup),
3118 # Added since target
3115 # Added since target
3119 (added, actions['remove'], discard),
3116 (added, actions['remove'], discard),
3120 # Added in working directory
3117 # Added in working directory
3121 (dsadded, actions['forget'], discard),
3118 (dsadded, actions['forget'], discard),
3122 # Added since target, have local modification
3119 # Added since target, have local modification
3123 (modadded, backupanddel, backup),
3120 (modadded, backupanddel, backup),
3124 # Added since target but file is missing in working directory
3121 # Added since target but file is missing in working directory
3125 (deladded, actions['drop'], discard),
3122 (deladded, actions['drop'], discard),
3126 # Removed since target, before working copy parent
3123 # Removed since target, before working copy parent
3127 (removed, actions['add'], discard),
3124 (removed, actions['add'], discard),
3128 # Same as `removed` but an unknown file exists at the same path
3125 # Same as `removed` but an unknown file exists at the same path
3129 (removunk, actions['add'], check),
3126 (removunk, actions['add'], check),
3130 # Removed since targe, marked as such in working copy parent
3127 # Removed since targe, marked as such in working copy parent
3131 (dsremoved, actions['undelete'], discard),
3128 (dsremoved, actions['undelete'], discard),
3132 # Same as `dsremoved` but an unknown file exists at the same path
3129 # Same as `dsremoved` but an unknown file exists at the same path
3133 (dsremovunk, actions['undelete'], check),
3130 (dsremovunk, actions['undelete'], check),
3134 ## the following sets does not result in any file changes
3131 ## the following sets does not result in any file changes
3135 # File with no modification
3132 # File with no modification
3136 (clean, actions['noop'], discard),
3133 (clean, actions['noop'], discard),
3137 # Existing file, not tracked anywhere
3134 # Existing file, not tracked anywhere
3138 (unknown, actions['unknown'], discard),
3135 (unknown, actions['unknown'], discard),
3139 )
3136 )
3140
3137
3141 for abs, (rel, exact) in sorted(names.items()):
3138 for abs, (rel, exact) in sorted(names.items()):
3142 # target file to be touch on disk (relative to cwd)
3139 # target file to be touch on disk (relative to cwd)
3143 target = repo.wjoin(abs)
3140 target = repo.wjoin(abs)
3144 # search the entry in the dispatch table.
3141 # search the entry in the dispatch table.
3145 # if the file is in any of these sets, it was touched in the working
3142 # if the file is in any of these sets, it was touched in the working
3146 # directory parent and we are sure it needs to be reverted.
3143 # directory parent and we are sure it needs to be reverted.
3147 for table, (xlist, msg), dobackup in disptable:
3144 for table, (xlist, msg), dobackup in disptable:
3148 if abs not in table:
3145 if abs not in table:
3149 continue
3146 continue
3150 if xlist is not None:
3147 if xlist is not None:
3151 xlist.append(abs)
3148 xlist.append(abs)
3152 if dobackup:
3149 if dobackup:
3153 # If in interactive mode, don't automatically create
3150 # If in interactive mode, don't automatically create
3154 # .orig files (issue4793)
3151 # .orig files (issue4793)
3155 if dobackup == backupinteractive:
3152 if dobackup == backupinteractive:
3156 tobackup.add(abs)
3153 tobackup.add(abs)
3157 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3154 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3158 bakname = scmutil.origpath(ui, repo, rel)
3155 bakname = scmutil.origpath(ui, repo, rel)
3159 ui.note(_('saving current version of %s as %s\n') %
3156 ui.note(_('saving current version of %s as %s\n') %
3160 (rel, bakname))
3157 (rel, bakname))
3161 if not opts.get('dry_run'):
3158 if not opts.get('dry_run'):
3162 if interactive:
3159 if interactive:
3163 util.copyfile(target, bakname)
3160 util.copyfile(target, bakname)
3164 else:
3161 else:
3165 util.rename(target, bakname)
3162 util.rename(target, bakname)
3166 if ui.verbose or not exact:
3163 if ui.verbose or not exact:
3167 if not isinstance(msg, basestring):
3164 if not isinstance(msg, basestring):
3168 msg = msg(abs)
3165 msg = msg(abs)
3169 ui.status(msg % rel)
3166 ui.status(msg % rel)
3170 elif exact:
3167 elif exact:
3171 ui.warn(msg % rel)
3168 ui.warn(msg % rel)
3172 break
3169 break
3173
3170
3174 if not opts.get('dry_run'):
3171 if not opts.get('dry_run'):
3175 needdata = ('revert', 'add', 'undelete')
3172 needdata = ('revert', 'add', 'undelete')
3176 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3173 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3177 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3174 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3178
3175
3179 if targetsubs:
3176 if targetsubs:
3180 # Revert the subrepos on the revert list
3177 # Revert the subrepos on the revert list
3181 for sub in targetsubs:
3178 for sub in targetsubs:
3182 try:
3179 try:
3183 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3180 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3184 except KeyError:
3181 except KeyError:
3185 raise error.Abort("subrepository '%s' does not exist in %s!"
3182 raise error.Abort("subrepository '%s' does not exist in %s!"
3186 % (sub, short(ctx.node())))
3183 % (sub, short(ctx.node())))
3187
3184
3188 def _revertprefetch(repo, ctx, *files):
3185 def _revertprefetch(repo, ctx, *files):
3189 """Let extension changing the storage layer prefetch content"""
3186 """Let extension changing the storage layer prefetch content"""
3190 pass
3187 pass
3191
3188
3192 def _performrevert(repo, parents, ctx, actions, interactive=False,
3189 def _performrevert(repo, parents, ctx, actions, interactive=False,
3193 tobackup=None):
3190 tobackup=None):
3194 """function that actually perform all the actions computed for revert
3191 """function that actually perform all the actions computed for revert
3195
3192
3196 This is an independent function to let extension to plug in and react to
3193 This is an independent function to let extension to plug in and react to
3197 the imminent revert.
3194 the imminent revert.
3198
3195
3199 Make sure you have the working directory locked when calling this function.
3196 Make sure you have the working directory locked when calling this function.
3200 """
3197 """
3201 parent, p2 = parents
3198 parent, p2 = parents
3202 node = ctx.node()
3199 node = ctx.node()
3203 excluded_files = []
3200 excluded_files = []
3204 matcher_opts = {"exclude": excluded_files}
3201 matcher_opts = {"exclude": excluded_files}
3205
3202
3206 def checkout(f):
3203 def checkout(f):
3207 fc = ctx[f]
3204 fc = ctx[f]
3208 repo.wwrite(f, fc.data(), fc.flags())
3205 repo.wwrite(f, fc.data(), fc.flags())
3209
3206
3210 def doremove(f):
3207 def doremove(f):
3211 try:
3208 try:
3212 util.unlinkpath(repo.wjoin(f))
3209 util.unlinkpath(repo.wjoin(f))
3213 except OSError:
3210 except OSError:
3214 pass
3211 pass
3215 repo.dirstate.remove(f)
3212 repo.dirstate.remove(f)
3216
3213
3217 audit_path = pathutil.pathauditor(repo.root)
3214 audit_path = pathutil.pathauditor(repo.root)
3218 for f in actions['forget'][0]:
3215 for f in actions['forget'][0]:
3219 if interactive:
3216 if interactive:
3220 choice = repo.ui.promptchoice(
3217 choice = repo.ui.promptchoice(
3221 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3218 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3222 if choice == 0:
3219 if choice == 0:
3223 repo.dirstate.drop(f)
3220 repo.dirstate.drop(f)
3224 else:
3221 else:
3225 excluded_files.append(repo.wjoin(f))
3222 excluded_files.append(repo.wjoin(f))
3226 else:
3223 else:
3227 repo.dirstate.drop(f)
3224 repo.dirstate.drop(f)
3228 for f in actions['remove'][0]:
3225 for f in actions['remove'][0]:
3229 audit_path(f)
3226 audit_path(f)
3230 if interactive:
3227 if interactive:
3231 choice = repo.ui.promptchoice(
3228 choice = repo.ui.promptchoice(
3232 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3229 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3233 if choice == 0:
3230 if choice == 0:
3234 doremove(f)
3231 doremove(f)
3235 else:
3232 else:
3236 excluded_files.append(repo.wjoin(f))
3233 excluded_files.append(repo.wjoin(f))
3237 else:
3234 else:
3238 doremove(f)
3235 doremove(f)
3239 for f in actions['drop'][0]:
3236 for f in actions['drop'][0]:
3240 audit_path(f)
3237 audit_path(f)
3241 repo.dirstate.remove(f)
3238 repo.dirstate.remove(f)
3242
3239
3243 normal = None
3240 normal = None
3244 if node == parent:
3241 if node == parent:
3245 # We're reverting to our parent. If possible, we'd like status
3242 # We're reverting to our parent. If possible, we'd like status
3246 # to report the file as clean. We have to use normallookup for
3243 # to report the file as clean. We have to use normallookup for
3247 # merges to avoid losing information about merged/dirty files.
3244 # merges to avoid losing information about merged/dirty files.
3248 if p2 != nullid:
3245 if p2 != nullid:
3249 normal = repo.dirstate.normallookup
3246 normal = repo.dirstate.normallookup
3250 else:
3247 else:
3251 normal = repo.dirstate.normal
3248 normal = repo.dirstate.normal
3252
3249
3253 newlyaddedandmodifiedfiles = set()
3250 newlyaddedandmodifiedfiles = set()
3254 if interactive:
3251 if interactive:
3255 # Prompt the user for changes to revert
3252 # Prompt the user for changes to revert
3256 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3253 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3257 m = scmutil.match(ctx, torevert, matcher_opts)
3254 m = scmutil.match(ctx, torevert, matcher_opts)
3258 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3255 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3259 diffopts.nodates = True
3256 diffopts.nodates = True
3260 diffopts.git = True
3257 diffopts.git = True
3261 reversehunks = repo.ui.configbool('experimental',
3258 reversehunks = repo.ui.configbool('experimental',
3262 'revertalternateinteractivemode',
3259 'revertalternateinteractivemode',
3263 True)
3260 True)
3264 if reversehunks:
3261 if reversehunks:
3265 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3262 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3266 else:
3263 else:
3267 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3264 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3268 originalchunks = patch.parsepatch(diff)
3265 originalchunks = patch.parsepatch(diff)
3269 operation = 'discard' if node == parent else 'revert'
3266 operation = 'discard' if node == parent else 'revert'
3270
3267
3271 try:
3268 try:
3272
3269
3273 chunks, opts = recordfilter(repo.ui, originalchunks,
3270 chunks, opts = recordfilter(repo.ui, originalchunks,
3274 operation=operation)
3271 operation=operation)
3275 if reversehunks:
3272 if reversehunks:
3276 chunks = patch.reversehunks(chunks)
3273 chunks = patch.reversehunks(chunks)
3277
3274
3278 except patch.PatchError as err:
3275 except patch.PatchError as err:
3279 raise error.Abort(_('error parsing patch: %s') % err)
3276 raise error.Abort(_('error parsing patch: %s') % err)
3280
3277
3281 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3278 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3282 if tobackup is None:
3279 if tobackup is None:
3283 tobackup = set()
3280 tobackup = set()
3284 # Apply changes
3281 # Apply changes
3285 fp = stringio()
3282 fp = stringio()
3286 for c in chunks:
3283 for c in chunks:
3287 # Create a backup file only if this hunk should be backed up
3284 # Create a backup file only if this hunk should be backed up
3288 if ishunk(c) and c.header.filename() in tobackup:
3285 if ishunk(c) and c.header.filename() in tobackup:
3289 abs = c.header.filename()
3286 abs = c.header.filename()
3290 target = repo.wjoin(abs)
3287 target = repo.wjoin(abs)
3291 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3288 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3292 util.copyfile(target, bakname)
3289 util.copyfile(target, bakname)
3293 tobackup.remove(abs)
3290 tobackup.remove(abs)
3294 c.write(fp)
3291 c.write(fp)
3295 dopatch = fp.tell()
3292 dopatch = fp.tell()
3296 fp.seek(0)
3293 fp.seek(0)
3297 if dopatch:
3294 if dopatch:
3298 try:
3295 try:
3299 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3296 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3300 except patch.PatchError as err:
3297 except patch.PatchError as err:
3301 raise error.Abort(str(err))
3298 raise error.Abort(str(err))
3302 del fp
3299 del fp
3303 else:
3300 else:
3304 for f in actions['revert'][0]:
3301 for f in actions['revert'][0]:
3305 checkout(f)
3302 checkout(f)
3306 if normal:
3303 if normal:
3307 normal(f)
3304 normal(f)
3308
3305
3309 for f in actions['add'][0]:
3306 for f in actions['add'][0]:
3310 # Don't checkout modified files, they are already created by the diff
3307 # Don't checkout modified files, they are already created by the diff
3311 if f not in newlyaddedandmodifiedfiles:
3308 if f not in newlyaddedandmodifiedfiles:
3312 checkout(f)
3309 checkout(f)
3313 repo.dirstate.add(f)
3310 repo.dirstate.add(f)
3314
3311
3315 normal = repo.dirstate.normallookup
3312 normal = repo.dirstate.normallookup
3316 if node == parent and p2 == nullid:
3313 if node == parent and p2 == nullid:
3317 normal = repo.dirstate.normal
3314 normal = repo.dirstate.normal
3318 for f in actions['undelete'][0]:
3315 for f in actions['undelete'][0]:
3319 checkout(f)
3316 checkout(f)
3320 normal(f)
3317 normal(f)
3321
3318
3322 copied = copies.pathcopies(repo[parent], ctx)
3319 copied = copies.pathcopies(repo[parent], ctx)
3323
3320
3324 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3321 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3325 if f in copied:
3322 if f in copied:
3326 repo.dirstate.copy(copied[f], f)
3323 repo.dirstate.copy(copied[f], f)
3327
3324
3328 def command(table):
3325 def command(table):
3329 """Returns a function object to be used as a decorator for making commands.
3326 """Returns a function object to be used as a decorator for making commands.
3330
3327
3331 This function receives a command table as its argument. The table should
3328 This function receives a command table as its argument. The table should
3332 be a dict.
3329 be a dict.
3333
3330
3334 The returned function can be used as a decorator for adding commands
3331 The returned function can be used as a decorator for adding commands
3335 to that command table. This function accepts multiple arguments to define
3332 to that command table. This function accepts multiple arguments to define
3336 a command.
3333 a command.
3337
3334
3338 The first argument is the command name.
3335 The first argument is the command name.
3339
3336
3340 The options argument is an iterable of tuples defining command arguments.
3337 The options argument is an iterable of tuples defining command arguments.
3341 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3338 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3342
3339
3343 The synopsis argument defines a short, one line summary of how to use the
3340 The synopsis argument defines a short, one line summary of how to use the
3344 command. This shows up in the help output.
3341 command. This shows up in the help output.
3345
3342
3346 The norepo argument defines whether the command does not require a
3343 The norepo argument defines whether the command does not require a
3347 local repository. Most commands operate against a repository, thus the
3344 local repository. Most commands operate against a repository, thus the
3348 default is False.
3345 default is False.
3349
3346
3350 The optionalrepo argument defines whether the command optionally requires
3347 The optionalrepo argument defines whether the command optionally requires
3351 a local repository.
3348 a local repository.
3352
3349
3353 The inferrepo argument defines whether to try to find a repository from the
3350 The inferrepo argument defines whether to try to find a repository from the
3354 command line arguments. If True, arguments will be examined for potential
3351 command line arguments. If True, arguments will be examined for potential
3355 repository locations. See ``findrepo()``. If a repository is found, it
3352 repository locations. See ``findrepo()``. If a repository is found, it
3356 will be used.
3353 will be used.
3357 """
3354 """
3358 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3355 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3359 inferrepo=False):
3356 inferrepo=False):
3360 def decorator(func):
3357 def decorator(func):
3361 func.norepo = norepo
3358 func.norepo = norepo
3362 func.optionalrepo = optionalrepo
3359 func.optionalrepo = optionalrepo
3363 func.inferrepo = inferrepo
3360 func.inferrepo = inferrepo
3364 if synopsis:
3361 if synopsis:
3365 table[name] = func, list(options), synopsis
3362 table[name] = func, list(options), synopsis
3366 else:
3363 else:
3367 table[name] = func, list(options)
3364 table[name] = func, list(options)
3368 return func
3365 return func
3369 return decorator
3366 return decorator
3370
3367
3371 return cmd
3368 return cmd
3372
3369
3373 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3370 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3374 # commands.outgoing. "missing" is "missing" of the result of
3371 # commands.outgoing. "missing" is "missing" of the result of
3375 # "findcommonoutgoing()"
3372 # "findcommonoutgoing()"
3376 outgoinghooks = util.hooks()
3373 outgoinghooks = util.hooks()
3377
3374
3378 # a list of (ui, repo) functions called by commands.summary
3375 # a list of (ui, repo) functions called by commands.summary
3379 summaryhooks = util.hooks()
3376 summaryhooks = util.hooks()
3380
3377
3381 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3378 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3382 #
3379 #
3383 # functions should return tuple of booleans below, if 'changes' is None:
3380 # functions should return tuple of booleans below, if 'changes' is None:
3384 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3381 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3385 #
3382 #
3386 # otherwise, 'changes' is a tuple of tuples below:
3383 # otherwise, 'changes' is a tuple of tuples below:
3387 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3384 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3388 # - (desturl, destbranch, destpeer, outgoing)
3385 # - (desturl, destbranch, destpeer, outgoing)
3389 summaryremotehooks = util.hooks()
3386 summaryremotehooks = util.hooks()
3390
3387
3391 # A list of state files kept by multistep operations like graft.
3388 # A list of state files kept by multistep operations like graft.
3392 # Since graft cannot be aborted, it is considered 'clearable' by update.
3389 # Since graft cannot be aborted, it is considered 'clearable' by update.
3393 # note: bisect is intentionally excluded
3390 # note: bisect is intentionally excluded
3394 # (state file, clearable, allowcommit, error, hint)
3391 # (state file, clearable, allowcommit, error, hint)
3395 unfinishedstates = [
3392 unfinishedstates = [
3396 ('graftstate', True, False, _('graft in progress'),
3393 ('graftstate', True, False, _('graft in progress'),
3397 _("use 'hg graft --continue' or 'hg update' to abort")),
3394 _("use 'hg graft --continue' or 'hg update' to abort")),
3398 ('updatestate', True, False, _('last update was interrupted'),
3395 ('updatestate', True, False, _('last update was interrupted'),
3399 _("use 'hg update' to get a consistent checkout"))
3396 _("use 'hg update' to get a consistent checkout"))
3400 ]
3397 ]
3401
3398
3402 def checkunfinished(repo, commit=False):
3399 def checkunfinished(repo, commit=False):
3403 '''Look for an unfinished multistep operation, like graft, and abort
3400 '''Look for an unfinished multistep operation, like graft, and abort
3404 if found. It's probably good to check this right before
3401 if found. It's probably good to check this right before
3405 bailifchanged().
3402 bailifchanged().
3406 '''
3403 '''
3407 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3404 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3408 if commit and allowcommit:
3405 if commit and allowcommit:
3409 continue
3406 continue
3410 if repo.vfs.exists(f):
3407 if repo.vfs.exists(f):
3411 raise error.Abort(msg, hint=hint)
3408 raise error.Abort(msg, hint=hint)
3412
3409
3413 def clearunfinished(repo):
3410 def clearunfinished(repo):
3414 '''Check for unfinished operations (as above), and clear the ones
3411 '''Check for unfinished operations (as above), and clear the ones
3415 that are clearable.
3412 that are clearable.
3416 '''
3413 '''
3417 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3414 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3418 if not clearable and repo.vfs.exists(f):
3415 if not clearable and repo.vfs.exists(f):
3419 raise error.Abort(msg, hint=hint)
3416 raise error.Abort(msg, hint=hint)
3420 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3417 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3421 if clearable and repo.vfs.exists(f):
3418 if clearable and repo.vfs.exists(f):
3422 util.unlink(repo.join(f))
3419 util.unlink(repo.join(f))
3423
3420
3424 afterresolvedstates = [
3421 afterresolvedstates = [
3425 ('graftstate',
3422 ('graftstate',
3426 _('hg graft --continue')),
3423 _('hg graft --continue')),
3427 ]
3424 ]
3428
3425
3429 def howtocontinue(repo):
3426 def howtocontinue(repo):
3430 '''Check for an unfinished operation and return the command to finish
3427 '''Check for an unfinished operation and return the command to finish
3431 it.
3428 it.
3432
3429
3433 afterresolvedstates tuples define a .hg/{file} and the corresponding
3430 afterresolvedstates tuples define a .hg/{file} and the corresponding
3434 command needed to finish it.
3431 command needed to finish it.
3435
3432
3436 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3433 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3437 a boolean.
3434 a boolean.
3438 '''
3435 '''
3439 contmsg = _("continue: %s")
3436 contmsg = _("continue: %s")
3440 for f, msg in afterresolvedstates:
3437 for f, msg in afterresolvedstates:
3441 if repo.vfs.exists(f):
3438 if repo.vfs.exists(f):
3442 return contmsg % msg, True
3439 return contmsg % msg, True
3443 workingctx = repo[None]
3440 workingctx = repo[None]
3444 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3441 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3445 for s in workingctx.substate)
3442 for s in workingctx.substate)
3446 if dirty:
3443 if dirty:
3447 return contmsg % _("hg commit"), False
3444 return contmsg % _("hg commit"), False
3448 return None, None
3445 return None, None
3449
3446
3450 def checkafterresolved(repo):
3447 def checkafterresolved(repo):
3451 '''Inform the user about the next action after completing hg resolve
3448 '''Inform the user about the next action after completing hg resolve
3452
3449
3453 If there's a matching afterresolvedstates, howtocontinue will yield
3450 If there's a matching afterresolvedstates, howtocontinue will yield
3454 repo.ui.warn as the reporter.
3451 repo.ui.warn as the reporter.
3455
3452
3456 Otherwise, it will yield repo.ui.note.
3453 Otherwise, it will yield repo.ui.note.
3457 '''
3454 '''
3458 msg, warning = howtocontinue(repo)
3455 msg, warning = howtocontinue(repo)
3459 if msg is not None:
3456 if msg is not None:
3460 if warning:
3457 if warning:
3461 repo.ui.warn("%s\n" % msg)
3458 repo.ui.warn("%s\n" % msg)
3462 else:
3459 else:
3463 repo.ui.note("%s\n" % msg)
3460 repo.ui.note("%s\n" % msg)
3464
3461
3465 def wrongtooltocontinue(repo, task):
3462 def wrongtooltocontinue(repo, task):
3466 '''Raise an abort suggesting how to properly continue if there is an
3463 '''Raise an abort suggesting how to properly continue if there is an
3467 active task.
3464 active task.
3468
3465
3469 Uses howtocontinue() to find the active task.
3466 Uses howtocontinue() to find the active task.
3470
3467
3471 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3468 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3472 a hint.
3469 a hint.
3473 '''
3470 '''
3474 after = howtocontinue(repo)
3471 after = howtocontinue(repo)
3475 hint = None
3472 hint = None
3476 if after[1]:
3473 if after[1]:
3477 hint = after[0]
3474 hint = after[0]
3478 raise error.Abort(_('no %s in progress') % task, hint=hint)
3475 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,1269 +1,1282
1 # templater.py - template expansion for output
1 # templater.py - template expansion for output
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11 import re
11 import re
12 import types
12 import types
13
13
14 from .i18n import _
14 from .i18n import _
15 from . import (
15 from . import (
16 config,
16 config,
17 error,
17 error,
18 minirst,
18 minirst,
19 parser,
19 parser,
20 pycompat,
20 pycompat,
21 registrar,
21 registrar,
22 revset as revsetmod,
22 revset as revsetmod,
23 revsetlang,
23 revsetlang,
24 templatefilters,
24 templatefilters,
25 templatekw,
25 templatekw,
26 util,
26 util,
27 )
27 )
28
28
29 # template parsing
29 # template parsing
30
30
31 elements = {
31 elements = {
32 # token-type: binding-strength, primary, prefix, infix, suffix
32 # token-type: binding-strength, primary, prefix, infix, suffix
33 "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
33 "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
34 ",": (2, None, None, ("list", 2), None),
34 ",": (2, None, None, ("list", 2), None),
35 "|": (5, None, None, ("|", 5), None),
35 "|": (5, None, None, ("|", 5), None),
36 "%": (6, None, None, ("%", 6), None),
36 "%": (6, None, None, ("%", 6), None),
37 ")": (0, None, None, None, None),
37 ")": (0, None, None, None, None),
38 "+": (3, None, None, ("+", 3), None),
38 "+": (3, None, None, ("+", 3), None),
39 "-": (3, None, ("negate", 10), ("-", 3), None),
39 "-": (3, None, ("negate", 10), ("-", 3), None),
40 "*": (4, None, None, ("*", 4), None),
40 "*": (4, None, None, ("*", 4), None),
41 "/": (4, None, None, ("/", 4), None),
41 "/": (4, None, None, ("/", 4), None),
42 "integer": (0, "integer", None, None, None),
42 "integer": (0, "integer", None, None, None),
43 "symbol": (0, "symbol", None, None, None),
43 "symbol": (0, "symbol", None, None, None),
44 "string": (0, "string", None, None, None),
44 "string": (0, "string", None, None, None),
45 "template": (0, "template", None, None, None),
45 "template": (0, "template", None, None, None),
46 "end": (0, None, None, None, None),
46 "end": (0, None, None, None, None),
47 }
47 }
48
48
49 def tokenize(program, start, end, term=None):
49 def tokenize(program, start, end, term=None):
50 """Parse a template expression into a stream of tokens, which must end
50 """Parse a template expression into a stream of tokens, which must end
51 with term if specified"""
51 with term if specified"""
52 pos = start
52 pos = start
53 while pos < end:
53 while pos < end:
54 c = program[pos]
54 c = program[pos]
55 if c.isspace(): # skip inter-token whitespace
55 if c.isspace(): # skip inter-token whitespace
56 pass
56 pass
57 elif c in "(,)%|+-*/": # handle simple operators
57 elif c in "(,)%|+-*/": # handle simple operators
58 yield (c, None, pos)
58 yield (c, None, pos)
59 elif c in '"\'': # handle quoted templates
59 elif c in '"\'': # handle quoted templates
60 s = pos + 1
60 s = pos + 1
61 data, pos = _parsetemplate(program, s, end, c)
61 data, pos = _parsetemplate(program, s, end, c)
62 yield ('template', data, s)
62 yield ('template', data, s)
63 pos -= 1
63 pos -= 1
64 elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'):
64 elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'):
65 # handle quoted strings
65 # handle quoted strings
66 c = program[pos + 1]
66 c = program[pos + 1]
67 s = pos = pos + 2
67 s = pos = pos + 2
68 while pos < end: # find closing quote
68 while pos < end: # find closing quote
69 d = program[pos]
69 d = program[pos]
70 if d == '\\': # skip over escaped characters
70 if d == '\\': # skip over escaped characters
71 pos += 2
71 pos += 2
72 continue
72 continue
73 if d == c:
73 if d == c:
74 yield ('string', program[s:pos], s)
74 yield ('string', program[s:pos], s)
75 break
75 break
76 pos += 1
76 pos += 1
77 else:
77 else:
78 raise error.ParseError(_("unterminated string"), s)
78 raise error.ParseError(_("unterminated string"), s)
79 elif c.isdigit():
79 elif c.isdigit():
80 s = pos
80 s = pos
81 while pos < end:
81 while pos < end:
82 d = program[pos]
82 d = program[pos]
83 if not d.isdigit():
83 if not d.isdigit():
84 break
84 break
85 pos += 1
85 pos += 1
86 yield ('integer', program[s:pos], s)
86 yield ('integer', program[s:pos], s)
87 pos -= 1
87 pos -= 1
88 elif (c == '\\' and program[pos:pos + 2] in (r"\'", r'\"')
88 elif (c == '\\' and program[pos:pos + 2] in (r"\'", r'\"')
89 or c == 'r' and program[pos:pos + 3] in (r"r\'", r'r\"')):
89 or c == 'r' and program[pos:pos + 3] in (r"r\'", r'r\"')):
90 # handle escaped quoted strings for compatibility with 2.9.2-3.4,
90 # handle escaped quoted strings for compatibility with 2.9.2-3.4,
91 # where some of nested templates were preprocessed as strings and
91 # where some of nested templates were preprocessed as strings and
92 # then compiled. therefore, \"...\" was allowed. (issue4733)
92 # then compiled. therefore, \"...\" was allowed. (issue4733)
93 #
93 #
94 # processing flow of _evalifliteral() at 5ab28a2e9962:
94 # processing flow of _evalifliteral() at 5ab28a2e9962:
95 # outer template string -> stringify() -> compiletemplate()
95 # outer template string -> stringify() -> compiletemplate()
96 # ------------------------ ------------ ------------------
96 # ------------------------ ------------ ------------------
97 # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}]
97 # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}]
98 # ~~~~~~~~
98 # ~~~~~~~~
99 # escaped quoted string
99 # escaped quoted string
100 if c == 'r':
100 if c == 'r':
101 pos += 1
101 pos += 1
102 token = 'string'
102 token = 'string'
103 else:
103 else:
104 token = 'template'
104 token = 'template'
105 quote = program[pos:pos + 2]
105 quote = program[pos:pos + 2]
106 s = pos = pos + 2
106 s = pos = pos + 2
107 while pos < end: # find closing escaped quote
107 while pos < end: # find closing escaped quote
108 if program.startswith('\\\\\\', pos, end):
108 if program.startswith('\\\\\\', pos, end):
109 pos += 4 # skip over double escaped characters
109 pos += 4 # skip over double escaped characters
110 continue
110 continue
111 if program.startswith(quote, pos, end):
111 if program.startswith(quote, pos, end):
112 # interpret as if it were a part of an outer string
112 # interpret as if it were a part of an outer string
113 data = parser.unescapestr(program[s:pos])
113 data = parser.unescapestr(program[s:pos])
114 if token == 'template':
114 if token == 'template':
115 data = _parsetemplate(data, 0, len(data))[0]
115 data = _parsetemplate(data, 0, len(data))[0]
116 yield (token, data, s)
116 yield (token, data, s)
117 pos += 1
117 pos += 1
118 break
118 break
119 pos += 1
119 pos += 1
120 else:
120 else:
121 raise error.ParseError(_("unterminated string"), s)
121 raise error.ParseError(_("unterminated string"), s)
122 elif c.isalnum() or c in '_':
122 elif c.isalnum() or c in '_':
123 s = pos
123 s = pos
124 pos += 1
124 pos += 1
125 while pos < end: # find end of symbol
125 while pos < end: # find end of symbol
126 d = program[pos]
126 d = program[pos]
127 if not (d.isalnum() or d == "_"):
127 if not (d.isalnum() or d == "_"):
128 break
128 break
129 pos += 1
129 pos += 1
130 sym = program[s:pos]
130 sym = program[s:pos]
131 yield ('symbol', sym, s)
131 yield ('symbol', sym, s)
132 pos -= 1
132 pos -= 1
133 elif c == term:
133 elif c == term:
134 yield ('end', None, pos + 1)
134 yield ('end', None, pos + 1)
135 return
135 return
136 else:
136 else:
137 raise error.ParseError(_("syntax error"), pos)
137 raise error.ParseError(_("syntax error"), pos)
138 pos += 1
138 pos += 1
139 if term:
139 if term:
140 raise error.ParseError(_("unterminated template expansion"), start)
140 raise error.ParseError(_("unterminated template expansion"), start)
141 yield ('end', None, pos)
141 yield ('end', None, pos)
142
142
143 def _parsetemplate(tmpl, start, stop, quote=''):
143 def _parsetemplate(tmpl, start, stop, quote=''):
144 r"""
144 r"""
145 >>> _parsetemplate('foo{bar}"baz', 0, 12)
145 >>> _parsetemplate('foo{bar}"baz', 0, 12)
146 ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
146 ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
147 >>> _parsetemplate('foo{bar}"baz', 0, 12, quote='"')
147 >>> _parsetemplate('foo{bar}"baz', 0, 12, quote='"')
148 ([('string', 'foo'), ('symbol', 'bar')], 9)
148 ([('string', 'foo'), ('symbol', 'bar')], 9)
149 >>> _parsetemplate('foo"{bar}', 0, 9, quote='"')
149 >>> _parsetemplate('foo"{bar}', 0, 9, quote='"')
150 ([('string', 'foo')], 4)
150 ([('string', 'foo')], 4)
151 >>> _parsetemplate(r'foo\"bar"baz', 0, 12, quote='"')
151 >>> _parsetemplate(r'foo\"bar"baz', 0, 12, quote='"')
152 ([('string', 'foo"'), ('string', 'bar')], 9)
152 ([('string', 'foo"'), ('string', 'bar')], 9)
153 >>> _parsetemplate(r'foo\\"bar', 0, 10, quote='"')
153 >>> _parsetemplate(r'foo\\"bar', 0, 10, quote='"')
154 ([('string', 'foo\\')], 6)
154 ([('string', 'foo\\')], 6)
155 """
155 """
156 parsed = []
156 parsed = []
157 sepchars = '{' + quote
157 sepchars = '{' + quote
158 pos = start
158 pos = start
159 p = parser.parser(elements)
159 p = parser.parser(elements)
160 while pos < stop:
160 while pos < stop:
161 n = min((tmpl.find(c, pos, stop) for c in sepchars),
161 n = min((tmpl.find(c, pos, stop) for c in sepchars),
162 key=lambda n: (n < 0, n))
162 key=lambda n: (n < 0, n))
163 if n < 0:
163 if n < 0:
164 parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
164 parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
165 pos = stop
165 pos = stop
166 break
166 break
167 c = tmpl[n]
167 c = tmpl[n]
168 bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
168 bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
169 if bs % 2 == 1:
169 if bs % 2 == 1:
170 # escaped (e.g. '\{', '\\\{', but not '\\{')
170 # escaped (e.g. '\{', '\\\{', but not '\\{')
171 parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c))
171 parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c))
172 pos = n + 1
172 pos = n + 1
173 continue
173 continue
174 if n > pos:
174 if n > pos:
175 parsed.append(('string', parser.unescapestr(tmpl[pos:n])))
175 parsed.append(('string', parser.unescapestr(tmpl[pos:n])))
176 if c == quote:
176 if c == quote:
177 return parsed, n + 1
177 return parsed, n + 1
178
178
179 parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
179 parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
180 parsed.append(parseres)
180 parsed.append(parseres)
181
181
182 if quote:
182 if quote:
183 raise error.ParseError(_("unterminated string"), start)
183 raise error.ParseError(_("unterminated string"), start)
184 return parsed, pos
184 return parsed, pos
185
185
186 def _unnesttemplatelist(tree):
186 def _unnesttemplatelist(tree):
187 """Expand list of templates to node tuple
187 """Expand list of templates to node tuple
188
188
189 >>> def f(tree):
189 >>> def f(tree):
190 ... print prettyformat(_unnesttemplatelist(tree))
190 ... print prettyformat(_unnesttemplatelist(tree))
191 >>> f(('template', []))
191 >>> f(('template', []))
192 ('string', '')
192 ('string', '')
193 >>> f(('template', [('string', 'foo')]))
193 >>> f(('template', [('string', 'foo')]))
194 ('string', 'foo')
194 ('string', 'foo')
195 >>> f(('template', [('string', 'foo'), ('symbol', 'rev')]))
195 >>> f(('template', [('string', 'foo'), ('symbol', 'rev')]))
196 (template
196 (template
197 ('string', 'foo')
197 ('string', 'foo')
198 ('symbol', 'rev'))
198 ('symbol', 'rev'))
199 >>> f(('template', [('symbol', 'rev')])) # template(rev) -> str
199 >>> f(('template', [('symbol', 'rev')])) # template(rev) -> str
200 (template
200 (template
201 ('symbol', 'rev'))
201 ('symbol', 'rev'))
202 >>> f(('template', [('template', [('string', 'foo')])]))
202 >>> f(('template', [('template', [('string', 'foo')])]))
203 ('string', 'foo')
203 ('string', 'foo')
204 """
204 """
205 if not isinstance(tree, tuple):
205 if not isinstance(tree, tuple):
206 return tree
206 return tree
207 op = tree[0]
207 op = tree[0]
208 if op != 'template':
208 if op != 'template':
209 return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])
209 return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])
210
210
211 assert len(tree) == 2
211 assert len(tree) == 2
212 xs = tuple(_unnesttemplatelist(x) for x in tree[1])
212 xs = tuple(_unnesttemplatelist(x) for x in tree[1])
213 if not xs:
213 if not xs:
214 return ('string', '') # empty template ""
214 return ('string', '') # empty template ""
215 elif len(xs) == 1 and xs[0][0] == 'string':
215 elif len(xs) == 1 and xs[0][0] == 'string':
216 return xs[0] # fast path for string with no template fragment "x"
216 return xs[0] # fast path for string with no template fragment "x"
217 else:
217 else:
218 return (op,) + xs
218 return (op,) + xs
219
219
220 def parse(tmpl):
220 def parse(tmpl):
221 """Parse template string into tree"""
221 """Parse template string into tree"""
222 parsed, pos = _parsetemplate(tmpl, 0, len(tmpl))
222 parsed, pos = _parsetemplate(tmpl, 0, len(tmpl))
223 assert pos == len(tmpl), 'unquoted template should be consumed'
223 assert pos == len(tmpl), 'unquoted template should be consumed'
224 return _unnesttemplatelist(('template', parsed))
224 return _unnesttemplatelist(('template', parsed))
225
225
226 def _parseexpr(expr):
226 def _parseexpr(expr):
227 """Parse a template expression into tree
227 """Parse a template expression into tree
228
228
229 >>> _parseexpr('"foo"')
229 >>> _parseexpr('"foo"')
230 ('string', 'foo')
230 ('string', 'foo')
231 >>> _parseexpr('foo(bar)')
231 >>> _parseexpr('foo(bar)')
232 ('func', ('symbol', 'foo'), ('symbol', 'bar'))
232 ('func', ('symbol', 'foo'), ('symbol', 'bar'))
233 >>> _parseexpr('foo(')
233 >>> _parseexpr('foo(')
234 Traceback (most recent call last):
234 Traceback (most recent call last):
235 ...
235 ...
236 ParseError: ('not a prefix: end', 4)
236 ParseError: ('not a prefix: end', 4)
237 >>> _parseexpr('"foo" "bar"')
237 >>> _parseexpr('"foo" "bar"')
238 Traceback (most recent call last):
238 Traceback (most recent call last):
239 ...
239 ...
240 ParseError: ('invalid token', 7)
240 ParseError: ('invalid token', 7)
241 """
241 """
242 p = parser.parser(elements)
242 p = parser.parser(elements)
243 tree, pos = p.parse(tokenize(expr, 0, len(expr)))
243 tree, pos = p.parse(tokenize(expr, 0, len(expr)))
244 if pos != len(expr):
244 if pos != len(expr):
245 raise error.ParseError(_('invalid token'), pos)
245 raise error.ParseError(_('invalid token'), pos)
246 return _unnesttemplatelist(tree)
246 return _unnesttemplatelist(tree)
247
247
248 def prettyformat(tree):
248 def prettyformat(tree):
249 return parser.prettyformat(tree, ('integer', 'string', 'symbol'))
249 return parser.prettyformat(tree, ('integer', 'string', 'symbol'))
250
250
251 def compileexp(exp, context, curmethods):
251 def compileexp(exp, context, curmethods):
252 """Compile parsed template tree to (func, data) pair"""
252 """Compile parsed template tree to (func, data) pair"""
253 t = exp[0]
253 t = exp[0]
254 if t in curmethods:
254 if t in curmethods:
255 return curmethods[t](exp, context)
255 return curmethods[t](exp, context)
256 raise error.ParseError(_("unknown method '%s'") % t)
256 raise error.ParseError(_("unknown method '%s'") % t)
257
257
258 # template evaluation
258 # template evaluation
259
259
260 def getsymbol(exp):
260 def getsymbol(exp):
261 if exp[0] == 'symbol':
261 if exp[0] == 'symbol':
262 return exp[1]
262 return exp[1]
263 raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
263 raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
264
264
265 def getlist(x):
265 def getlist(x):
266 if not x:
266 if not x:
267 return []
267 return []
268 if x[0] == 'list':
268 if x[0] == 'list':
269 return getlist(x[1]) + [x[2]]
269 return getlist(x[1]) + [x[2]]
270 return [x]
270 return [x]
271
271
272 def gettemplate(exp, context):
272 def gettemplate(exp, context):
273 """Compile given template tree or load named template from map file;
273 """Compile given template tree or load named template from map file;
274 returns (func, data) pair"""
274 returns (func, data) pair"""
275 if exp[0] in ('template', 'string'):
275 if exp[0] in ('template', 'string'):
276 return compileexp(exp, context, methods)
276 return compileexp(exp, context, methods)
277 if exp[0] == 'symbol':
277 if exp[0] == 'symbol':
278 # unlike runsymbol(), here 'symbol' is always taken as template name
278 # unlike runsymbol(), here 'symbol' is always taken as template name
279 # even if it exists in mapping. this allows us to override mapping
279 # even if it exists in mapping. this allows us to override mapping
280 # by web templates, e.g. 'changelogtag' is redefined in map file.
280 # by web templates, e.g. 'changelogtag' is redefined in map file.
281 return context._load(exp[1])
281 return context._load(exp[1])
282 raise error.ParseError(_("expected template specifier"))
282 raise error.ParseError(_("expected template specifier"))
283
283
284 def evalfuncarg(context, mapping, arg):
284 def evalfuncarg(context, mapping, arg):
285 func, data = arg
285 func, data = arg
286 # func() may return string, generator of strings or arbitrary object such
286 # func() may return string, generator of strings or arbitrary object such
287 # as date tuple, but filter does not want generator.
287 # as date tuple, but filter does not want generator.
288 thing = func(context, mapping, data)
288 thing = func(context, mapping, data)
289 if isinstance(thing, types.GeneratorType):
289 if isinstance(thing, types.GeneratorType):
290 thing = stringify(thing)
290 thing = stringify(thing)
291 return thing
291 return thing
292
292
293 def evalboolean(context, mapping, arg):
293 def evalboolean(context, mapping, arg):
294 """Evaluate given argument as boolean, but also takes boolean literals"""
294 """Evaluate given argument as boolean, but also takes boolean literals"""
295 func, data = arg
295 func, data = arg
296 if func is runsymbol:
296 if func is runsymbol:
297 thing = func(context, mapping, data, default=None)
297 thing = func(context, mapping, data, default=None)
298 if thing is None:
298 if thing is None:
299 # not a template keyword, takes as a boolean literal
299 # not a template keyword, takes as a boolean literal
300 thing = util.parsebool(data)
300 thing = util.parsebool(data)
301 else:
301 else:
302 thing = func(context, mapping, data)
302 thing = func(context, mapping, data)
303 if isinstance(thing, bool):
303 if isinstance(thing, bool):
304 return thing
304 return thing
305 # other objects are evaluated as strings, which means 0 is True, but
305 # other objects are evaluated as strings, which means 0 is True, but
306 # empty dict/list should be False as they are expected to be ''
306 # empty dict/list should be False as they are expected to be ''
307 return bool(stringify(thing))
307 return bool(stringify(thing))
308
308
309 def evalinteger(context, mapping, arg, err):
309 def evalinteger(context, mapping, arg, err):
310 v = evalfuncarg(context, mapping, arg)
310 v = evalfuncarg(context, mapping, arg)
311 try:
311 try:
312 return int(v)
312 return int(v)
313 except (TypeError, ValueError):
313 except (TypeError, ValueError):
314 raise error.ParseError(err)
314 raise error.ParseError(err)
315
315
316 def evalstring(context, mapping, arg):
316 def evalstring(context, mapping, arg):
317 func, data = arg
317 func, data = arg
318 return stringify(func(context, mapping, data))
318 return stringify(func(context, mapping, data))
319
319
320 def evalstringliteral(context, mapping, arg):
320 def evalstringliteral(context, mapping, arg):
321 """Evaluate given argument as string template, but returns symbol name
321 """Evaluate given argument as string template, but returns symbol name
322 if it is unknown"""
322 if it is unknown"""
323 func, data = arg
323 func, data = arg
324 if func is runsymbol:
324 if func is runsymbol:
325 thing = func(context, mapping, data, default=data)
325 thing = func(context, mapping, data, default=data)
326 else:
326 else:
327 thing = func(context, mapping, data)
327 thing = func(context, mapping, data)
328 return stringify(thing)
328 return stringify(thing)
329
329
330 def runinteger(context, mapping, data):
330 def runinteger(context, mapping, data):
331 return int(data)
331 return int(data)
332
332
333 def runstring(context, mapping, data):
333 def runstring(context, mapping, data):
334 return data
334 return data
335
335
336 def _recursivesymbolblocker(key):
336 def _recursivesymbolblocker(key):
337 def showrecursion(**args):
337 def showrecursion(**args):
338 raise error.Abort(_("recursive reference '%s' in template") % key)
338 raise error.Abort(_("recursive reference '%s' in template") % key)
339 return showrecursion
339 return showrecursion
340
340
341 def _runrecursivesymbol(context, mapping, key):
341 def _runrecursivesymbol(context, mapping, key):
342 raise error.Abort(_("recursive reference '%s' in template") % key)
342 raise error.Abort(_("recursive reference '%s' in template") % key)
343
343
344 def runsymbol(context, mapping, key, default=''):
344 def runsymbol(context, mapping, key, default=''):
345 v = mapping.get(key)
345 v = mapping.get(key)
346 if v is None:
346 if v is None:
347 v = context._defaults.get(key)
347 v = context._defaults.get(key)
348 if v is None:
348 if v is None:
349 # put poison to cut recursion. we can't move this to parsing phase
349 # put poison to cut recursion. we can't move this to parsing phase
350 # because "x = {x}" is allowed if "x" is a keyword. (issue4758)
350 # because "x = {x}" is allowed if "x" is a keyword. (issue4758)
351 safemapping = mapping.copy()
351 safemapping = mapping.copy()
352 safemapping[key] = _recursivesymbolblocker(key)
352 safemapping[key] = _recursivesymbolblocker(key)
353 try:
353 try:
354 v = context.process(key, safemapping)
354 v = context.process(key, safemapping)
355 except TemplateNotFound:
355 except TemplateNotFound:
356 v = default
356 v = default
357 if callable(v):
357 if callable(v):
358 return v(**mapping)
358 return v(**mapping)
359 return v
359 return v
360
360
361 def buildtemplate(exp, context):
361 def buildtemplate(exp, context):
362 ctmpl = [compileexp(e, context, methods) for e in exp[1:]]
362 ctmpl = [compileexp(e, context, methods) for e in exp[1:]]
363 return (runtemplate, ctmpl)
363 return (runtemplate, ctmpl)
364
364
365 def runtemplate(context, mapping, template):
365 def runtemplate(context, mapping, template):
366 for func, data in template:
366 for func, data in template:
367 yield func(context, mapping, data)
367 yield func(context, mapping, data)
368
368
369 def buildfilter(exp, context):
369 def buildfilter(exp, context):
370 arg = compileexp(exp[1], context, methods)
370 arg = compileexp(exp[1], context, methods)
371 n = getsymbol(exp[2])
371 n = getsymbol(exp[2])
372 if n in context._filters:
372 if n in context._filters:
373 filt = context._filters[n]
373 filt = context._filters[n]
374 return (runfilter, (arg, filt))
374 return (runfilter, (arg, filt))
375 if n in funcs:
375 if n in funcs:
376 f = funcs[n]
376 f = funcs[n]
377 return (f, [arg])
377 return (f, [arg])
378 raise error.ParseError(_("unknown function '%s'") % n)
378 raise error.ParseError(_("unknown function '%s'") % n)
379
379
380 def runfilter(context, mapping, data):
380 def runfilter(context, mapping, data):
381 arg, filt = data
381 arg, filt = data
382 thing = evalfuncarg(context, mapping, arg)
382 thing = evalfuncarg(context, mapping, arg)
383 try:
383 try:
384 return filt(thing)
384 return filt(thing)
385 except (ValueError, AttributeError, TypeError):
385 except (ValueError, AttributeError, TypeError):
386 if isinstance(arg[1], tuple):
386 if isinstance(arg[1], tuple):
387 dt = arg[1][1]
387 dt = arg[1][1]
388 else:
388 else:
389 dt = arg[1]
389 dt = arg[1]
390 raise error.Abort(_("template filter '%s' is not compatible with "
390 raise error.Abort(_("template filter '%s' is not compatible with "
391 "keyword '%s'") % (filt.func_name, dt))
391 "keyword '%s'") % (filt.func_name, dt))
392
392
393 def buildmap(exp, context):
393 def buildmap(exp, context):
394 func, data = compileexp(exp[1], context, methods)
394 func, data = compileexp(exp[1], context, methods)
395 tfunc, tdata = gettemplate(exp[2], context)
395 tfunc, tdata = gettemplate(exp[2], context)
396 return (runmap, (func, data, tfunc, tdata))
396 return (runmap, (func, data, tfunc, tdata))
397
397
398 def runmap(context, mapping, data):
398 def runmap(context, mapping, data):
399 func, data, tfunc, tdata = data
399 func, data, tfunc, tdata = data
400 d = func(context, mapping, data)
400 d = func(context, mapping, data)
401 if util.safehasattr(d, 'itermaps'):
401 if util.safehasattr(d, 'itermaps'):
402 diter = d.itermaps()
402 diter = d.itermaps()
403 else:
403 else:
404 try:
404 try:
405 diter = iter(d)
405 diter = iter(d)
406 except TypeError:
406 except TypeError:
407 if func is runsymbol:
407 if func is runsymbol:
408 raise error.ParseError(_("keyword '%s' is not iterable") % data)
408 raise error.ParseError(_("keyword '%s' is not iterable") % data)
409 else:
409 else:
410 raise error.ParseError(_("%r is not iterable") % d)
410 raise error.ParseError(_("%r is not iterable") % d)
411
411
412 for i in diter:
412 for i in diter:
413 lm = mapping.copy()
413 lm = mapping.copy()
414 if isinstance(i, dict):
414 if isinstance(i, dict):
415 lm.update(i)
415 lm.update(i)
416 lm['originalnode'] = mapping.get('node')
416 lm['originalnode'] = mapping.get('node')
417 yield tfunc(context, lm, tdata)
417 yield tfunc(context, lm, tdata)
418 else:
418 else:
419 # v is not an iterable of dicts, this happen when 'key'
419 # v is not an iterable of dicts, this happen when 'key'
420 # has been fully expanded already and format is useless.
420 # has been fully expanded already and format is useless.
421 # If so, return the expanded value.
421 # If so, return the expanded value.
422 yield i
422 yield i
423
423
424 def buildnegate(exp, context):
424 def buildnegate(exp, context):
425 arg = compileexp(exp[1], context, exprmethods)
425 arg = compileexp(exp[1], context, exprmethods)
426 return (runnegate, arg)
426 return (runnegate, arg)
427
427
428 def runnegate(context, mapping, data):
428 def runnegate(context, mapping, data):
429 data = evalinteger(context, mapping, data,
429 data = evalinteger(context, mapping, data,
430 _('negation needs an integer argument'))
430 _('negation needs an integer argument'))
431 return -data
431 return -data
432
432
433 def buildarithmetic(exp, context, func):
433 def buildarithmetic(exp, context, func):
434 left = compileexp(exp[1], context, exprmethods)
434 left = compileexp(exp[1], context, exprmethods)
435 right = compileexp(exp[2], context, exprmethods)
435 right = compileexp(exp[2], context, exprmethods)
436 return (runarithmetic, (func, left, right))
436 return (runarithmetic, (func, left, right))
437
437
438 def runarithmetic(context, mapping, data):
438 def runarithmetic(context, mapping, data):
439 func, left, right = data
439 func, left, right = data
440 left = evalinteger(context, mapping, left,
440 left = evalinteger(context, mapping, left,
441 _('arithmetic only defined on integers'))
441 _('arithmetic only defined on integers'))
442 right = evalinteger(context, mapping, right,
442 right = evalinteger(context, mapping, right,
443 _('arithmetic only defined on integers'))
443 _('arithmetic only defined on integers'))
444 try:
444 try:
445 return func(left, right)
445 return func(left, right)
446 except ZeroDivisionError:
446 except ZeroDivisionError:
447 raise error.Abort(_('division by zero is not defined'))
447 raise error.Abort(_('division by zero is not defined'))
448
448
449 def buildfunc(exp, context):
449 def buildfunc(exp, context):
450 n = getsymbol(exp[1])
450 n = getsymbol(exp[1])
451 args = [compileexp(x, context, exprmethods) for x in getlist(exp[2])]
451 args = [compileexp(x, context, exprmethods) for x in getlist(exp[2])]
452 if n in funcs:
452 if n in funcs:
453 f = funcs[n]
453 f = funcs[n]
454 return (f, args)
454 return (f, args)
455 if n in context._filters:
455 if n in context._filters:
456 if len(args) != 1:
456 if len(args) != 1:
457 raise error.ParseError(_("filter %s expects one argument") % n)
457 raise error.ParseError(_("filter %s expects one argument") % n)
458 f = context._filters[n]
458 f = context._filters[n]
459 return (runfilter, (args[0], f))
459 return (runfilter, (args[0], f))
460 raise error.ParseError(_("unknown function '%s'") % n)
460 raise error.ParseError(_("unknown function '%s'") % n)
461
461
462 # dict of template built-in functions
462 # dict of template built-in functions
463 funcs = {}
463 funcs = {}
464
464
465 templatefunc = registrar.templatefunc(funcs)
465 templatefunc = registrar.templatefunc(funcs)
466
466
467 @templatefunc('date(date[, fmt])')
467 @templatefunc('date(date[, fmt])')
468 def date(context, mapping, args):
468 def date(context, mapping, args):
469 """Format a date. See :hg:`help dates` for formatting
469 """Format a date. See :hg:`help dates` for formatting
470 strings. The default is a Unix date format, including the timezone:
470 strings. The default is a Unix date format, including the timezone:
471 "Mon Sep 04 15:13:13 2006 0700"."""
471 "Mon Sep 04 15:13:13 2006 0700"."""
472 if not (1 <= len(args) <= 2):
472 if not (1 <= len(args) <= 2):
473 # i18n: "date" is a keyword
473 # i18n: "date" is a keyword
474 raise error.ParseError(_("date expects one or two arguments"))
474 raise error.ParseError(_("date expects one or two arguments"))
475
475
476 date = evalfuncarg(context, mapping, args[0])
476 date = evalfuncarg(context, mapping, args[0])
477 fmt = None
477 fmt = None
478 if len(args) == 2:
478 if len(args) == 2:
479 fmt = evalstring(context, mapping, args[1])
479 fmt = evalstring(context, mapping, args[1])
480 try:
480 try:
481 if fmt is None:
481 if fmt is None:
482 return util.datestr(date)
482 return util.datestr(date)
483 else:
483 else:
484 return util.datestr(date, fmt)
484 return util.datestr(date, fmt)
485 except (TypeError, ValueError):
485 except (TypeError, ValueError):
486 # i18n: "date" is a keyword
486 # i18n: "date" is a keyword
487 raise error.ParseError(_("date expects a date information"))
487 raise error.ParseError(_("date expects a date information"))
488
488
489 @templatefunc('diff([includepattern [, excludepattern]])')
489 @templatefunc('diff([includepattern [, excludepattern]])')
490 def diff(context, mapping, args):
490 def diff(context, mapping, args):
491 """Show a diff, optionally
491 """Show a diff, optionally
492 specifying files to include or exclude."""
492 specifying files to include or exclude."""
493 if len(args) > 2:
493 if len(args) > 2:
494 # i18n: "diff" is a keyword
494 # i18n: "diff" is a keyword
495 raise error.ParseError(_("diff expects zero, one, or two arguments"))
495 raise error.ParseError(_("diff expects zero, one, or two arguments"))
496
496
497 def getpatterns(i):
497 def getpatterns(i):
498 if i < len(args):
498 if i < len(args):
499 s = evalstring(context, mapping, args[i]).strip()
499 s = evalstring(context, mapping, args[i]).strip()
500 if s:
500 if s:
501 return [s]
501 return [s]
502 return []
502 return []
503
503
504 ctx = mapping['ctx']
504 ctx = mapping['ctx']
505 chunks = ctx.diff(match=ctx.match([], getpatterns(0), getpatterns(1)))
505 chunks = ctx.diff(match=ctx.match([], getpatterns(0), getpatterns(1)))
506
506
507 return ''.join(chunks)
507 return ''.join(chunks)
508
508
509 @templatefunc('files(pattern)')
509 @templatefunc('files(pattern)')
510 def files(context, mapping, args):
510 def files(context, mapping, args):
511 """All files of the current changeset matching the pattern. See
511 """All files of the current changeset matching the pattern. See
512 :hg:`help patterns`."""
512 :hg:`help patterns`."""
513 if not len(args) == 1:
513 if not len(args) == 1:
514 # i18n: "files" is a keyword
514 # i18n: "files" is a keyword
515 raise error.ParseError(_("files expects one argument"))
515 raise error.ParseError(_("files expects one argument"))
516
516
517 raw = evalstring(context, mapping, args[0])
517 raw = evalstring(context, mapping, args[0])
518 ctx = mapping['ctx']
518 ctx = mapping['ctx']
519 m = ctx.match([raw])
519 m = ctx.match([raw])
520 files = list(ctx.matches(m))
520 files = list(ctx.matches(m))
521 return templatekw.showlist("file", files, **mapping)
521 return templatekw.showlist("file", files, **mapping)
522
522
523 @templatefunc('fill(text[, width[, initialident[, hangindent]]])')
523 @templatefunc('fill(text[, width[, initialident[, hangindent]]])')
524 def fill(context, mapping, args):
524 def fill(context, mapping, args):
525 """Fill many
525 """Fill many
526 paragraphs with optional indentation. See the "fill" filter."""
526 paragraphs with optional indentation. See the "fill" filter."""
527 if not (1 <= len(args) <= 4):
527 if not (1 <= len(args) <= 4):
528 # i18n: "fill" is a keyword
528 # i18n: "fill" is a keyword
529 raise error.ParseError(_("fill expects one to four arguments"))
529 raise error.ParseError(_("fill expects one to four arguments"))
530
530
531 text = evalstring(context, mapping, args[0])
531 text = evalstring(context, mapping, args[0])
532 width = 76
532 width = 76
533 initindent = ''
533 initindent = ''
534 hangindent = ''
534 hangindent = ''
535 if 2 <= len(args) <= 4:
535 if 2 <= len(args) <= 4:
536 width = evalinteger(context, mapping, args[1],
536 width = evalinteger(context, mapping, args[1],
537 # i18n: "fill" is a keyword
537 # i18n: "fill" is a keyword
538 _("fill expects an integer width"))
538 _("fill expects an integer width"))
539 try:
539 try:
540 initindent = evalstring(context, mapping, args[2])
540 initindent = evalstring(context, mapping, args[2])
541 hangindent = evalstring(context, mapping, args[3])
541 hangindent = evalstring(context, mapping, args[3])
542 except IndexError:
542 except IndexError:
543 pass
543 pass
544
544
545 return templatefilters.fill(text, width, initindent, hangindent)
545 return templatefilters.fill(text, width, initindent, hangindent)
546
546
547 @templatefunc('formatnode(node)')
548 def formatnode(context, mapping, args):
549 """Obtain the preferred form of a changeset hash. (DEPRECATED)"""
550 if len(args) != 1:
551 # i18n: "formatnode" is a keyword
552 raise error.ParseError(_("formatnode expects one argument"))
553
554 ui = mapping['ui']
555 node = evalstring(context, mapping, args[0])
556 if ui.debugflag:
557 return node
558 return templatefilters.short(node)
559
547 @templatefunc('pad(text, width[, fillchar=\' \'[, left=False]])')
560 @templatefunc('pad(text, width[, fillchar=\' \'[, left=False]])')
548 def pad(context, mapping, args):
561 def pad(context, mapping, args):
549 """Pad text with a
562 """Pad text with a
550 fill character."""
563 fill character."""
551 if not (2 <= len(args) <= 4):
564 if not (2 <= len(args) <= 4):
552 # i18n: "pad" is a keyword
565 # i18n: "pad" is a keyword
553 raise error.ParseError(_("pad() expects two to four arguments"))
566 raise error.ParseError(_("pad() expects two to four arguments"))
554
567
555 width = evalinteger(context, mapping, args[1],
568 width = evalinteger(context, mapping, args[1],
556 # i18n: "pad" is a keyword
569 # i18n: "pad" is a keyword
557 _("pad() expects an integer width"))
570 _("pad() expects an integer width"))
558
571
559 text = evalstring(context, mapping, args[0])
572 text = evalstring(context, mapping, args[0])
560
573
561 left = False
574 left = False
562 fillchar = ' '
575 fillchar = ' '
563 if len(args) > 2:
576 if len(args) > 2:
564 fillchar = evalstring(context, mapping, args[2])
577 fillchar = evalstring(context, mapping, args[2])
565 if len(args) > 3:
578 if len(args) > 3:
566 left = evalboolean(context, mapping, args[3])
579 left = evalboolean(context, mapping, args[3])
567
580
568 if left:
581 if left:
569 return text.rjust(width, fillchar)
582 return text.rjust(width, fillchar)
570 else:
583 else:
571 return text.ljust(width, fillchar)
584 return text.ljust(width, fillchar)
572
585
573 @templatefunc('indent(text, indentchars[, firstline])')
586 @templatefunc('indent(text, indentchars[, firstline])')
574 def indent(context, mapping, args):
587 def indent(context, mapping, args):
575 """Indents all non-empty lines
588 """Indents all non-empty lines
576 with the characters given in the indentchars string. An optional
589 with the characters given in the indentchars string. An optional
577 third parameter will override the indent for the first line only
590 third parameter will override the indent for the first line only
578 if present."""
591 if present."""
579 if not (2 <= len(args) <= 3):
592 if not (2 <= len(args) <= 3):
580 # i18n: "indent" is a keyword
593 # i18n: "indent" is a keyword
581 raise error.ParseError(_("indent() expects two or three arguments"))
594 raise error.ParseError(_("indent() expects two or three arguments"))
582
595
583 text = evalstring(context, mapping, args[0])
596 text = evalstring(context, mapping, args[0])
584 indent = evalstring(context, mapping, args[1])
597 indent = evalstring(context, mapping, args[1])
585
598
586 if len(args) == 3:
599 if len(args) == 3:
587 firstline = evalstring(context, mapping, args[2])
600 firstline = evalstring(context, mapping, args[2])
588 else:
601 else:
589 firstline = indent
602 firstline = indent
590
603
591 # the indent function doesn't indent the first line, so we do it here
604 # the indent function doesn't indent the first line, so we do it here
592 return templatefilters.indent(firstline + text, indent)
605 return templatefilters.indent(firstline + text, indent)
593
606
594 @templatefunc('get(dict, key)')
607 @templatefunc('get(dict, key)')
595 def get(context, mapping, args):
608 def get(context, mapping, args):
596 """Get an attribute/key from an object. Some keywords
609 """Get an attribute/key from an object. Some keywords
597 are complex types. This function allows you to obtain the value of an
610 are complex types. This function allows you to obtain the value of an
598 attribute on these types."""
611 attribute on these types."""
599 if len(args) != 2:
612 if len(args) != 2:
600 # i18n: "get" is a keyword
613 # i18n: "get" is a keyword
601 raise error.ParseError(_("get() expects two arguments"))
614 raise error.ParseError(_("get() expects two arguments"))
602
615
603 dictarg = evalfuncarg(context, mapping, args[0])
616 dictarg = evalfuncarg(context, mapping, args[0])
604 if not util.safehasattr(dictarg, 'get'):
617 if not util.safehasattr(dictarg, 'get'):
605 # i18n: "get" is a keyword
618 # i18n: "get" is a keyword
606 raise error.ParseError(_("get() expects a dict as first argument"))
619 raise error.ParseError(_("get() expects a dict as first argument"))
607
620
608 key = evalfuncarg(context, mapping, args[1])
621 key = evalfuncarg(context, mapping, args[1])
609 return dictarg.get(key)
622 return dictarg.get(key)
610
623
611 @templatefunc('if(expr, then[, else])')
624 @templatefunc('if(expr, then[, else])')
612 def if_(context, mapping, args):
625 def if_(context, mapping, args):
613 """Conditionally execute based on the result of
626 """Conditionally execute based on the result of
614 an expression."""
627 an expression."""
615 if not (2 <= len(args) <= 3):
628 if not (2 <= len(args) <= 3):
616 # i18n: "if" is a keyword
629 # i18n: "if" is a keyword
617 raise error.ParseError(_("if expects two or three arguments"))
630 raise error.ParseError(_("if expects two or three arguments"))
618
631
619 test = evalboolean(context, mapping, args[0])
632 test = evalboolean(context, mapping, args[0])
620 if test:
633 if test:
621 yield args[1][0](context, mapping, args[1][1])
634 yield args[1][0](context, mapping, args[1][1])
622 elif len(args) == 3:
635 elif len(args) == 3:
623 yield args[2][0](context, mapping, args[2][1])
636 yield args[2][0](context, mapping, args[2][1])
624
637
625 @templatefunc('ifcontains(needle, haystack, then[, else])')
638 @templatefunc('ifcontains(needle, haystack, then[, else])')
626 def ifcontains(context, mapping, args):
639 def ifcontains(context, mapping, args):
627 """Conditionally execute based
640 """Conditionally execute based
628 on whether the item "needle" is in "haystack"."""
641 on whether the item "needle" is in "haystack"."""
629 if not (3 <= len(args) <= 4):
642 if not (3 <= len(args) <= 4):
630 # i18n: "ifcontains" is a keyword
643 # i18n: "ifcontains" is a keyword
631 raise error.ParseError(_("ifcontains expects three or four arguments"))
644 raise error.ParseError(_("ifcontains expects three or four arguments"))
632
645
633 needle = evalstring(context, mapping, args[0])
646 needle = evalstring(context, mapping, args[0])
634 haystack = evalfuncarg(context, mapping, args[1])
647 haystack = evalfuncarg(context, mapping, args[1])
635
648
636 if needle in haystack:
649 if needle in haystack:
637 yield args[2][0](context, mapping, args[2][1])
650 yield args[2][0](context, mapping, args[2][1])
638 elif len(args) == 4:
651 elif len(args) == 4:
639 yield args[3][0](context, mapping, args[3][1])
652 yield args[3][0](context, mapping, args[3][1])
640
653
641 @templatefunc('ifeq(expr1, expr2, then[, else])')
654 @templatefunc('ifeq(expr1, expr2, then[, else])')
642 def ifeq(context, mapping, args):
655 def ifeq(context, mapping, args):
643 """Conditionally execute based on
656 """Conditionally execute based on
644 whether 2 items are equivalent."""
657 whether 2 items are equivalent."""
645 if not (3 <= len(args) <= 4):
658 if not (3 <= len(args) <= 4):
646 # i18n: "ifeq" is a keyword
659 # i18n: "ifeq" is a keyword
647 raise error.ParseError(_("ifeq expects three or four arguments"))
660 raise error.ParseError(_("ifeq expects three or four arguments"))
648
661
649 test = evalstring(context, mapping, args[0])
662 test = evalstring(context, mapping, args[0])
650 match = evalstring(context, mapping, args[1])
663 match = evalstring(context, mapping, args[1])
651 if test == match:
664 if test == match:
652 yield args[2][0](context, mapping, args[2][1])
665 yield args[2][0](context, mapping, args[2][1])
653 elif len(args) == 4:
666 elif len(args) == 4:
654 yield args[3][0](context, mapping, args[3][1])
667 yield args[3][0](context, mapping, args[3][1])
655
668
656 @templatefunc('join(list, sep)')
669 @templatefunc('join(list, sep)')
657 def join(context, mapping, args):
670 def join(context, mapping, args):
658 """Join items in a list with a delimiter."""
671 """Join items in a list with a delimiter."""
659 if not (1 <= len(args) <= 2):
672 if not (1 <= len(args) <= 2):
660 # i18n: "join" is a keyword
673 # i18n: "join" is a keyword
661 raise error.ParseError(_("join expects one or two arguments"))
674 raise error.ParseError(_("join expects one or two arguments"))
662
675
663 joinset = args[0][0](context, mapping, args[0][1])
676 joinset = args[0][0](context, mapping, args[0][1])
664 if util.safehasattr(joinset, 'itermaps'):
677 if util.safehasattr(joinset, 'itermaps'):
665 jf = joinset.joinfmt
678 jf = joinset.joinfmt
666 joinset = [jf(x) for x in joinset.itermaps()]
679 joinset = [jf(x) for x in joinset.itermaps()]
667
680
668 joiner = " "
681 joiner = " "
669 if len(args) > 1:
682 if len(args) > 1:
670 joiner = evalstring(context, mapping, args[1])
683 joiner = evalstring(context, mapping, args[1])
671
684
672 first = True
685 first = True
673 for x in joinset:
686 for x in joinset:
674 if first:
687 if first:
675 first = False
688 first = False
676 else:
689 else:
677 yield joiner
690 yield joiner
678 yield x
691 yield x
679
692
680 @templatefunc('label(label, expr)')
693 @templatefunc('label(label, expr)')
681 def label(context, mapping, args):
694 def label(context, mapping, args):
682 """Apply a label to generated content. Content with
695 """Apply a label to generated content. Content with
683 a label applied can result in additional post-processing, such as
696 a label applied can result in additional post-processing, such as
684 automatic colorization."""
697 automatic colorization."""
685 if len(args) != 2:
698 if len(args) != 2:
686 # i18n: "label" is a keyword
699 # i18n: "label" is a keyword
687 raise error.ParseError(_("label expects two arguments"))
700 raise error.ParseError(_("label expects two arguments"))
688
701
689 ui = mapping['ui']
702 ui = mapping['ui']
690 thing = evalstring(context, mapping, args[1])
703 thing = evalstring(context, mapping, args[1])
691 # preserve unknown symbol as literal so effects like 'red', 'bold',
704 # preserve unknown symbol as literal so effects like 'red', 'bold',
692 # etc. don't need to be quoted
705 # etc. don't need to be quoted
693 label = evalstringliteral(context, mapping, args[0])
706 label = evalstringliteral(context, mapping, args[0])
694
707
695 return ui.label(thing, label)
708 return ui.label(thing, label)
696
709
697 @templatefunc('latesttag([pattern])')
710 @templatefunc('latesttag([pattern])')
698 def latesttag(context, mapping, args):
711 def latesttag(context, mapping, args):
699 """The global tags matching the given pattern on the
712 """The global tags matching the given pattern on the
700 most recent globally tagged ancestor of this changeset."""
713 most recent globally tagged ancestor of this changeset."""
701 if len(args) > 1:
714 if len(args) > 1:
702 # i18n: "latesttag" is a keyword
715 # i18n: "latesttag" is a keyword
703 raise error.ParseError(_("latesttag expects at most one argument"))
716 raise error.ParseError(_("latesttag expects at most one argument"))
704
717
705 pattern = None
718 pattern = None
706 if len(args) == 1:
719 if len(args) == 1:
707 pattern = evalstring(context, mapping, args[0])
720 pattern = evalstring(context, mapping, args[0])
708
721
709 return templatekw.showlatesttags(pattern, **mapping)
722 return templatekw.showlatesttags(pattern, **mapping)
710
723
711 @templatefunc('localdate(date[, tz])')
724 @templatefunc('localdate(date[, tz])')
712 def localdate(context, mapping, args):
725 def localdate(context, mapping, args):
713 """Converts a date to the specified timezone.
726 """Converts a date to the specified timezone.
714 The default is local date."""
727 The default is local date."""
715 if not (1 <= len(args) <= 2):
728 if not (1 <= len(args) <= 2):
716 # i18n: "localdate" is a keyword
729 # i18n: "localdate" is a keyword
717 raise error.ParseError(_("localdate expects one or two arguments"))
730 raise error.ParseError(_("localdate expects one or two arguments"))
718
731
719 date = evalfuncarg(context, mapping, args[0])
732 date = evalfuncarg(context, mapping, args[0])
720 try:
733 try:
721 date = util.parsedate(date)
734 date = util.parsedate(date)
722 except AttributeError: # not str nor date tuple
735 except AttributeError: # not str nor date tuple
723 # i18n: "localdate" is a keyword
736 # i18n: "localdate" is a keyword
724 raise error.ParseError(_("localdate expects a date information"))
737 raise error.ParseError(_("localdate expects a date information"))
725 if len(args) >= 2:
738 if len(args) >= 2:
726 tzoffset = None
739 tzoffset = None
727 tz = evalfuncarg(context, mapping, args[1])
740 tz = evalfuncarg(context, mapping, args[1])
728 if isinstance(tz, str):
741 if isinstance(tz, str):
729 tzoffset, remainder = util.parsetimezone(tz)
742 tzoffset, remainder = util.parsetimezone(tz)
730 if remainder:
743 if remainder:
731 tzoffset = None
744 tzoffset = None
732 if tzoffset is None:
745 if tzoffset is None:
733 try:
746 try:
734 tzoffset = int(tz)
747 tzoffset = int(tz)
735 except (TypeError, ValueError):
748 except (TypeError, ValueError):
736 # i18n: "localdate" is a keyword
749 # i18n: "localdate" is a keyword
737 raise error.ParseError(_("localdate expects a timezone"))
750 raise error.ParseError(_("localdate expects a timezone"))
738 else:
751 else:
739 tzoffset = util.makedate()[1]
752 tzoffset = util.makedate()[1]
740 return (date[0], tzoffset)
753 return (date[0], tzoffset)
741
754
742 @templatefunc('mod(a, b)')
755 @templatefunc('mod(a, b)')
743 def mod(context, mapping, args):
756 def mod(context, mapping, args):
744 """Calculate a mod b such that a / b + a mod b == a"""
757 """Calculate a mod b such that a / b + a mod b == a"""
745 if not len(args) == 2:
758 if not len(args) == 2:
746 # i18n: "mod" is a keyword
759 # i18n: "mod" is a keyword
747 raise error.ParseError(_("mod expects two arguments"))
760 raise error.ParseError(_("mod expects two arguments"))
748
761
749 func = lambda a, b: a % b
762 func = lambda a, b: a % b
750 return runarithmetic(context, mapping, (func, args[0], args[1]))
763 return runarithmetic(context, mapping, (func, args[0], args[1]))
751
764
752 @templatefunc('relpath(path)')
765 @templatefunc('relpath(path)')
753 def relpath(context, mapping, args):
766 def relpath(context, mapping, args):
754 """Convert a repository-absolute path into a filesystem path relative to
767 """Convert a repository-absolute path into a filesystem path relative to
755 the current working directory."""
768 the current working directory."""
756 if len(args) != 1:
769 if len(args) != 1:
757 # i18n: "relpath" is a keyword
770 # i18n: "relpath" is a keyword
758 raise error.ParseError(_("relpath expects one argument"))
771 raise error.ParseError(_("relpath expects one argument"))
759
772
760 repo = mapping['ctx'].repo()
773 repo = mapping['ctx'].repo()
761 path = evalstring(context, mapping, args[0])
774 path = evalstring(context, mapping, args[0])
762 return repo.pathto(path)
775 return repo.pathto(path)
763
776
764 @templatefunc('revset(query[, formatargs...])')
777 @templatefunc('revset(query[, formatargs...])')
765 def revset(context, mapping, args):
778 def revset(context, mapping, args):
766 """Execute a revision set query. See
779 """Execute a revision set query. See
767 :hg:`help revset`."""
780 :hg:`help revset`."""
768 if not len(args) > 0:
781 if not len(args) > 0:
769 # i18n: "revset" is a keyword
782 # i18n: "revset" is a keyword
770 raise error.ParseError(_("revset expects one or more arguments"))
783 raise error.ParseError(_("revset expects one or more arguments"))
771
784
772 raw = evalstring(context, mapping, args[0])
785 raw = evalstring(context, mapping, args[0])
773 ctx = mapping['ctx']
786 ctx = mapping['ctx']
774 repo = ctx.repo()
787 repo = ctx.repo()
775
788
776 def query(expr):
789 def query(expr):
777 m = revsetmod.match(repo.ui, expr)
790 m = revsetmod.match(repo.ui, expr)
778 return m(repo)
791 return m(repo)
779
792
780 if len(args) > 1:
793 if len(args) > 1:
781 formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]]
794 formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]]
782 revs = query(revsetlang.formatspec(raw, *formatargs))
795 revs = query(revsetlang.formatspec(raw, *formatargs))
783 revs = list(revs)
796 revs = list(revs)
784 else:
797 else:
785 revsetcache = mapping['cache'].setdefault("revsetcache", {})
798 revsetcache = mapping['cache'].setdefault("revsetcache", {})
786 if raw in revsetcache:
799 if raw in revsetcache:
787 revs = revsetcache[raw]
800 revs = revsetcache[raw]
788 else:
801 else:
789 revs = query(raw)
802 revs = query(raw)
790 revs = list(revs)
803 revs = list(revs)
791 revsetcache[raw] = revs
804 revsetcache[raw] = revs
792
805
793 return templatekw.showrevslist("revision", revs, **mapping)
806 return templatekw.showrevslist("revision", revs, **mapping)
794
807
795 @templatefunc('rstdoc(text, style)')
808 @templatefunc('rstdoc(text, style)')
796 def rstdoc(context, mapping, args):
809 def rstdoc(context, mapping, args):
797 """Format reStructuredText."""
810 """Format reStructuredText."""
798 if len(args) != 2:
811 if len(args) != 2:
799 # i18n: "rstdoc" is a keyword
812 # i18n: "rstdoc" is a keyword
800 raise error.ParseError(_("rstdoc expects two arguments"))
813 raise error.ParseError(_("rstdoc expects two arguments"))
801
814
802 text = evalstring(context, mapping, args[0])
815 text = evalstring(context, mapping, args[0])
803 style = evalstring(context, mapping, args[1])
816 style = evalstring(context, mapping, args[1])
804
817
805 return minirst.format(text, style=style, keep=['verbose'])
818 return minirst.format(text, style=style, keep=['verbose'])
806
819
807 @templatefunc('separate(sep, args)')
820 @templatefunc('separate(sep, args)')
808 def separate(context, mapping, args):
821 def separate(context, mapping, args):
809 """Add a separator between non-empty arguments."""
822 """Add a separator between non-empty arguments."""
810 if not args:
823 if not args:
811 # i18n: "separate" is a keyword
824 # i18n: "separate" is a keyword
812 raise error.ParseError(_("separate expects at least one argument"))
825 raise error.ParseError(_("separate expects at least one argument"))
813
826
814 sep = evalstring(context, mapping, args[0])
827 sep = evalstring(context, mapping, args[0])
815 first = True
828 first = True
816 for arg in args[1:]:
829 for arg in args[1:]:
817 argstr = evalstring(context, mapping, arg)
830 argstr = evalstring(context, mapping, arg)
818 if not argstr:
831 if not argstr:
819 continue
832 continue
820 if first:
833 if first:
821 first = False
834 first = False
822 else:
835 else:
823 yield sep
836 yield sep
824 yield argstr
837 yield argstr
825
838
826 @templatefunc('shortest(node, minlength=4)')
839 @templatefunc('shortest(node, minlength=4)')
827 def shortest(context, mapping, args):
840 def shortest(context, mapping, args):
828 """Obtain the shortest representation of
841 """Obtain the shortest representation of
829 a node."""
842 a node."""
830 if not (1 <= len(args) <= 2):
843 if not (1 <= len(args) <= 2):
831 # i18n: "shortest" is a keyword
844 # i18n: "shortest" is a keyword
832 raise error.ParseError(_("shortest() expects one or two arguments"))
845 raise error.ParseError(_("shortest() expects one or two arguments"))
833
846
834 node = evalstring(context, mapping, args[0])
847 node = evalstring(context, mapping, args[0])
835
848
836 minlength = 4
849 minlength = 4
837 if len(args) > 1:
850 if len(args) > 1:
838 minlength = evalinteger(context, mapping, args[1],
851 minlength = evalinteger(context, mapping, args[1],
839 # i18n: "shortest" is a keyword
852 # i18n: "shortest" is a keyword
840 _("shortest() expects an integer minlength"))
853 _("shortest() expects an integer minlength"))
841
854
842 # _partialmatch() of filtered changelog could take O(len(repo)) time,
855 # _partialmatch() of filtered changelog could take O(len(repo)) time,
843 # which would be unacceptably slow. so we look for hash collision in
856 # which would be unacceptably slow. so we look for hash collision in
844 # unfiltered space, which means some hashes may be slightly longer.
857 # unfiltered space, which means some hashes may be slightly longer.
845 cl = mapping['ctx']._repo.unfiltered().changelog
858 cl = mapping['ctx']._repo.unfiltered().changelog
846 def isvalid(test):
859 def isvalid(test):
847 try:
860 try:
848 if cl._partialmatch(test) is None:
861 if cl._partialmatch(test) is None:
849 return False
862 return False
850
863
851 try:
864 try:
852 i = int(test)
865 i = int(test)
853 # if we are a pure int, then starting with zero will not be
866 # if we are a pure int, then starting with zero will not be
854 # confused as a rev; or, obviously, if the int is larger than
867 # confused as a rev; or, obviously, if the int is larger than
855 # the value of the tip rev
868 # the value of the tip rev
856 if test[0] == '0' or i > len(cl):
869 if test[0] == '0' or i > len(cl):
857 return True
870 return True
858 return False
871 return False
859 except ValueError:
872 except ValueError:
860 return True
873 return True
861 except error.RevlogError:
874 except error.RevlogError:
862 return False
875 return False
863
876
864 shortest = node
877 shortest = node
865 startlength = max(6, minlength)
878 startlength = max(6, minlength)
866 length = startlength
879 length = startlength
867 while True:
880 while True:
868 test = node[:length]
881 test = node[:length]
869 if isvalid(test):
882 if isvalid(test):
870 shortest = test
883 shortest = test
871 if length == minlength or length > startlength:
884 if length == minlength or length > startlength:
872 return shortest
885 return shortest
873 length -= 1
886 length -= 1
874 else:
887 else:
875 length += 1
888 length += 1
876 if len(shortest) <= length:
889 if len(shortest) <= length:
877 return shortest
890 return shortest
878
891
879 @templatefunc('strip(text[, chars])')
892 @templatefunc('strip(text[, chars])')
880 def strip(context, mapping, args):
893 def strip(context, mapping, args):
881 """Strip characters from a string. By default,
894 """Strip characters from a string. By default,
882 strips all leading and trailing whitespace."""
895 strips all leading and trailing whitespace."""
883 if not (1 <= len(args) <= 2):
896 if not (1 <= len(args) <= 2):
884 # i18n: "strip" is a keyword
897 # i18n: "strip" is a keyword
885 raise error.ParseError(_("strip expects one or two arguments"))
898 raise error.ParseError(_("strip expects one or two arguments"))
886
899
887 text = evalstring(context, mapping, args[0])
900 text = evalstring(context, mapping, args[0])
888 if len(args) == 2:
901 if len(args) == 2:
889 chars = evalstring(context, mapping, args[1])
902 chars = evalstring(context, mapping, args[1])
890 return text.strip(chars)
903 return text.strip(chars)
891 return text.strip()
904 return text.strip()
892
905
893 @templatefunc('sub(pattern, replacement, expression)')
906 @templatefunc('sub(pattern, replacement, expression)')
894 def sub(context, mapping, args):
907 def sub(context, mapping, args):
895 """Perform text substitution
908 """Perform text substitution
896 using regular expressions."""
909 using regular expressions."""
897 if len(args) != 3:
910 if len(args) != 3:
898 # i18n: "sub" is a keyword
911 # i18n: "sub" is a keyword
899 raise error.ParseError(_("sub expects three arguments"))
912 raise error.ParseError(_("sub expects three arguments"))
900
913
901 pat = evalstring(context, mapping, args[0])
914 pat = evalstring(context, mapping, args[0])
902 rpl = evalstring(context, mapping, args[1])
915 rpl = evalstring(context, mapping, args[1])
903 src = evalstring(context, mapping, args[2])
916 src = evalstring(context, mapping, args[2])
904 try:
917 try:
905 patre = re.compile(pat)
918 patre = re.compile(pat)
906 except re.error:
919 except re.error:
907 # i18n: "sub" is a keyword
920 # i18n: "sub" is a keyword
908 raise error.ParseError(_("sub got an invalid pattern: %s") % pat)
921 raise error.ParseError(_("sub got an invalid pattern: %s") % pat)
909 try:
922 try:
910 yield patre.sub(rpl, src)
923 yield patre.sub(rpl, src)
911 except re.error:
924 except re.error:
912 # i18n: "sub" is a keyword
925 # i18n: "sub" is a keyword
913 raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)
926 raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)
914
927
915 @templatefunc('startswith(pattern, text)')
928 @templatefunc('startswith(pattern, text)')
916 def startswith(context, mapping, args):
929 def startswith(context, mapping, args):
917 """Returns the value from the "text" argument
930 """Returns the value from the "text" argument
918 if it begins with the content from the "pattern" argument."""
931 if it begins with the content from the "pattern" argument."""
919 if len(args) != 2:
932 if len(args) != 2:
920 # i18n: "startswith" is a keyword
933 # i18n: "startswith" is a keyword
921 raise error.ParseError(_("startswith expects two arguments"))
934 raise error.ParseError(_("startswith expects two arguments"))
922
935
923 patn = evalstring(context, mapping, args[0])
936 patn = evalstring(context, mapping, args[0])
924 text = evalstring(context, mapping, args[1])
937 text = evalstring(context, mapping, args[1])
925 if text.startswith(patn):
938 if text.startswith(patn):
926 return text
939 return text
927 return ''
940 return ''
928
941
929 @templatefunc('word(number, text[, separator])')
942 @templatefunc('word(number, text[, separator])')
930 def word(context, mapping, args):
943 def word(context, mapping, args):
931 """Return the nth word from a string."""
944 """Return the nth word from a string."""
932 if not (2 <= len(args) <= 3):
945 if not (2 <= len(args) <= 3):
933 # i18n: "word" is a keyword
946 # i18n: "word" is a keyword
934 raise error.ParseError(_("word expects two or three arguments, got %d")
947 raise error.ParseError(_("word expects two or three arguments, got %d")
935 % len(args))
948 % len(args))
936
949
937 num = evalinteger(context, mapping, args[0],
950 num = evalinteger(context, mapping, args[0],
938 # i18n: "word" is a keyword
951 # i18n: "word" is a keyword
939 _("word expects an integer index"))
952 _("word expects an integer index"))
940 text = evalstring(context, mapping, args[1])
953 text = evalstring(context, mapping, args[1])
941 if len(args) == 3:
954 if len(args) == 3:
942 splitter = evalstring(context, mapping, args[2])
955 splitter = evalstring(context, mapping, args[2])
943 else:
956 else:
944 splitter = None
957 splitter = None
945
958
946 tokens = text.split(splitter)
959 tokens = text.split(splitter)
947 if num >= len(tokens) or num < -len(tokens):
960 if num >= len(tokens) or num < -len(tokens):
948 return ''
961 return ''
949 else:
962 else:
950 return tokens[num]
963 return tokens[num]
951
964
952 # methods to interpret function arguments or inner expressions (e.g. {_(x)})
965 # methods to interpret function arguments or inner expressions (e.g. {_(x)})
953 exprmethods = {
966 exprmethods = {
954 "integer": lambda e, c: (runinteger, e[1]),
967 "integer": lambda e, c: (runinteger, e[1]),
955 "string": lambda e, c: (runstring, e[1]),
968 "string": lambda e, c: (runstring, e[1]),
956 "symbol": lambda e, c: (runsymbol, e[1]),
969 "symbol": lambda e, c: (runsymbol, e[1]),
957 "template": buildtemplate,
970 "template": buildtemplate,
958 "group": lambda e, c: compileexp(e[1], c, exprmethods),
971 "group": lambda e, c: compileexp(e[1], c, exprmethods),
959 # ".": buildmember,
972 # ".": buildmember,
960 "|": buildfilter,
973 "|": buildfilter,
961 "%": buildmap,
974 "%": buildmap,
962 "func": buildfunc,
975 "func": buildfunc,
963 "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
976 "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
964 "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
977 "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
965 "negate": buildnegate,
978 "negate": buildnegate,
966 "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
979 "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
967 "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
980 "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
968 }
981 }
969
982
970 # methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
983 # methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
971 methods = exprmethods.copy()
984 methods = exprmethods.copy()
972 methods["integer"] = exprmethods["symbol"] # '{1}' as variable
985 methods["integer"] = exprmethods["symbol"] # '{1}' as variable
973
986
class _aliasrules(parser.basealiasrules):
    """Parsing and expansion rule set of template aliases"""
    _section = _('template alias')
    _parse = staticmethod(_parseexpr)

    @staticmethod
    def _trygetfunc(tree):
        """Return (name, args) if tree is func(...) or ...|filter; otherwise
        None"""
        op = tree[0]
        if op == 'func' and tree[1][0] == 'symbol':
            # func(a, b, ...) -> ('func', [a, b, ...])
            return tree[1][1], getlist(tree[2])
        if op == '|' and tree[2][0] == 'symbol':
            # expr|filter -> ('filter', [expr])
            return tree[2][1], [tree[1]]
987
1000
def expandaliases(tree, aliases):
    """Return a new tree in which the given aliases are expanded"""
    return _aliasrules.expand(_aliasrules.buildmap(aliases), tree)
992
1005
# template engine

# re-export the stringify filter at module level for convenience
stringify = templatefilters.stringify
996
1009
def _flatten(thing):
    '''yield a single stream from a possibly nested set of iterators'''
    if isinstance(thing, str):
        yield thing
    elif thing is None:
        # None contributes nothing to the output stream
        pass
    elif not util.safehasattr(thing, '__iter__'):
        # scalar: coerce to its string form
        yield str(thing)
    else:
        # nested iterable: recurse, which reproduces the same
        # str/None/scalar handling for each item
        for item in thing:
            for piece in _flatten(item):
                yield piece
1016
1029
def unquotestring(s):
    '''unwrap quotes if any; otherwise returns unmodified string'''
    # strip only a matched pair of identical single or double quotes
    if len(s) >= 2 and s[0] == s[-1] and s[0] in ("'", '"'):
        return s[1:-1]
    return s
1022
1035
class engine(object):
    '''template expansion engine.

    A map file supplies key=value pairs.  A quoted value is a literal
    template string; an unquoted value names a template file.

    Expanding a key looks it up in the map and substitutes every {foo}
    reference by looking foo up in turn; expansion is recursive and
    stops when no {foo} remains.

    Expansion also supports formatting, {key%format}, which applies
    format to each item of a list, and filtering,
    {key|filter1|filter2|...}.'''

    def __init__(self, loader, filters=None, defaults=None, aliases=()):
        # loader: callable mapping a template name to its source text
        self._loader = loader
        self._filters = {} if filters is None else filters
        self._defaults = {} if defaults is None else defaults
        self._aliasmap = _aliasrules.buildmap(aliases)
        self._cache = {} # key: (func, data)

    def _load(self, t):
        '''load, parse, and cache a template'''
        if t in self._cache:
            return self._cache[t]
        # put poison to cut recursion while compiling 't'
        self._cache[t] = (_runrecursivesymbol, t)
        try:
            tree = parse(self._loader(t))
            if self._aliasmap:
                tree = _aliasrules.expand(self._aliasmap, tree)
            self._cache[t] = compileexp(tree, self, methods)
        except: # re-raises
            # drop the poison entry so a later attempt can retry
            del self._cache[t]
            raise
        return self._cache[t]

    def process(self, t, mapping):
        '''Perform expansion. t is name of map element to expand.
        mapping contains added elements for use during expansion. Is a
        generator.'''
        func, data = self._load(t)
        return _flatten(func(self, mapping, data))
1075
1088
# registry of template engines, keyed by the type prefix in map files
engines = {'default': engine}
1077
1090
def stylelist():
    """Return a sorted, comma-separated list of available style names,
    or a hint message when no template directory is found."""
    paths = templatepaths()
    if not paths:
        return _('no templates found, try `hg debuginstall` for more info')
    names = []
    for filename in os.listdir(paths[0]):
        parts = filename.split(".")
        if parts[-1] in ('orig', 'rej'):
            # skip leftovers from patching / merge conflicts
            continue
        # len(parts) >= 2 guards against a file named exactly
        # "map-cmdline" (no suffix), which previously raised IndexError
        if parts[0] == "map-cmdline" and len(parts) >= 2:
            names.append(parts[1])
    return ", ".join(sorted(names))
1091
1104
def _readmapfile(mapfile):
    """Load template elements from the given map file

    Returns (cache, tmap): cache maps keys to literal template strings,
    tmap maps keys to (engine type, template file path) pairs.
    """
    if not os.path.exists(mapfile):
        raise error.Abort(_("style '%s' not found") % mapfile,
                          hint=_("available styles: %s") % stylelist())

    base = os.path.dirname(mapfile)
    conf = config.config(includepaths=templatepaths())
    conf.read(mapfile)

    cache = {}
    tmap = {}
    for key, val in conf[''].items():
        if not val:
            raise error.ParseError(_('missing value'), conf.source('', key))
        if val[0] in "'\"":
            # quoted value: inline template string
            if val[0] != val[-1]:
                raise error.ParseError(_('unmatched quotes'),
                                       conf.source('', key))
            cache[key] = unquotestring(val)
        elif key == "__base__":
            # treat as a pointer to a base class for this style
            path = util.normpath(os.path.join(base, val))

            # fallback check in template paths
            if not os.path.exists(path):
                for tpdir in templatepaths():
                    candidate = util.normpath(os.path.join(tpdir, val))
                    if os.path.isfile(candidate):
                        path = candidate
                        break
                    candidatemap = util.normpath(
                        os.path.join(candidate, "map"))
                    if os.path.isfile(candidatemap):
                        path = candidatemap
                        break

            # entries from the base style never override local ones
            bcache, btmap = _readmapfile(path)
            for k, v in bcache.items():
                cache.setdefault(k, v)
            for k, v in btmap.items():
                tmap.setdefault(k, v)
        else:
            # unquoted value: [type:]path to a template file
            ttype, tpath = 'default', val
            if ':' in tpath:
                ttype, tpath = tpath.split(':', 1)
            tmap[key] = ttype, os.path.join(base, tpath)
    return cache, tmap
1141
1154
class TemplateNotFound(error.Abort):
    """Raised when a requested template name is not in the template map."""
1144
1157
class templater(object):

    def __init__(self, filters=None, defaults=None, cache=None, aliases=(),
                 minchunk=1024, maxchunk=65536):
        '''set up template engine.
        filters is dict of functions. each transforms a value into another.
        defaults is dict of default map definitions.
        aliases is list of alias (name, replacement) pairs.
        '''
        # copy the caller's cache so later updates stay local
        self.cache = dict(cache) if cache is not None else {}
        self.map = {}
        self.filters = templatefilters.filters.copy()
        self.filters.update(filters if filters is not None else {})
        self.defaults = defaults if defaults is not None else {}
        self._aliases = aliases
        self.minchunk, self.maxchunk = minchunk, maxchunk
        self.ecache = {} # engine type -> engine instance

    @classmethod
    def frommapfile(cls, mapfile, filters=None, defaults=None, cache=None,
                    minchunk=1024, maxchunk=65536):
        """Create templater from the specified map file"""
        t = cls(filters, defaults, cache, [], minchunk, maxchunk)
        cache, tmap = _readmapfile(mapfile)
        t.cache.update(cache)
        t.map = tmap
        return t

    def __contains__(self, key):
        return key in self.cache or key in self.map

    def load(self, t):
        '''Get the template for the given template name. Use a local cache.'''
        if t in self.cache:
            return self.cache[t]
        try:
            self.cache[t] = util.readfile(self.map[t][1])
        except KeyError as inst:
            raise TemplateNotFound(_('"%s" not in template map') %
                                   inst.args[0])
        except IOError as inst:
            raise IOError(inst.args[0], _('template file %s: %s') %
                          (self.map[t][1], inst.args[1]))
        return self.cache[t]

    def __call__(self, t, **mapping):
        # pick the engine declared in the map file, defaulting otherwise
        ttype = 'default'
        if t in self.map and self.map[t][0]:
            ttype = self.map[t][0]
        if ttype not in self.ecache:
            try:
                ecls = engines[ttype]
            except KeyError:
                raise error.Abort(_('invalid template engine: %s') % ttype)
            self.ecache[ttype] = ecls(self.load, self.filters, self.defaults,
                                      self._aliases)
        proc = self.ecache[ttype]

        stream = proc.process(t, mapping)
        if self.minchunk:
            # rechunk the output to amortize per-write overhead
            stream = util.increasingchunks(stream, min=self.minchunk,
                                           max=self.maxchunk)
        return stream
1211
1224
def templatepaths():
    '''return locations used for template files.'''
    candidates = [os.path.normpath(os.path.join(util.datapath, rel))
                  for rel in ['templates']]
    # keep only directories that actually exist
    return [p for p in candidates if os.path.isdir(p)]
1218
1231
def templatepath(name):
    '''return location of template file. returns None if not found.'''
    for base in templatepaths():
        candidate = os.path.join(base, name)
        if os.path.exists(candidate):
            return candidate
    return None
1226
1239
def stylemap(styles, paths=None):
    """Return path to mapfile for a given style.

    Searches mapfile in the following locations:
    1. templatepath/style/map
    2. templatepath/map-style
    3. templatepath/map
    """

    if paths is None:
        paths = templatepaths()
    elif isinstance(paths, str):
        paths = [paths]

    if isinstance(styles, str):
        styles = [styles]

    for style in styles:
        # only plain name is allowed to honor template paths
        if (not style
            or style in (os.curdir, os.pardir)
            or pycompat.ossep in style
            or pycompat.osaltsep and pycompat.osaltsep in style):
            continue
        # most specific location first, bare 'map' as the last resort
        locations = [os.path.join(style, 'map'), 'map-' + style, 'map']

        for path in paths:
            for location in locations:
                mapfile = os.path.join(path, location)
                if os.path.isfile(mapfile):
                    return style, mapfile

    raise RuntimeError("No hgweb templates found in %r" % paths)
1261
1274
def loadfunction(ui, extname, registrarobj):
    """Load template function from specified registrarobj
    """
    # merge every function registered by the extension into the
    # module-level funcs table
    funcs.update(registrarobj._table)
1267
1280
# tell hggettext to extract docstrings from these functions:
i18nfunctions = funcs.values()
General Comments 0
You need to be logged in to leave comments. Login now