formatter: wrap (tmpl, mapfile) by named tuple...
Yuya Nishihara
r32838:615ec3f1 default
@@ -1,3587 +1,3590
# cmdutil.py - help for command processing in mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import itertools
import os
import re
import tempfile

from .i18n import _
from .node import (
    hex,
    nullid,
    nullrev,
    short,
)

from . import (
    bookmarks,
    changelog,
    copies,
    crecord as crecordmod,
    encoding,
    error,
    formatter,
    graphmod,
    lock as lockmod,
    match as matchmod,
    obsolete,
    patch,
    pathutil,
    phases,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    scmutil,
    smartset,
    templatekw,
    templater,
    util,
    vfs as vfsmod,
)
stringio = util.stringio

# templates of common command options

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]
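# Each option tuple above follows the shape used throughout Mercurial's
# command tables: (short flag, long name, default value, help text[, value
# placeholder]).  For instance ('l', 'limit', '', _('limit number of changes
# displayed'), _('NUM')) in logopts exposes -l/--limit taking a NUM argument.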

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"

def ishunk(x):
    hunkclasses = (crecordmod.uihunk, patch.recordhunk)
    return isinstance(x, hunkclasses)

def newandmodified(chunks, originalchunks):
    newlyaddedandmodifiedfiles = set()
    for chunk in chunks:
        if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
            originalchunks:
            newlyaddedandmodifiedfiles.add(chunk.header.filename())
    return newlyaddedandmodifiedfiles
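# newandmodified() collects the filenames of hunks that create new files and
# that were not part of the original, unfiltered chunk list; dorecord() below
# relies on this set to decide which files need a backup copy before the
# filtered patch is applied.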

def parsealiases(cmd):
    return cmd.lstrip("^").split("|")
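# A command table key such as "^log|history" (the "^" marks a command shown
# in short help) therefore yields ['log', 'history'].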

def setupwrapcolorwrite(ui):
    # wrap ui.write so diff output can be labeled/colorized
    def wrapwrite(orig, *args, **kw):
        label = kw.pop('label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    setattr(ui, 'write', wrap)
    return oldwrite

def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    if usecurses:
        if testfile:
            recordfn = crecordmod.testdecorator(testfile,
                                                crecordmod.testchunkselector)
        else:
            recordfn = crecordmod.chunkselector

        return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)

    else:
        return patch.filterpatch(ui, originalhunks, operation)

def recordfilter(ui, originalhunks, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used to build ui messages to indicate to the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest', None)
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, operation)
    finally:
        ui.write = oldwrite
    return newchunks, newopts

def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    from . import merge as mergemod
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                        newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        extra={"suffix": ".diff"},
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open into thinking that we haven't
                    # modified them.
                    #
                    # Also note that this is racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                pass

    def recordinwlock(ui, repo, message, match, opts):
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)

def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for e in keys:
        aliases = parsealiases(e)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds

def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = sorted(choice)
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        return choice.values()[0]

    raise error.UnknownCommand(cmd, allcmds)

def findrepo(p):
    while not os.path.isdir(os.path.join(p, ".hg")):
        oldp, p = p, os.path.dirname(p)
        if p == oldp:
            return None

    return p
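# findrepo() walks upwards from p until it finds a directory that contains a
# ".hg" subdirectory, returning None once the filesystem root is reached; a
# hypothetical call findrepo('/src/repo/subdir') would return '/src/repo' if
# '/src/repo/.hg' exists.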

def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise error.Abort(_('uncommitted changes'), hint=hint)
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)

def logmessage(ui, opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if not message and logfile:
        try:
            if isstdiofilename(logfile):
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, inst.strerror))
    return message

def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        if ctxorbool:
            return baseformname + ".merge"
    elif 1 < len(ctxorbool.parents()):
        return baseformname + ".merge"

    return baseformname + ".normal"
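# For example, mergeeditform(True, 'commit') returns 'commit.merge', while a
# False value or a single-parent ctx returns 'commit.normal'.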

def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows the description to be changed before
    storing.

    'extramsg' is an extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    are automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor
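# In the common case (no --edit, no finishdesc/extramsg, empty editform) the
# plain commiteditor is returned; passing any of the overriding arguments
# forces the commitforceeditor path so an editor is always spawned.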

def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise error.Abort(_('limit must be a positive integer'))
        if limit <= 0:
            raise error.Abort(_('limit must be positive'))
    else:
        limit = None
    return limit

def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', str(desc))
    }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
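# Illustrative expansion (hash and revision are made-up values): with a pat of
# 'export-%r-%h.patch' and a node whose short hash is 'abcdef012345' at
# revision 42, this returns 'export-42-abcdef012345.patch'; an unknown spec
# such as '%x' aborts via the KeyError handler above.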

def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    return not pat or pat == '-'
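# Both '' and '-' count as stdio here, so makefileobj() below returns a
# wrapper around ui.fout/ui.fin instead of opening a real file for them.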
615
615
616 class _unclosablefile(object):
616 class _unclosablefile(object):
617 def __init__(self, fp):
617 def __init__(self, fp):
618 self._fp = fp
618 self._fp = fp
619
619
620 def close(self):
620 def close(self):
621 pass
621 pass
622
622
623 def __iter__(self):
623 def __iter__(self):
624 return iter(self._fp)
624 return iter(self._fp)
625
625
626 def __getattr__(self, attr):
626 def __getattr__(self, attr):
627 return getattr(self._fp, attr)
627 return getattr(self._fp, attr)
628
628
629 def __enter__(self):
629 def __enter__(self):
630 return self
630 return self
631
631
632 def __exit__(self, exc_type, exc_value, exc_tb):
632 def __exit__(self, exc_type, exc_value, exc_tb):
633 pass
633 pass
634
634
635 def makefileobj(repo, pat, node=None, desc=None, total=None,
635 def makefileobj(repo, pat, node=None, desc=None, total=None,
636 seqno=None, revwidth=None, mode='wb', modemap=None,
636 seqno=None, revwidth=None, mode='wb', modemap=None,
637 pathname=None):
637 pathname=None):
638
638
639 writable = mode not in ('r', 'rb')
639 writable = mode not in ('r', 'rb')
640
640
641 if isstdiofilename(pat):
641 if isstdiofilename(pat):
642 if writable:
642 if writable:
643 fp = repo.ui.fout
643 fp = repo.ui.fout
644 else:
644 else:
645 fp = repo.ui.fin
645 fp = repo.ui.fin
646 return _unclosablefile(fp)
646 return _unclosablefile(fp)
647 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
647 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
648 if modemap is not None:
648 if modemap is not None:
649 mode = modemap.get(fn, mode)
649 mode = modemap.get(fn, mode)
650 if mode == 'wb':
650 if mode == 'wb':
651 modemap[fn] = 'ab'
651 modemap[fn] = 'ab'
652 return open(fn, mode)
652 return open(fn, mode)
653
653
654 def openrevlog(repo, cmd, file_, opts):
654 def openrevlog(repo, cmd, file_, opts):
655 """opens the changelog, manifest, a filelog or a given revlog"""
655 """opens the changelog, manifest, a filelog or a given revlog"""
656 cl = opts['changelog']
656 cl = opts['changelog']
657 mf = opts['manifest']
657 mf = opts['manifest']
658 dir = opts['dir']
658 dir = opts['dir']
659 msg = None
659 msg = None
660 if cl and mf:
660 if cl and mf:
661 msg = _('cannot specify --changelog and --manifest at the same time')
661 msg = _('cannot specify --changelog and --manifest at the same time')
662 elif cl and dir:
662 elif cl and dir:
663 msg = _('cannot specify --changelog and --dir at the same time')
663 msg = _('cannot specify --changelog and --dir at the same time')
664 elif cl or mf or dir:
664 elif cl or mf or dir:
665 if file_:
665 if file_:
666 msg = _('cannot specify filename with --changelog or --manifest')
666 msg = _('cannot specify filename with --changelog or --manifest')
667 elif not repo:
667 elif not repo:
668 msg = _('cannot specify --changelog or --manifest or --dir '
668 msg = _('cannot specify --changelog or --manifest or --dir '
669 'without a repository')
669 'without a repository')
670 if msg:
670 if msg:
671 raise error.Abort(msg)
671 raise error.Abort(msg)
672
672
673 r = None
673 r = None
674 if repo:
674 if repo:
675 if cl:
675 if cl:
676 r = repo.unfiltered().changelog
676 r = repo.unfiltered().changelog
677 elif dir:
677 elif dir:
678 if 'treemanifest' not in repo.requirements:
678 if 'treemanifest' not in repo.requirements:
679 raise error.Abort(_("--dir can only be used on repos with "
679 raise error.Abort(_("--dir can only be used on repos with "
680 "treemanifest enabled"))
680 "treemanifest enabled"))
681 dirlog = repo.manifestlog._revlog.dirlog(dir)
681 dirlog = repo.manifestlog._revlog.dirlog(dir)
682 if len(dirlog):
682 if len(dirlog):
683 r = dirlog
683 r = dirlog
684 elif mf:
684 elif mf:
685 r = repo.manifestlog._revlog
685 r = repo.manifestlog._revlog
686 elif file_:
686 elif file_:
687 filelog = repo.file(file_)
687 filelog = repo.file(file_)
688 if len(filelog):
688 if len(filelog):
689 r = filelog
689 r = filelog
690 if not r:
690 if not r:
691 if not file_:
691 if not file_:
692 raise error.CommandError(cmd, _('invalid arguments'))
692 raise error.CommandError(cmd, _('invalid arguments'))
693 if not os.path.isfile(file_):
693 if not os.path.isfile(file_):
694 raise error.Abort(_("revlog '%s' not found") % file_)
694 raise error.Abort(_("revlog '%s' not found") % file_)
695 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
695 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
696 file_[:-2] + ".i")
696 file_[:-2] + ".i")
697 return r
697 return r
698
698
699 def copy(ui, repo, pats, opts, rename=False):
699 def copy(ui, repo, pats, opts, rename=False):
700 # called with the repo lock held
700 # called with the repo lock held
701 #
701 #
702 # hgsep => pathname that uses "/" to separate directories
702 # hgsep => pathname that uses "/" to separate directories
703 # ossep => pathname that uses os.sep to separate directories
703 # ossep => pathname that uses os.sep to separate directories
704 cwd = repo.getcwd()
704 cwd = repo.getcwd()
705 targets = {}
705 targets = {}
706 after = opts.get("after")
706 after = opts.get("after")
707 dryrun = opts.get("dry_run")
707 dryrun = opts.get("dry_run")
708 wctx = repo[None]
708 wctx = repo[None]
709
709
710 def walkpat(pat):
710 def walkpat(pat):
711 srcs = []
711 srcs = []
712 if after:
712 if after:
713 badstates = '?'
713 badstates = '?'
714 else:
714 else:
715 badstates = '?r'
715 badstates = '?r'
716 m = scmutil.match(wctx, [pat], opts, globbed=True)
716 m = scmutil.match(wctx, [pat], opts, globbed=True)
717 for abs in wctx.walk(m):
717 for abs in wctx.walk(m):
718 state = repo.dirstate[abs]
718 state = repo.dirstate[abs]
719 rel = m.rel(abs)
719 rel = m.rel(abs)
720 exact = m.exact(abs)
720 exact = m.exact(abs)
721 if state in badstates:
721 if state in badstates:
722 if exact and state == '?':
722 if exact and state == '?':
723 ui.warn(_('%s: not copying - file is not managed\n') % rel)
723 ui.warn(_('%s: not copying - file is not managed\n') % rel)
724 if exact and state == 'r':
724 if exact and state == 'r':
725 ui.warn(_('%s: not copying - file has been marked for'
725 ui.warn(_('%s: not copying - file has been marked for'
726 ' remove\n') % rel)
726 ' remove\n') % rel)
727 continue
727 continue
728 # abs: hgsep
728 # abs: hgsep
729 # rel: ossep
729 # rel: ossep
730 srcs.append((abs, rel, exact))
730 srcs.append((abs, rel, exact))
731 return srcs
731 return srcs
732
732
733 # abssrc: hgsep
733 # abssrc: hgsep
734 # relsrc: ossep
734 # relsrc: ossep
735 # otarget: ossep
735 # otarget: ossep
736 def copyfile(abssrc, relsrc, otarget, exact):
736 def copyfile(abssrc, relsrc, otarget, exact):
737 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
737 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
738 if '/' in abstarget:
738 if '/' in abstarget:
739 # We cannot normalize abstarget itself, this would prevent
739 # We cannot normalize abstarget itself, this would prevent
740 # case only renames, like a => A.
740 # case only renames, like a => A.
741 abspath, absname = abstarget.rsplit('/', 1)
741 abspath, absname = abstarget.rsplit('/', 1)
742 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
742 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
743 reltarget = repo.pathto(abstarget, cwd)
743 reltarget = repo.pathto(abstarget, cwd)
744 target = repo.wjoin(abstarget)
744 target = repo.wjoin(abstarget)
745 src = repo.wjoin(abssrc)
745 src = repo.wjoin(abssrc)
746 state = repo.dirstate[abstarget]
746 state = repo.dirstate[abstarget]
747
747
748 scmutil.checkportable(ui, abstarget)
748 scmutil.checkportable(ui, abstarget)
749
749
750 # check for collisions
750 # check for collisions
751 prevsrc = targets.get(abstarget)
751 prevsrc = targets.get(abstarget)
752 if prevsrc is not None:
752 if prevsrc is not None:
753 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
753 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
754 (reltarget, repo.pathto(abssrc, cwd),
754 (reltarget, repo.pathto(abssrc, cwd),
755 repo.pathto(prevsrc, cwd)))
755 repo.pathto(prevsrc, cwd)))
756 return
756 return
757
757
758 # check for overwrites
758 # check for overwrites
759 exists = os.path.lexists(target)
759 exists = os.path.lexists(target)
760 samefile = False
760 samefile = False
761 if exists and abssrc != abstarget:
761 if exists and abssrc != abstarget:
762 if (repo.dirstate.normalize(abssrc) ==
762 if (repo.dirstate.normalize(abssrc) ==
763 repo.dirstate.normalize(abstarget)):
763 repo.dirstate.normalize(abstarget)):
764 if not rename:
764 if not rename:
765 ui.warn(_("%s: can't copy - same file\n") % reltarget)
765 ui.warn(_("%s: can't copy - same file\n") % reltarget)
766 return
766 return
767 exists = False
767 exists = False
768 samefile = True
768 samefile = True
769
769
770 if not after and exists or after and state in 'mn':
770 if not after and exists or after and state in 'mn':
771 if not opts['force']:
771 if not opts['force']:
772 if state in 'mn':
772 if state in 'mn':
773 msg = _('%s: not overwriting - file already committed\n')
773 msg = _('%s: not overwriting - file already committed\n')
774 if after:
774 if after:
775 flags = '--after --force'
775 flags = '--after --force'
776 else:
776 else:
777 flags = '--force'
777 flags = '--force'
778 if rename:
778 if rename:
779 hint = _('(hg rename %s to replace the file by '
779 hint = _('(hg rename %s to replace the file by '
780 'recording a rename)\n') % flags
780 'recording a rename)\n') % flags
781 else:
781 else:
782 hint = _('(hg copy %s to replace the file by '
782 hint = _('(hg copy %s to replace the file by '
783 'recording a copy)\n') % flags
783 'recording a copy)\n') % flags
784 else:
784 else:
785 msg = _('%s: not overwriting - file exists\n')
785 msg = _('%s: not overwriting - file exists\n')
786 if rename:
786 if rename:
787 hint = _('(hg rename --after to record the rename)\n')
787 hint = _('(hg rename --after to record the rename)\n')
788 else:
788 else:
789 hint = _('(hg copy --after to record the copy)\n')
789 hint = _('(hg copy --after to record the copy)\n')
790 ui.warn(msg % reltarget)
790 ui.warn(msg % reltarget)
791 ui.warn(hint)
791 ui.warn(hint)
792 return
792 return
793
793
794 if after:
794 if after:
795 if not exists:
795 if not exists:
796 if rename:
796 if rename:
797 ui.warn(_('%s: not recording move - %s does not exist\n') %
797 ui.warn(_('%s: not recording move - %s does not exist\n') %
798 (relsrc, reltarget))
798 (relsrc, reltarget))
799 else:
799 else:
800 ui.warn(_('%s: not recording copy - %s does not exist\n') %
800 ui.warn(_('%s: not recording copy - %s does not exist\n') %
801 (relsrc, reltarget))
801 (relsrc, reltarget))
802 return
802 return
803 elif not dryrun:
803 elif not dryrun:
804 try:
804 try:
805 if exists:
805 if exists:
806 os.unlink(target)
806 os.unlink(target)
807 targetdir = os.path.dirname(target) or '.'
807 targetdir = os.path.dirname(target) or '.'
808 if not os.path.isdir(targetdir):
808 if not os.path.isdir(targetdir):
809 os.makedirs(targetdir)
809 os.makedirs(targetdir)
810 if samefile:
810 if samefile:
811 tmp = target + "~hgrename"
811 tmp = target + "~hgrename"
812 os.rename(src, tmp)
812 os.rename(src, tmp)
813 os.rename(tmp, target)
813 os.rename(tmp, target)
814 else:
814 else:
815 util.copyfile(src, target)
815 util.copyfile(src, target)
816 srcexists = True
816 srcexists = True
817 except IOError as inst:
817 except IOError as inst:
818 if inst.errno == errno.ENOENT:
818 if inst.errno == errno.ENOENT:
819 ui.warn(_('%s: deleted in working directory\n') % relsrc)
819 ui.warn(_('%s: deleted in working directory\n') % relsrc)
820 srcexists = False
820 srcexists = False
821 else:
821 else:
822 ui.warn(_('%s: cannot copy - %s\n') %
822 ui.warn(_('%s: cannot copy - %s\n') %
823 (relsrc, inst.strerror))
823 (relsrc, inst.strerror))
824 return True # report a failure
824 return True # report a failure
825
825
826 if ui.verbose or not exact:
826 if ui.verbose or not exact:
827 if rename:
827 if rename:
828 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
828 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
829 else:
829 else:
830 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
830 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
831
831
832 targets[abstarget] = abssrc
832 targets[abstarget] = abssrc
833
833
834 # fix up dirstate
834 # fix up dirstate
835 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
835 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
836 dryrun=dryrun, cwd=cwd)
836 dryrun=dryrun, cwd=cwd)
837 if rename and not dryrun:
837 if rename and not dryrun:
838 if not after and srcexists and not samefile:
838 if not after and srcexists and not samefile:
839 repo.wvfs.unlinkpath(abssrc)
839 repo.wvfs.unlinkpath(abssrc)
840 wctx.forget([abssrc])
840 wctx.forget([abssrc])
841
841
842 # pat: ossep
842 # pat: ossep
843 # dest ossep
843 # dest ossep
844 # srcs: list of (hgsep, hgsep, ossep, bool)
844 # srcs: list of (hgsep, hgsep, ossep, bool)
845 # return: function that takes hgsep and returns ossep
845 # return: function that takes hgsep and returns ossep
846 def targetpathfn(pat, dest, srcs):
846 def targetpathfn(pat, dest, srcs):
847 if os.path.isdir(pat):
847 if os.path.isdir(pat):
848 abspfx = pathutil.canonpath(repo.root, cwd, pat)
848 abspfx = pathutil.canonpath(repo.root, cwd, pat)
849 abspfx = util.localpath(abspfx)
849 abspfx = util.localpath(abspfx)
850 if destdirexists:
850 if destdirexists:
851 striplen = len(os.path.split(abspfx)[0])
851 striplen = len(os.path.split(abspfx)[0])
852 else:
852 else:
853 striplen = len(abspfx)
853 striplen = len(abspfx)
854 if striplen:
854 if striplen:
855 striplen += len(pycompat.ossep)
855 striplen += len(pycompat.ossep)
856 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
856 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
857 elif destdirexists:
857 elif destdirexists:
858 res = lambda p: os.path.join(dest,
858 res = lambda p: os.path.join(dest,
859 os.path.basename(util.localpath(p)))
859 os.path.basename(util.localpath(p)))
860 else:
860 else:
861 res = lambda p: dest
861 res = lambda p: dest
862 return res
862 return res
863
863
864 # pat: ossep
864 # pat: ossep
865 # dest ossep
865 # dest ossep
866 # srcs: list of (hgsep, hgsep, ossep, bool)
866 # srcs: list of (hgsep, hgsep, ossep, bool)
867 # return: function that takes hgsep and returns ossep
867 # return: function that takes hgsep and returns ossep
868 def targetpathafterfn(pat, dest, srcs):
868 def targetpathafterfn(pat, dest, srcs):
869 if matchmod.patkind(pat):
869 if matchmod.patkind(pat):
870 # a mercurial pattern
870 # a mercurial pattern
871 res = lambda p: os.path.join(dest,
871 res = lambda p: os.path.join(dest,
872 os.path.basename(util.localpath(p)))
872 os.path.basename(util.localpath(p)))
873 else:
873 else:
874 abspfx = pathutil.canonpath(repo.root, cwd, pat)
874 abspfx = pathutil.canonpath(repo.root, cwd, pat)
875 if len(abspfx) < len(srcs[0][0]):
875 if len(abspfx) < len(srcs[0][0]):
876 # A directory. Either the target path contains the last
876 # A directory. Either the target path contains the last
877 # component of the source path or it does not.
877 # component of the source path or it does not.
878 def evalpath(striplen):
878 def evalpath(striplen):
879 score = 0
879 score = 0
880 for s in srcs:
880 for s in srcs:
881 t = os.path.join(dest, util.localpath(s[0])[striplen:])
881 t = os.path.join(dest, util.localpath(s[0])[striplen:])
882 if os.path.lexists(t):
882 if os.path.lexists(t):
883 score += 1
883 score += 1
884 return score
884 return score
885
885
886 abspfx = util.localpath(abspfx)
886 abspfx = util.localpath(abspfx)
887 striplen = len(abspfx)
887 striplen = len(abspfx)
888 if striplen:
888 if striplen:
889 striplen += len(pycompat.ossep)
889 striplen += len(pycompat.ossep)
890 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
890 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
891 score = evalpath(striplen)
891 score = evalpath(striplen)
892 striplen1 = len(os.path.split(abspfx)[0])
892 striplen1 = len(os.path.split(abspfx)[0])
893 if striplen1:
893 if striplen1:
894 striplen1 += len(pycompat.ossep)
894 striplen1 += len(pycompat.ossep)
895 if evalpath(striplen1) > score:
895 if evalpath(striplen1) > score:
896 striplen = striplen1
896 striplen = striplen1
897 res = lambda p: os.path.join(dest,
897 res = lambda p: os.path.join(dest,
898 util.localpath(p)[striplen:])
898 util.localpath(p)[striplen:])
899 else:
899 else:
900 # a file
900 # a file
901 if destdirexists:
901 if destdirexists:
902 res = lambda p: os.path.join(dest,
902 res = lambda p: os.path.join(dest,
903 os.path.basename(util.localpath(p)))
903 os.path.basename(util.localpath(p)))
904 else:
904 else:
905 res = lambda p: dest
905 res = lambda p: dest
906 return res
906 return res
907
907
908 pats = scmutil.expandpats(pats)
908 pats = scmutil.expandpats(pats)
909 if not pats:
909 if not pats:
910 raise error.Abort(_('no source or destination specified'))
910 raise error.Abort(_('no source or destination specified'))
911 if len(pats) == 1:
911 if len(pats) == 1:
912 raise error.Abort(_('no destination specified'))
912 raise error.Abort(_('no destination specified'))
913 dest = pats.pop()
913 dest = pats.pop()
914 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
914 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
915 if not destdirexists:
915 if not destdirexists:
916 if len(pats) > 1 or matchmod.patkind(pats[0]):
916 if len(pats) > 1 or matchmod.patkind(pats[0]):
917 raise error.Abort(_('with multiple sources, destination must be an '
917 raise error.Abort(_('with multiple sources, destination must be an '
918 'existing directory'))
918 'existing directory'))
919 if util.endswithsep(dest):
919 if util.endswithsep(dest):
920 raise error.Abort(_('destination %s is not a directory') % dest)
920 raise error.Abort(_('destination %s is not a directory') % dest)
921
921
922 tfn = targetpathfn
922 tfn = targetpathfn
923 if after:
923 if after:
924 tfn = targetpathafterfn
924 tfn = targetpathafterfn
925 copylist = []
925 copylist = []
926 for pat in pats:
926 for pat in pats:
927 srcs = walkpat(pat)
927 srcs = walkpat(pat)
928 if not srcs:
928 if not srcs:
929 continue
929 continue
930 copylist.append((tfn(pat, dest, srcs), srcs))
930 copylist.append((tfn(pat, dest, srcs), srcs))
931 if not copylist:
931 if not copylist:
932 raise error.Abort(_('no files to copy'))
932 raise error.Abort(_('no files to copy'))
933
933
934 errors = 0
934 errors = 0
935 for targetpath, srcs in copylist:
935 for targetpath, srcs in copylist:
936 for abssrc, relsrc, exact in srcs:
936 for abssrc, relsrc, exact in srcs:
937 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
937 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
938 errors += 1
938 errors += 1
939
939
940 if errors:
940 if errors:
941 ui.warn(_('(consider using --after)\n'))
941 ui.warn(_('(consider using --after)\n'))
942
942
943 return errors != 0
943 return errors != 0
944
944
945 ## facility to let extension process additional data into an import patch
945 ## facility to let extension process additional data into an import patch
946 # list of identifier to be executed in order
946 # list of identifier to be executed in order
947 extrapreimport = [] # run before commit
947 extrapreimport = [] # run before commit
948 extrapostimport = [] # run after commit
948 extrapostimport = [] # run after commit
949 # mapping from identifier to actual import function
949 # mapping from identifier to actual import function
950 #
950 #
951 # 'preimport' functions run before the commit is made and are provided the following
951 # 'preimport' functions run before the commit is made and are provided the following
952 # arguments:
952 # arguments:
953 # - repo: the localrepository instance,
953 # - repo: the localrepository instance,
954 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
954 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
955 # - extra: the future extra dictionary of the changeset, please mutate it,
955 # - extra: the future extra dictionary of the changeset, please mutate it,
956 # - opts: the import options.
956 # - opts: the import options.
957 # XXX ideally, we would just pass a ctx ready to be computed; that would allow
957 # XXX ideally, we would just pass a ctx ready to be computed; that would allow
958 # mutation of the in-memory commit and more. Feel free to rework the code to get
958 # mutation of the in-memory commit and more. Feel free to rework the code to get
959 # there.
959 # there.
960 extrapreimportmap = {}
960 extrapreimportmap = {}
961 # 'postimport' functions run after the commit is made and are provided the following
961 # 'postimport' functions run after the commit is made and are provided the following
962 # argument:
962 # argument:
963 # - ctx: the changectx created by import.
963 # - ctx: the changectx created by import.
964 extrapostimportmap = {}
964 extrapostimportmap = {}
965
965
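# Illustrative sketch (an assumption, not upstream code): a third-party
# extension would typically register hooks like these from its uisetup();
# the name 'exampleext' and both helpers below are hypothetical.
def _exampleext_preimport(repo, patchdata, extra, opts):
    # record the original node id from the patch header, if any,
    # in the changeset's extra dictionary
    if patchdata.get('nodeid'):
        extra['exampleext_origin'] = patchdata['nodeid']

def _exampleext_postimport(ctx):
    # runs once the commit exists; report its short hash
    ctx.repo().ui.status('imported %s\n' % ctx.hex()[:12])

def _exampleext_register():
    # in an extension module these would be cmdutil.extrapreimport etc.
    extrapreimport.append('exampleext')
    extrapreimportmap['exampleext'] = _exampleext_preimport
    extrapostimport.append('exampleext')
    extrapostimportmap['exampleext'] = _exampleext_postimport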
966 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
966 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
967 """Utility function used by commands.import to import a single patch
967 """Utility function used by commands.import to import a single patch
968
968
969 This function is explicitly defined here to help the evolve extension
969 This function is explicitly defined here to help the evolve extension
970 wrap this part of the import logic.
970 wrap this part of the import logic.
971
971
972 The API is currently a bit ugly because it is a simple code translation from
972 The API is currently a bit ugly because it is a simple code translation from
973 the import command. Feel free to make it better.
973 the import command. Feel free to make it better.
974
974
975 :hunk: a patch (as a binary string)
975 :hunk: a patch (as a binary string)
976 :parents: nodes that will be parent of the created commit
976 :parents: nodes that will be parent of the created commit
977 :opts: the full dict of options passed to the import command
977 :opts: the full dict of options passed to the import command
978 :msgs: list to save commit message to.
978 :msgs: list to save commit message to.
979 (used in case we need to save it when failing)
979 (used in case we need to save it when failing)
980 :updatefunc: a function that updates a repo to a given node
980 :updatefunc: a function that updates a repo to a given node
981 updatefunc(<repo>, <node>)
981 updatefunc(<repo>, <node>)
982 """
982 """
983 # avoid cycle context -> subrepo -> cmdutil
983 # avoid cycle context -> subrepo -> cmdutil
984 from . import context
984 from . import context
985 extractdata = patch.extract(ui, hunk)
985 extractdata = patch.extract(ui, hunk)
986 tmpname = extractdata.get('filename')
986 tmpname = extractdata.get('filename')
987 message = extractdata.get('message')
987 message = extractdata.get('message')
988 user = opts.get('user') or extractdata.get('user')
988 user = opts.get('user') or extractdata.get('user')
989 date = opts.get('date') or extractdata.get('date')
989 date = opts.get('date') or extractdata.get('date')
990 branch = extractdata.get('branch')
990 branch = extractdata.get('branch')
991 nodeid = extractdata.get('nodeid')
991 nodeid = extractdata.get('nodeid')
992 p1 = extractdata.get('p1')
992 p1 = extractdata.get('p1')
993 p2 = extractdata.get('p2')
993 p2 = extractdata.get('p2')
994
994
995 nocommit = opts.get('no_commit')
995 nocommit = opts.get('no_commit')
996 importbranch = opts.get('import_branch')
996 importbranch = opts.get('import_branch')
997 update = not opts.get('bypass')
997 update = not opts.get('bypass')
998 strip = opts["strip"]
998 strip = opts["strip"]
999 prefix = opts["prefix"]
999 prefix = opts["prefix"]
1000 sim = float(opts.get('similarity') or 0)
1000 sim = float(opts.get('similarity') or 0)
1001 if not tmpname:
1001 if not tmpname:
1002 return (None, None, False)
1002 return (None, None, False)
1003
1003
1004 rejects = False
1004 rejects = False
1005
1005
1006 try:
1006 try:
1007 cmdline_message = logmessage(ui, opts)
1007 cmdline_message = logmessage(ui, opts)
1008 if cmdline_message:
1008 if cmdline_message:
1009 # pickup the cmdline msg
1009 # pickup the cmdline msg
1010 message = cmdline_message
1010 message = cmdline_message
1011 elif message:
1011 elif message:
1012 # pickup the patch msg
1012 # pickup the patch msg
1013 message = message.strip()
1013 message = message.strip()
1014 else:
1014 else:
1015 # launch the editor
1015 # launch the editor
1016 message = None
1016 message = None
1017 ui.debug('message:\n%s\n' % message)
1017 ui.debug('message:\n%s\n' % message)
1018
1018
1019 if len(parents) == 1:
1019 if len(parents) == 1:
1020 parents.append(repo[nullid])
1020 parents.append(repo[nullid])
1021 if opts.get('exact'):
1021 if opts.get('exact'):
1022 if not nodeid or not p1:
1022 if not nodeid or not p1:
1023 raise error.Abort(_('not a Mercurial patch'))
1023 raise error.Abort(_('not a Mercurial patch'))
1024 p1 = repo[p1]
1024 p1 = repo[p1]
1025 p2 = repo[p2 or nullid]
1025 p2 = repo[p2 or nullid]
1026 elif p2:
1026 elif p2:
1027 try:
1027 try:
1028 p1 = repo[p1]
1028 p1 = repo[p1]
1029 p2 = repo[p2]
1029 p2 = repo[p2]
1030 # Without any options, consider p2 only if the
1030 # Without any options, consider p2 only if the
1031 # patch is being applied on top of the recorded
1031 # patch is being applied on top of the recorded
1032 # first parent.
1032 # first parent.
1033 if p1 != parents[0]:
1033 if p1 != parents[0]:
1034 p1 = parents[0]
1034 p1 = parents[0]
1035 p2 = repo[nullid]
1035 p2 = repo[nullid]
1036 except error.RepoError:
1036 except error.RepoError:
1037 p1, p2 = parents
1037 p1, p2 = parents
1038 if p2.node() == nullid:
1038 if p2.node() == nullid:
1039 ui.warn(_("warning: import the patch as a normal revision\n"
1039 ui.warn(_("warning: import the patch as a normal revision\n"
1040 "(use --exact to import the patch as a merge)\n"))
1040 "(use --exact to import the patch as a merge)\n"))
1041 else:
1041 else:
1042 p1, p2 = parents
1042 p1, p2 = parents
1043
1043
1044 n = None
1044 n = None
1045 if update:
1045 if update:
1046 if p1 != parents[0]:
1046 if p1 != parents[0]:
1047 updatefunc(repo, p1.node())
1047 updatefunc(repo, p1.node())
1048 if p2 != parents[1]:
1048 if p2 != parents[1]:
1049 repo.setparents(p1.node(), p2.node())
1049 repo.setparents(p1.node(), p2.node())
1050
1050
1051 if opts.get('exact') or importbranch:
1051 if opts.get('exact') or importbranch:
1052 repo.dirstate.setbranch(branch or 'default')
1052 repo.dirstate.setbranch(branch or 'default')
1053
1053
1054 partial = opts.get('partial', False)
1054 partial = opts.get('partial', False)
1055 files = set()
1055 files = set()
1056 try:
1056 try:
1057 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1057 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1058 files=files, eolmode=None, similarity=sim / 100.0)
1058 files=files, eolmode=None, similarity=sim / 100.0)
1059 except patch.PatchError as e:
1059 except patch.PatchError as e:
1060 if not partial:
1060 if not partial:
1061 raise error.Abort(str(e))
1061 raise error.Abort(str(e))
1062 if partial:
1062 if partial:
1063 rejects = True
1063 rejects = True
1064
1064
1065 files = list(files)
1065 files = list(files)
1066 if nocommit:
1066 if nocommit:
1067 if message:
1067 if message:
1068 msgs.append(message)
1068 msgs.append(message)
1069 else:
1069 else:
1070 if opts.get('exact') or p2:
1070 if opts.get('exact') or p2:
1071 # If you got here, you either used --force and know what
1071 # If you got here, you either used --force and know what
1072 # you are doing or used --exact or a merge patch while
1072 # you are doing or used --exact or a merge patch while
1073 # being updated to its first parent.
1073 # being updated to its first parent.
1074 m = None
1074 m = None
1075 else:
1075 else:
1076 m = scmutil.matchfiles(repo, files or [])
1076 m = scmutil.matchfiles(repo, files or [])
1077 editform = mergeeditform(repo[None], 'import.normal')
1077 editform = mergeeditform(repo[None], 'import.normal')
1078 if opts.get('exact'):
1078 if opts.get('exact'):
1079 editor = None
1079 editor = None
1080 else:
1080 else:
1081 editor = getcommiteditor(editform=editform, **opts)
1081 editor = getcommiteditor(editform=editform, **opts)
1082 extra = {}
1082 extra = {}
1083 for idfunc in extrapreimport:
1083 for idfunc in extrapreimport:
1084 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1084 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1085 overrides = {}
1085 overrides = {}
1086 if partial:
1086 if partial:
1087 overrides[('ui', 'allowemptycommit')] = True
1087 overrides[('ui', 'allowemptycommit')] = True
1088 with repo.ui.configoverride(overrides, 'import'):
1088 with repo.ui.configoverride(overrides, 'import'):
1089 n = repo.commit(message, user,
1089 n = repo.commit(message, user,
1090 date, match=m,
1090 date, match=m,
1091 editor=editor, extra=extra)
1091 editor=editor, extra=extra)
1092 for idfunc in extrapostimport:
1092 for idfunc in extrapostimport:
1093 extrapostimportmap[idfunc](repo[n])
1093 extrapostimportmap[idfunc](repo[n])
1094 else:
1094 else:
1095 if opts.get('exact') or importbranch:
1095 if opts.get('exact') or importbranch:
1096 branch = branch or 'default'
1096 branch = branch or 'default'
1097 else:
1097 else:
1098 branch = p1.branch()
1098 branch = p1.branch()
1099 store = patch.filestore()
1099 store = patch.filestore()
1100 try:
1100 try:
1101 files = set()
1101 files = set()
1102 try:
1102 try:
1103 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1103 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1104 files, eolmode=None)
1104 files, eolmode=None)
1105 except patch.PatchError as e:
1105 except patch.PatchError as e:
1106 raise error.Abort(str(e))
1106 raise error.Abort(str(e))
1107 if opts.get('exact'):
1107 if opts.get('exact'):
1108 editor = None
1108 editor = None
1109 else:
1109 else:
1110 editor = getcommiteditor(editform='import.bypass')
1110 editor = getcommiteditor(editform='import.bypass')
1111 memctx = context.memctx(repo, (p1.node(), p2.node()),
1111 memctx = context.memctx(repo, (p1.node(), p2.node()),
1112 message,
1112 message,
1113 files=files,
1113 files=files,
1114 filectxfn=store,
1114 filectxfn=store,
1115 user=user,
1115 user=user,
1116 date=date,
1116 date=date,
1117 branch=branch,
1117 branch=branch,
1118 editor=editor)
1118 editor=editor)
1119 n = memctx.commit()
1119 n = memctx.commit()
1120 finally:
1120 finally:
1121 store.close()
1121 store.close()
1122 if opts.get('exact') and nocommit:
1122 if opts.get('exact') and nocommit:
1123 # --exact with --no-commit is still useful in that it does merge
1123 # --exact with --no-commit is still useful in that it does merge
1124 # and branch bits
1124 # and branch bits
1125 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1125 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1126 elif opts.get('exact') and hex(n) != nodeid:
1126 elif opts.get('exact') and hex(n) != nodeid:
1127 raise error.Abort(_('patch is damaged or loses information'))
1127 raise error.Abort(_('patch is damaged or loses information'))
1128 msg = _('applied to working directory')
1128 msg = _('applied to working directory')
1129 if n:
1129 if n:
1130 # i18n: refers to a short changeset id
1130 # i18n: refers to a short changeset id
1131 msg = _('created %s') % short(n)
1131 msg = _('created %s') % short(n)
1132 return (msg, n, rejects)
1132 return (msg, n, rejects)
1133 finally:
1133 finally:
1134 os.unlink(tmpname)
1134 os.unlink(tmpname)
1135
1135
1136 # facility to let extensions include additional data in an exported patch
1136 # facility to let extensions include additional data in an exported patch
1137 # list of identifiers to be executed in order
1137 # list of identifiers to be executed in order
1138 extraexport = []
1138 extraexport = []
1139 # mapping from identifier to actual export function
1139 # mapping from identifier to actual export function
1140 # the function has to return a string to be added to the header, or None
1140 # the function has to return a string to be added to the header, or None
1141 # it is given two arguments (sequencenumber, changectx)
1141 # it is given two arguments (sequencenumber, changectx)
1142 extraexportmap = {}
1142 extraexportmap = {}
1143
1143
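# Illustrative sketch (an assumption, not upstream code): an extension could
# add its own header line to exported patches; 'exampleext' is hypothetical.
def _exampleext_exportheader(seqno, ctx):
    # return the text for an extra "# ..." header line, or None to add nothing
    if ctx.branch() != 'default':
        return 'Exported-Branch %s' % ctx.branch()
    return None

def _exampleext_registerexport():
    # in an extension module these would be cmdutil.extraexport etc.
    extraexport.append('exampleext')
    extraexportmap['exampleext'] = _exampleext_exportheader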
1144 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1144 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1145 node = scmutil.binnode(ctx)
1145 node = scmutil.binnode(ctx)
1146 parents = [p.node() for p in ctx.parents() if p]
1146 parents = [p.node() for p in ctx.parents() if p]
1147 branch = ctx.branch()
1147 branch = ctx.branch()
1148 if switch_parent:
1148 if switch_parent:
1149 parents.reverse()
1149 parents.reverse()
1150
1150
1151 if parents:
1151 if parents:
1152 prev = parents[0]
1152 prev = parents[0]
1153 else:
1153 else:
1154 prev = nullid
1154 prev = nullid
1155
1155
1156 write("# HG changeset patch\n")
1156 write("# HG changeset patch\n")
1157 write("# User %s\n" % ctx.user())
1157 write("# User %s\n" % ctx.user())
1158 write("# Date %d %d\n" % ctx.date())
1158 write("# Date %d %d\n" % ctx.date())
1159 write("# %s\n" % util.datestr(ctx.date()))
1159 write("# %s\n" % util.datestr(ctx.date()))
1160 if branch and branch != 'default':
1160 if branch and branch != 'default':
1161 write("# Branch %s\n" % branch)
1161 write("# Branch %s\n" % branch)
1162 write("# Node ID %s\n" % hex(node))
1162 write("# Node ID %s\n" % hex(node))
1163 write("# Parent %s\n" % hex(prev))
1163 write("# Parent %s\n" % hex(prev))
1164 if len(parents) > 1:
1164 if len(parents) > 1:
1165 write("# Parent %s\n" % hex(parents[1]))
1165 write("# Parent %s\n" % hex(parents[1]))
1166
1166
1167 for headerid in extraexport:
1167 for headerid in extraexport:
1168 header = extraexportmap[headerid](seqno, ctx)
1168 header = extraexportmap[headerid](seqno, ctx)
1169 if header is not None:
1169 if header is not None:
1170 write('# %s\n' % header)
1170 write('# %s\n' % header)
1171 write(ctx.description().rstrip())
1171 write(ctx.description().rstrip())
1172 write("\n\n")
1172 write("\n\n")
1173
1173
1174 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1174 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1175 write(chunk, label=label)
1175 write(chunk, label=label)
1176
1176
1177 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1177 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1178 opts=None, match=None):
1178 opts=None, match=None):
1179 '''export changesets as hg patches
1179 '''export changesets as hg patches
1180
1180
1181 Args:
1181 Args:
1182 repo: The repository from which we're exporting revisions.
1182 repo: The repository from which we're exporting revisions.
1183 revs: A list of revisions to export as revision numbers.
1183 revs: A list of revisions to export as revision numbers.
1184 fntemplate: An optional string to use for generating patch file names.
1184 fntemplate: An optional string to use for generating patch file names.
1185 fp: An optional file-like object to which patches should be written.
1185 fp: An optional file-like object to which patches should be written.
1186 switch_parent: If True, show diffs against second parent when not nullid.
1186 switch_parent: If True, show diffs against second parent when not nullid.
1187 Default is false, which always shows diff against p1.
1187 Default is false, which always shows diff against p1.
1188 opts: diff options to use for generating the patch.
1188 opts: diff options to use for generating the patch.
1189 match: If specified, only export changes to files matching this matcher.
1189 match: If specified, only export changes to files matching this matcher.
1190
1190
1191 Returns:
1191 Returns:
1192 Nothing.
1192 Nothing.
1193
1193
1194 Side Effect:
1194 Side Effect:
1195 "HG Changeset Patch" data is emitted to one of the following
1195 "HG Changeset Patch" data is emitted to one of the following
1196 destinations:
1196 destinations:
1197 fp is specified: All revs are written to the specified
1197 fp is specified: All revs are written to the specified
1198 file-like object.
1198 file-like object.
1199 fntemplate specified: Each rev is written to a unique file named using
1199 fntemplate specified: Each rev is written to a unique file named using
1200 the given template.
1200 the given template.
1201 Neither fp nor template specified: All revs written to repo.ui.write()
1201 Neither fp nor template specified: All revs written to repo.ui.write()
1202 '''
1202 '''
1203
1203
1204 total = len(revs)
1204 total = len(revs)
1205 revwidth = max(len(str(rev)) for rev in revs)
1205 revwidth = max(len(str(rev)) for rev in revs)
1206 filemode = {}
1206 filemode = {}
1207
1207
1208 write = None
1208 write = None
1209 dest = '<unnamed>'
1209 dest = '<unnamed>'
1210 if fp:
1210 if fp:
1211 dest = getattr(fp, 'name', dest)
1211 dest = getattr(fp, 'name', dest)
1212 def write(s, **kw):
1212 def write(s, **kw):
1213 fp.write(s)
1213 fp.write(s)
1214 elif not fntemplate:
1214 elif not fntemplate:
1215 write = repo.ui.write
1215 write = repo.ui.write
1216
1216
1217 for seqno, rev in enumerate(revs, 1):
1217 for seqno, rev in enumerate(revs, 1):
1218 ctx = repo[rev]
1218 ctx = repo[rev]
1219 fo = None
1219 fo = None
1220 if not fp and fntemplate:
1220 if not fp and fntemplate:
1221 desc_lines = ctx.description().rstrip().split('\n')
1221 desc_lines = ctx.description().rstrip().split('\n')
1222 desc = desc_lines[0]  # Commit always has a first line.
1222 desc = desc_lines[0]  # Commit always has a first line.
1223 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1223 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1224 total=total, seqno=seqno, revwidth=revwidth,
1224 total=total, seqno=seqno, revwidth=revwidth,
1225 mode='wb', modemap=filemode)
1225 mode='wb', modemap=filemode)
1226 dest = fo.name
1226 dest = fo.name
1227 def write(s, **kw):
1227 def write(s, **kw):
1228 fo.write(s)
1228 fo.write(s)
1229 if not dest.startswith('<'):
1229 if not dest.startswith('<'):
1230 repo.ui.note("%s\n" % dest)
1230 repo.ui.note("%s\n" % dest)
1231 _exportsingle(
1231 _exportsingle(
1232 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1232 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1233 if fo is not None:
1233 if fo is not None:
1234 fo.close()
1234 fo.close()
1235
1235
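# Minimal usage sketch for export() (an assumption, not upstream code): write
# one patch file per draft revision using the default file name template.
def _example_export(ui, repo):
    revs = scmutil.revrange(repo, ['draft()'])  # any revset would do
    if revs:
        export(repo, revs, fntemplate='hg-%h.patch',
               opts=patch.diffallopts(ui))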
1236 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1236 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1237 changes=None, stat=False, fp=None, prefix='',
1237 changes=None, stat=False, fp=None, prefix='',
1238 root='', listsubrepos=False):
1238 root='', listsubrepos=False):
1239 '''show diff or diffstat.'''
1239 '''show diff or diffstat.'''
1240 if fp is None:
1240 if fp is None:
1241 write = ui.write
1241 write = ui.write
1242 else:
1242 else:
1243 def write(s, **kw):
1243 def write(s, **kw):
1244 fp.write(s)
1244 fp.write(s)
1245
1245
1246 if root:
1246 if root:
1247 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1247 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1248 else:
1248 else:
1249 relroot = ''
1249 relroot = ''
1250 if relroot != '':
1250 if relroot != '':
1251 # XXX relative roots currently don't work if the root is within a
1251 # XXX relative roots currently don't work if the root is within a
1252 # subrepo
1252 # subrepo
1253 uirelroot = match.uipath(relroot)
1253 uirelroot = match.uipath(relroot)
1254 relroot += '/'
1254 relroot += '/'
1255 for matchroot in match.files():
1255 for matchroot in match.files():
1256 if not matchroot.startswith(relroot):
1256 if not matchroot.startswith(relroot):
1257 ui.warn(_('warning: %s not inside relative root %s\n') % (
1257 ui.warn(_('warning: %s not inside relative root %s\n') % (
1258 match.uipath(matchroot), uirelroot))
1258 match.uipath(matchroot), uirelroot))
1259
1259
1260 if stat:
1260 if stat:
1261 diffopts = diffopts.copy(context=0)
1261 diffopts = diffopts.copy(context=0)
1262 width = 80
1262 width = 80
1263 if not ui.plain():
1263 if not ui.plain():
1264 width = ui.termwidth()
1264 width = ui.termwidth()
1265 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1265 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1266 prefix=prefix, relroot=relroot)
1266 prefix=prefix, relroot=relroot)
1267 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1267 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1268 width=width):
1268 width=width):
1269 write(chunk, label=label)
1269 write(chunk, label=label)
1270 else:
1270 else:
1271 for chunk, label in patch.diffui(repo, node1, node2, match,
1271 for chunk, label in patch.diffui(repo, node1, node2, match,
1272 changes, diffopts, prefix=prefix,
1272 changes, diffopts, prefix=prefix,
1273 relroot=relroot):
1273 relroot=relroot):
1274 write(chunk, label=label)
1274 write(chunk, label=label)
1275
1275
1276 if listsubrepos:
1276 if listsubrepos:
1277 ctx1 = repo[node1]
1277 ctx1 = repo[node1]
1278 ctx2 = repo[node2]
1278 ctx2 = repo[node2]
1279 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1279 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1280 tempnode2 = node2
1280 tempnode2 = node2
1281 try:
1281 try:
1282 if node2 is not None:
1282 if node2 is not None:
1283 tempnode2 = ctx2.substate[subpath][1]
1283 tempnode2 = ctx2.substate[subpath][1]
1284 except KeyError:
1284 except KeyError:
1285 # A subrepo that existed in node1 was deleted between node1 and
1285 # A subrepo that existed in node1 was deleted between node1 and
1286 # node2 (inclusive). Thus, ctx2's substate won't contain that
1286 # node2 (inclusive). Thus, ctx2's substate won't contain that
1287 # subpath. The best we can do is to ignore it.
1287 # subpath. The best we can do is to ignore it.
1288 tempnode2 = None
1288 tempnode2 = None
1289 submatch = matchmod.subdirmatcher(subpath, match)
1289 submatch = matchmod.subdirmatcher(subpath, match)
1290 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1290 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1291 stat=stat, fp=fp, prefix=prefix)
1291 stat=stat, fp=fp, prefix=prefix)
1292
1292
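# Usage sketch (an assumption, not upstream code): print a diffstat of the
# working directory against its first parent; node2=None means "working dir".
def _example_diffstat(ui, repo):
    m = scmutil.matchall(repo)
    diffordiffstat(ui, repo, patch.diffallopts(ui),
                   repo['.'].node(), None, m, stat=True)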
1293 def _changesetlabels(ctx):
1293 def _changesetlabels(ctx):
1294 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1294 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1295 if ctx.obsolete():
1295 if ctx.obsolete():
1296 labels.append('changeset.obsolete')
1296 labels.append('changeset.obsolete')
1297 if ctx.troubled():
1297 if ctx.troubled():
1298 labels.append('changeset.troubled')
1298 labels.append('changeset.troubled')
1299 for trouble in ctx.troubles():
1299 for trouble in ctx.troubles():
1300 labels.append('trouble.%s' % trouble)
1300 labels.append('trouble.%s' % trouble)
1301 return ' '.join(labels)
1301 return ' '.join(labels)
1302
1302
1303 class changeset_printer(object):
1303 class changeset_printer(object):
1304 '''show changeset information when templating not requested.'''
1304 '''show changeset information when templating not requested.'''
1305
1305
1306 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1306 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1307 self.ui = ui
1307 self.ui = ui
1308 self.repo = repo
1308 self.repo = repo
1309 self.buffered = buffered
1309 self.buffered = buffered
1310 self.matchfn = matchfn
1310 self.matchfn = matchfn
1311 self.diffopts = diffopts
1311 self.diffopts = diffopts
1312 self.header = {}
1312 self.header = {}
1313 self.hunk = {}
1313 self.hunk = {}
1314 self.lastheader = None
1314 self.lastheader = None
1315 self.footer = None
1315 self.footer = None
1316
1316
1317 def flush(self, ctx):
1317 def flush(self, ctx):
1318 rev = ctx.rev()
1318 rev = ctx.rev()
1319 if rev in self.header:
1319 if rev in self.header:
1320 h = self.header[rev]
1320 h = self.header[rev]
1321 if h != self.lastheader:
1321 if h != self.lastheader:
1322 self.lastheader = h
1322 self.lastheader = h
1323 self.ui.write(h)
1323 self.ui.write(h)
1324 del self.header[rev]
1324 del self.header[rev]
1325 if rev in self.hunk:
1325 if rev in self.hunk:
1326 self.ui.write(self.hunk[rev])
1326 self.ui.write(self.hunk[rev])
1327 del self.hunk[rev]
1327 del self.hunk[rev]
1328 return 1
1328 return 1
1329 return 0
1329 return 0
1330
1330
1331 def close(self):
1331 def close(self):
1332 if self.footer:
1332 if self.footer:
1333 self.ui.write(self.footer)
1333 self.ui.write(self.footer)
1334
1334
1335 def show(self, ctx, copies=None, matchfn=None, **props):
1335 def show(self, ctx, copies=None, matchfn=None, **props):
1336 if self.buffered:
1336 if self.buffered:
1337 self.ui.pushbuffer(labeled=True)
1337 self.ui.pushbuffer(labeled=True)
1338 self._show(ctx, copies, matchfn, props)
1338 self._show(ctx, copies, matchfn, props)
1339 self.hunk[ctx.rev()] = self.ui.popbuffer()
1339 self.hunk[ctx.rev()] = self.ui.popbuffer()
1340 else:
1340 else:
1341 self._show(ctx, copies, matchfn, props)
1341 self._show(ctx, copies, matchfn, props)
1342
1342
1343 def _show(self, ctx, copies, matchfn, props):
1343 def _show(self, ctx, copies, matchfn, props):
1344 '''show a single changeset or file revision'''
1344 '''show a single changeset or file revision'''
1345 changenode = ctx.node()
1345 changenode = ctx.node()
1346 rev = ctx.rev()
1346 rev = ctx.rev()
1347 if self.ui.debugflag:
1347 if self.ui.debugflag:
1348 hexfunc = hex
1348 hexfunc = hex
1349 else:
1349 else:
1350 hexfunc = short
1350 hexfunc = short
1351 # as of now, wctx.node() and wctx.rev() return None, but we want to
1351 # as of now, wctx.node() and wctx.rev() return None, but we want to
1352 # show the same values as {node} and {rev} templatekw
1352 # show the same values as {node} and {rev} templatekw
1353 revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
1353 revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
1354
1354
1355 if self.ui.quiet:
1355 if self.ui.quiet:
1356 self.ui.write("%d:%s\n" % revnode, label='log.node')
1356 self.ui.write("%d:%s\n" % revnode, label='log.node')
1357 return
1357 return
1358
1358
1359 date = util.datestr(ctx.date())
1359 date = util.datestr(ctx.date())
1360
1360
1361 # i18n: column positioning for "hg log"
1361 # i18n: column positioning for "hg log"
1362 self.ui.write(_("changeset: %d:%s\n") % revnode,
1362 self.ui.write(_("changeset: %d:%s\n") % revnode,
1363 label=_changesetlabels(ctx))
1363 label=_changesetlabels(ctx))
1364
1364
1365 # branches are shown first before any other names due to backwards
1365 # branches are shown first before any other names due to backwards
1366 # compatibility
1366 # compatibility
1367 branch = ctx.branch()
1367 branch = ctx.branch()
1368 # don't show the default branch name
1368 # don't show the default branch name
1369 if branch != 'default':
1369 if branch != 'default':
1370 # i18n: column positioning for "hg log"
1370 # i18n: column positioning for "hg log"
1371 self.ui.write(_("branch: %s\n") % branch,
1371 self.ui.write(_("branch: %s\n") % branch,
1372 label='log.branch')
1372 label='log.branch')
1373
1373
1374 for nsname, ns in self.repo.names.iteritems():
1374 for nsname, ns in self.repo.names.iteritems():
1375 # the 'branches' namespace has special logic already handled above, so we
1375 # the 'branches' namespace has special logic already handled above, so we
1376 # just skip it here
1376 # just skip it here
1377 if nsname == 'branches':
1377 if nsname == 'branches':
1378 continue
1378 continue
1379 # we will use the templatename as the color name since those two
1379 # we will use the templatename as the color name since those two
1380 # should be the same
1380 # should be the same
1381 for name in ns.names(self.repo, changenode):
1381 for name in ns.names(self.repo, changenode):
1382 self.ui.write(ns.logfmt % name,
1382 self.ui.write(ns.logfmt % name,
1383 label='log.%s' % ns.colorname)
1383 label='log.%s' % ns.colorname)
1384 if self.ui.debugflag:
1384 if self.ui.debugflag:
1385 # i18n: column positioning for "hg log"
1385 # i18n: column positioning for "hg log"
1386 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1386 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1387 label='log.phase')
1387 label='log.phase')
1388 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1388 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1389 label = 'log.parent changeset.%s' % pctx.phasestr()
1389 label = 'log.parent changeset.%s' % pctx.phasestr()
1390 # i18n: column positioning for "hg log"
1390 # i18n: column positioning for "hg log"
1391 self.ui.write(_("parent: %d:%s\n")
1391 self.ui.write(_("parent: %d:%s\n")
1392 % (pctx.rev(), hexfunc(pctx.node())),
1392 % (pctx.rev(), hexfunc(pctx.node())),
1393 label=label)
1393 label=label)
1394
1394
1395 if self.ui.debugflag and rev is not None:
1395 if self.ui.debugflag and rev is not None:
1396 mnode = ctx.manifestnode()
1396 mnode = ctx.manifestnode()
1397 # i18n: column positioning for "hg log"
1397 # i18n: column positioning for "hg log"
1398 self.ui.write(_("manifest: %d:%s\n") %
1398 self.ui.write(_("manifest: %d:%s\n") %
1399 (self.repo.manifestlog._revlog.rev(mnode),
1399 (self.repo.manifestlog._revlog.rev(mnode),
1400 hex(mnode)),
1400 hex(mnode)),
1401 label='ui.debug log.manifest')
1401 label='ui.debug log.manifest')
1402 # i18n: column positioning for "hg log"
1402 # i18n: column positioning for "hg log"
1403 self.ui.write(_("user: %s\n") % ctx.user(),
1403 self.ui.write(_("user: %s\n") % ctx.user(),
1404 label='log.user')
1404 label='log.user')
1405 # i18n: column positioning for "hg log"
1405 # i18n: column positioning for "hg log"
1406 self.ui.write(_("date: %s\n") % date,
1406 self.ui.write(_("date: %s\n") % date,
1407 label='log.date')
1407 label='log.date')
1408
1408
1409 if ctx.troubled():
1409 if ctx.troubled():
1410 # i18n: column positioning for "hg log"
1410 # i18n: column positioning for "hg log"
1411 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1411 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1412 label='log.trouble')
1412 label='log.trouble')
1413
1413
1414 if self.ui.debugflag:
1414 if self.ui.debugflag:
1415 files = ctx.p1().status(ctx)[:3]
1415 files = ctx.p1().status(ctx)[:3]
1416 for key, value in zip([# i18n: column positioning for "hg log"
1416 for key, value in zip([# i18n: column positioning for "hg log"
1417 _("files:"),
1417 _("files:"),
1418 # i18n: column positioning for "hg log"
1418 # i18n: column positioning for "hg log"
1419 _("files+:"),
1419 _("files+:"),
1420 # i18n: column positioning for "hg log"
1420 # i18n: column positioning for "hg log"
1421 _("files-:")], files):
1421 _("files-:")], files):
1422 if value:
1422 if value:
1423 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1423 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1424 label='ui.debug log.files')
1424 label='ui.debug log.files')
1425 elif ctx.files() and self.ui.verbose:
1425 elif ctx.files() and self.ui.verbose:
1426 # i18n: column positioning for "hg log"
1426 # i18n: column positioning for "hg log"
1427 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1427 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1428 label='ui.note log.files')
1428 label='ui.note log.files')
1429 if copies and self.ui.verbose:
1429 if copies and self.ui.verbose:
1430 copies = ['%s (%s)' % c for c in copies]
1430 copies = ['%s (%s)' % c for c in copies]
1431 # i18n: column positioning for "hg log"
1431 # i18n: column positioning for "hg log"
1432 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1432 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1433 label='ui.note log.copies')
1433 label='ui.note log.copies')
1434
1434
1435 extra = ctx.extra()
1435 extra = ctx.extra()
1436 if extra and self.ui.debugflag:
1436 if extra and self.ui.debugflag:
1437 for key, value in sorted(extra.items()):
1437 for key, value in sorted(extra.items()):
1438 # i18n: column positioning for "hg log"
1438 # i18n: column positioning for "hg log"
1439 self.ui.write(_("extra: %s=%s\n")
1439 self.ui.write(_("extra: %s=%s\n")
1440 % (key, util.escapestr(value)),
1440 % (key, util.escapestr(value)),
1441 label='ui.debug log.extra')
1441 label='ui.debug log.extra')
1442
1442
1443 description = ctx.description().strip()
1443 description = ctx.description().strip()
1444 if description:
1444 if description:
1445 if self.ui.verbose:
1445 if self.ui.verbose:
1446 self.ui.write(_("description:\n"),
1446 self.ui.write(_("description:\n"),
1447 label='ui.note log.description')
1447 label='ui.note log.description')
1448 self.ui.write(description,
1448 self.ui.write(description,
1449 label='ui.note log.description')
1449 label='ui.note log.description')
1450 self.ui.write("\n\n")
1450 self.ui.write("\n\n")
1451 else:
1451 else:
1452 # i18n: column positioning for "hg log"
1452 # i18n: column positioning for "hg log"
1453 self.ui.write(_("summary: %s\n") %
1453 self.ui.write(_("summary: %s\n") %
1454 description.splitlines()[0],
1454 description.splitlines()[0],
1455 label='log.summary')
1455 label='log.summary')
1456 self.ui.write("\n")
1456 self.ui.write("\n")
1457
1457
1458 self.showpatch(ctx, matchfn)
1458 self.showpatch(ctx, matchfn)
1459
1459
1460 def showpatch(self, ctx, matchfn):
1460 def showpatch(self, ctx, matchfn):
1461 if not matchfn:
1461 if not matchfn:
1462 matchfn = self.matchfn
1462 matchfn = self.matchfn
1463 if matchfn:
1463 if matchfn:
1464 stat = self.diffopts.get('stat')
1464 stat = self.diffopts.get('stat')
1465 diff = self.diffopts.get('patch')
1465 diff = self.diffopts.get('patch')
1466 diffopts = patch.diffallopts(self.ui, self.diffopts)
1466 diffopts = patch.diffallopts(self.ui, self.diffopts)
1467 node = ctx.node()
1467 node = ctx.node()
1468 prev = ctx.p1().node()
1468 prev = ctx.p1().node()
1469 if stat:
1469 if stat:
1470 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1470 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1471 match=matchfn, stat=True)
1471 match=matchfn, stat=True)
1472 if diff:
1472 if diff:
1473 if stat:
1473 if stat:
1474 self.ui.write("\n")
1474 self.ui.write("\n")
1475 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1475 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1476 match=matchfn, stat=False)
1476 match=matchfn, stat=False)
1477 self.ui.write("\n")
1477 self.ui.write("\n")
1478
1478
1479 class jsonchangeset(changeset_printer):
1479 class jsonchangeset(changeset_printer):
1480 '''format changeset information.'''
1480 '''format changeset information.'''
1481
1481
1482 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1482 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1483 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1483 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1484 self.cache = {}
1484 self.cache = {}
1485 self._first = True
1485 self._first = True
1486
1486
1487 def close(self):
1487 def close(self):
1488 if not self._first:
1488 if not self._first:
1489 self.ui.write("\n]\n")
1489 self.ui.write("\n]\n")
1490 else:
1490 else:
1491 self.ui.write("[]\n")
1491 self.ui.write("[]\n")
1492
1492
1493 def _show(self, ctx, copies, matchfn, props):
1493 def _show(self, ctx, copies, matchfn, props):
1494 '''show a single changeset or file revision'''
1494 '''show a single changeset or file revision'''
1495 rev = ctx.rev()
1495 rev = ctx.rev()
1496 if rev is None:
1496 if rev is None:
1497 jrev = jnode = 'null'
1497 jrev = jnode = 'null'
1498 else:
1498 else:
1499 jrev = '%d' % rev
1499 jrev = '%d' % rev
1500 jnode = '"%s"' % hex(ctx.node())
1500 jnode = '"%s"' % hex(ctx.node())
1501 j = encoding.jsonescape
1501 j = encoding.jsonescape
1502
1502
1503 if self._first:
1503 if self._first:
1504 self.ui.write("[\n {")
1504 self.ui.write("[\n {")
1505 self._first = False
1505 self._first = False
1506 else:
1506 else:
1507 self.ui.write(",\n {")
1507 self.ui.write(",\n {")
1508
1508
1509 if self.ui.quiet:
1509 if self.ui.quiet:
1510 self.ui.write(('\n "rev": %s') % jrev)
1510 self.ui.write(('\n "rev": %s') % jrev)
1511 self.ui.write((',\n "node": %s') % jnode)
1511 self.ui.write((',\n "node": %s') % jnode)
1512 self.ui.write('\n }')
1512 self.ui.write('\n }')
1513 return
1513 return
1514
1514
1515 self.ui.write(('\n "rev": %s') % jrev)
1515 self.ui.write(('\n "rev": %s') % jrev)
1516 self.ui.write((',\n "node": %s') % jnode)
1516 self.ui.write((',\n "node": %s') % jnode)
1517 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1517 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1518 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1518 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1519 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1519 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1520 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1520 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1521 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1521 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1522
1522
1523 self.ui.write((',\n "bookmarks": [%s]') %
1523 self.ui.write((',\n "bookmarks": [%s]') %
1524 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1524 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1525 self.ui.write((',\n "tags": [%s]') %
1525 self.ui.write((',\n "tags": [%s]') %
1526 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1526 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1527 self.ui.write((',\n "parents": [%s]') %
1527 self.ui.write((',\n "parents": [%s]') %
1528 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1528 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1529
1529
1530 if self.ui.debugflag:
1530 if self.ui.debugflag:
1531 if rev is None:
1531 if rev is None:
1532 jmanifestnode = 'null'
1532 jmanifestnode = 'null'
1533 else:
1533 else:
1534 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1534 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1535 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1535 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1536
1536
1537 self.ui.write((',\n "extra": {%s}') %
1537 self.ui.write((',\n "extra": {%s}') %
1538 ", ".join('"%s": "%s"' % (j(k), j(v))
1538 ", ".join('"%s": "%s"' % (j(k), j(v))
1539 for k, v in ctx.extra().items()))
1539 for k, v in ctx.extra().items()))
1540
1540
1541 files = ctx.p1().status(ctx)
1541 files = ctx.p1().status(ctx)
1542 self.ui.write((',\n "modified": [%s]') %
1542 self.ui.write((',\n "modified": [%s]') %
1543 ", ".join('"%s"' % j(f) for f in files[0]))
1543 ", ".join('"%s"' % j(f) for f in files[0]))
1544 self.ui.write((',\n "added": [%s]') %
1544 self.ui.write((',\n "added": [%s]') %
1545 ", ".join('"%s"' % j(f) for f in files[1]))
1545 ", ".join('"%s"' % j(f) for f in files[1]))
1546 self.ui.write((',\n "removed": [%s]') %
1546 self.ui.write((',\n "removed": [%s]') %
1547 ", ".join('"%s"' % j(f) for f in files[2]))
1547 ", ".join('"%s"' % j(f) for f in files[2]))
1548
1548
1549 elif self.ui.verbose:
1549 elif self.ui.verbose:
1550 self.ui.write((',\n "files": [%s]') %
1550 self.ui.write((',\n "files": [%s]') %
1551 ", ".join('"%s"' % j(f) for f in ctx.files()))
1551 ", ".join('"%s"' % j(f) for f in ctx.files()))
1552
1552
1553 if copies:
1553 if copies:
1554 self.ui.write((',\n "copies": {%s}') %
1554 self.ui.write((',\n "copies": {%s}') %
1555 ", ".join('"%s": "%s"' % (j(k), j(v))
1555 ", ".join('"%s": "%s"' % (j(k), j(v))
1556 for k, v in copies))
1556 for k, v in copies))
1557
1557
1558 matchfn = self.matchfn
1558 matchfn = self.matchfn
1559 if matchfn:
1559 if matchfn:
1560 stat = self.diffopts.get('stat')
1560 stat = self.diffopts.get('stat')
1561 diff = self.diffopts.get('patch')
1561 diff = self.diffopts.get('patch')
1562 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1562 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1563 node, prev = ctx.node(), ctx.p1().node()
1563 node, prev = ctx.node(), ctx.p1().node()
1564 if stat:
1564 if stat:
1565 self.ui.pushbuffer()
1565 self.ui.pushbuffer()
1566 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1566 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1567 match=matchfn, stat=True)
1567 match=matchfn, stat=True)
1568 self.ui.write((',\n "diffstat": "%s"')
1568 self.ui.write((',\n "diffstat": "%s"')
1569 % j(self.ui.popbuffer()))
1569 % j(self.ui.popbuffer()))
1570 if diff:
1570 if diff:
1571 self.ui.pushbuffer()
1571 self.ui.pushbuffer()
1572 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1572 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1573 match=matchfn, stat=False)
1573 match=matchfn, stat=False)
1574 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1574 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1575
1575
1576 self.ui.write("\n }")
1576 self.ui.write("\n }")
1577
1577
1578 class changeset_templater(changeset_printer):
1578 class changeset_templater(changeset_printer):
1579 '''format changeset information.'''
1579 '''format changeset information.'''
1580
1580
1581 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1581 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1582 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1582 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1583 self.t = formatter.loadtemplater(ui, 'changeset', (tmpl, mapfile),
1583 tmplspec = logtemplatespec(tmpl, mapfile)
1584 self.t = formatter.loadtemplater(ui, 'changeset', tmplspec,
1584 cache=templatekw.defaulttempl)
1585 cache=templatekw.defaulttempl)
1585 self._counter = itertools.count()
1586 self._counter = itertools.count()
1586 self.cache = {}
1587 self.cache = {}
1587
1588
1588 # find correct templates for current mode
1589 # find correct templates for current mode
1589 tmplmodes = [
1590 tmplmodes = [
1590 (True, None),
1591 (True, None),
1591 (self.ui.verbose, 'verbose'),
1592 (self.ui.verbose, 'verbose'),
1592 (self.ui.quiet, 'quiet'),
1593 (self.ui.quiet, 'quiet'),
1593 (self.ui.debugflag, 'debug'),
1594 (self.ui.debugflag, 'debug'),
1594 ]
1595 ]
1595
1596
1596 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1597 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1597 'docheader': '', 'docfooter': ''}
1598 'docheader': '', 'docfooter': ''}
1598 for mode, postfix in tmplmodes:
1599 for mode, postfix in tmplmodes:
1599 for t in self._parts:
1600 for t in self._parts:
1600 cur = t
1601 cur = t
1601 if postfix:
1602 if postfix:
1602 cur += "_" + postfix
1603 cur += "_" + postfix
1603 if mode and cur in self.t:
1604 if mode and cur in self.t:
1604 self._parts[t] = cur
1605 self._parts[t] = cur
1605
1606
1606 if self._parts['docheader']:
1607 if self._parts['docheader']:
1607 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1608 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1608
1609
1609 def close(self):
1610 def close(self):
1610 if self._parts['docfooter']:
1611 if self._parts['docfooter']:
1611 if not self.footer:
1612 if not self.footer:
1612 self.footer = ""
1613 self.footer = ""
1613 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1614 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1614 return super(changeset_templater, self).close()
1615 return super(changeset_templater, self).close()
1615
1616
1616 def _show(self, ctx, copies, matchfn, props):
1617 def _show(self, ctx, copies, matchfn, props):
1617 '''show a single changeset or file revision'''
1618 '''show a single changeset or file revision'''
1618 props = props.copy()
1619 props = props.copy()
1619 props.update(templatekw.keywords)
1620 props.update(templatekw.keywords)
1620 props['templ'] = self.t
1621 props['templ'] = self.t
1621 props['ctx'] = ctx
1622 props['ctx'] = ctx
1622 props['repo'] = self.repo
1623 props['repo'] = self.repo
1623 props['ui'] = self.repo.ui
1624 props['ui'] = self.repo.ui
1624 props['index'] = next(self._counter)
1625 props['index'] = next(self._counter)
1625 props['revcache'] = {'copies': copies}
1626 props['revcache'] = {'copies': copies}
1626 props['cache'] = self.cache
1627 props['cache'] = self.cache
1627 props = pycompat.strkwargs(props)
1628 props = pycompat.strkwargs(props)
1628
1629
1629 # write header
1630 # write header
1630 if self._parts['header']:
1631 if self._parts['header']:
1631 h = templater.stringify(self.t(self._parts['header'], **props))
1632 h = templater.stringify(self.t(self._parts['header'], **props))
1632 if self.buffered:
1633 if self.buffered:
1633 self.header[ctx.rev()] = h
1634 self.header[ctx.rev()] = h
1634 else:
1635 else:
1635 if self.lastheader != h:
1636 if self.lastheader != h:
1636 self.lastheader = h
1637 self.lastheader = h
1637 self.ui.write(h)
1638 self.ui.write(h)
1638
1639
1639 # write changeset metadata, then patch if requested
1640 # write changeset metadata, then patch if requested
1640 key = self._parts['changeset']
1641 key = self._parts['changeset']
1641 self.ui.write(templater.stringify(self.t(key, **props)))
1642 self.ui.write(templater.stringify(self.t(key, **props)))
1642 self.showpatch(ctx, matchfn)
1643 self.showpatch(ctx, matchfn)
1643
1644
1644 if self._parts['footer']:
1645 if self._parts['footer']:
1645 if not self.footer:
1646 if not self.footer:
1646 self.footer = templater.stringify(
1647 self.footer = templater.stringify(
1647 self.t(self._parts['footer'], **props))
1648 self.t(self._parts['footer'], **props))
1648
1649
1650 logtemplatespec = formatter.templatespec
1651
1649 def _lookuplogtemplate(ui, tmpl, style):
1652 def _lookuplogtemplate(ui, tmpl, style):
1650 """Find the template matching the given template spec or style
1653 """Find the template matching the given template spec or style
1651
1654
1652 See formatter.lookuptemplate() for details.
1655 See formatter.lookuptemplate() for details.
1653 """
1656 """
1654
1657
1655 # ui settings
1658 # ui settings
1656 if not tmpl and not style: # template is stronger than style
1659 if not tmpl and not style: # template is stronger than style
1657 tmpl = ui.config('ui', 'logtemplate')
1660 tmpl = ui.config('ui', 'logtemplate')
1658 if tmpl:
1661 if tmpl:
1659 return templater.unquotestring(tmpl), None
1662 return logtemplatespec(templater.unquotestring(tmpl), None)
1660 else:
1663 else:
1661 style = util.expandpath(ui.config('ui', 'style', ''))
1664 style = util.expandpath(ui.config('ui', 'style', ''))
1662
1665
1663 if not tmpl and style:
1666 if not tmpl and style:
1664 mapfile = style
1667 mapfile = style
1665 if not os.path.split(mapfile)[0]:
1668 if not os.path.split(mapfile)[0]:
1666 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1669 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1667 or templater.templatepath(mapfile))
1670 or templater.templatepath(mapfile))
1668 if mapname:
1671 if mapname:
1669 mapfile = mapname
1672 mapfile = mapname
1670 return None, mapfile
1673 return logtemplatespec(None, mapfile)
1671
1674
1672 if not tmpl:
1675 if not tmpl:
1673 return None, None
1676 return logtemplatespec(None, None)
1674
1677
1675 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1678 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1676
1679
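# Resolution sketch (an assumption, not upstream code): with neither a
# template nor a style given, the spec falls back to ui.logtemplate and
# then ui.style; the result always unpacks into (tmpl, mapfile).
def _example_lookup(ui):
    spec = _lookuplogtemplate(ui, tmpl='', style='')
    tmpl, mapfile = spec  # a literal template, a style map file, or (None, None)
    return tmpl, mapfile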
1677 def makelogtemplater(ui, repo, tmpl, buffered=False):
1680 def makelogtemplater(ui, repo, tmpl, buffered=False):
1678 """Create a changeset_templater from a literal template 'tmpl'"""
1681 """Create a changeset_templater from a literal template 'tmpl'"""
1679 return changeset_templater(ui, repo, matchfn=None, diffopts={},
1682 return changeset_templater(ui, repo, matchfn=None, diffopts={},
1680 tmpl=tmpl, mapfile=None, buffered=buffered)
1683 tmpl=tmpl, mapfile=None, buffered=buffered)
1681
1684
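# Usage sketch (an assumption, not upstream code): format a handful of
# changesets with a literal template, bypassing the log option handling.
def _example_templater(ui, repo):
    displayer = makelogtemplater(ui, repo,
                                 '{rev}:{node|short} {desc|firstline}\n')
    for rev in repo.revs('last(all(), 5)'):
        displayer.show(repo[rev])
    displayer.close()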
1682 def show_changeset(ui, repo, opts, buffered=False):
1685 def show_changeset(ui, repo, opts, buffered=False):
1683 """show one changeset using template or regular display.
1686 """show one changeset using template or regular display.
1684
1687
1685 Display format will be the first non-empty hit of:
1688 Display format will be the first non-empty hit of:
1686 1. option 'template'
1689 1. option 'template'
1687 2. option 'style'
1690 2. option 'style'
1688 3. [ui] setting 'logtemplate'
1691 3. [ui] setting 'logtemplate'
1689 4. [ui] setting 'style'
1692 4. [ui] setting 'style'
1690 If all of these values are either unset or the empty string,
1693 If all of these values are either unset or the empty string,
1691 regular display via changeset_printer() is done.
1694 regular display via changeset_printer() is done.
1692 """
1695 """
1693 # options
1696 # options
1694 matchfn = None
1697 matchfn = None
1695 if opts.get('patch') or opts.get('stat'):
1698 if opts.get('patch') or opts.get('stat'):
1696 matchfn = scmutil.matchall(repo)
1699 matchfn = scmutil.matchall(repo)
1697
1700
1698 if opts.get('template') == 'json':
1701 if opts.get('template') == 'json':
1699 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1702 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1700
1703
1701 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
1704 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
1702 tmpl, mapfile = spec
1705 tmpl, mapfile = spec
1703
1706
1704 if not tmpl and not mapfile:
1707 if not tmpl and not mapfile:
1705 return changeset_printer(ui, repo, matchfn, opts, buffered)
1708 return changeset_printer(ui, repo, matchfn, opts, buffered)
1706
1709
1707 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
1710 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
1708
1711
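# Call-pattern sketch (an assumption, not upstream code): log-like commands
# pass their option dict straight through and drive the returned displayer.
def _example_showchangeset(ui, repo):
    opts = {'template': '{rev} {desc|firstline}\n', 'patch': False}
    displayer = show_changeset(ui, repo, opts)
    displayer.show(repo['tip'])
    displayer.close()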
1709 def showmarker(fm, marker, index=None):
1712 def showmarker(fm, marker, index=None):
1710 """utility function to display obsolescence marker in a readable way
1713 """utility function to display obsolescence marker in a readable way
1711
1714
1712 To be used by debug function."""
1715 To be used by debug function."""
1713 if index is not None:
1716 if index is not None:
1714 fm.write('index', '%i ', index)
1717 fm.write('index', '%i ', index)
1715 fm.write('precnode', '%s ', hex(marker.precnode()))
1718 fm.write('precnode', '%s ', hex(marker.precnode()))
1716 succs = marker.succnodes()
1719 succs = marker.succnodes()
1717 fm.condwrite(succs, 'succnodes', '%s ',
1720 fm.condwrite(succs, 'succnodes', '%s ',
1718 fm.formatlist(map(hex, succs), name='node'))
1721 fm.formatlist(map(hex, succs), name='node'))
1719 fm.write('flag', '%X ', marker.flags())
1722 fm.write('flag', '%X ', marker.flags())
1720 parents = marker.parentnodes()
1723 parents = marker.parentnodes()
1721 if parents is not None:
1724 if parents is not None:
1722 fm.write('parentnodes', '{%s} ',
1725 fm.write('parentnodes', '{%s} ',
1723 fm.formatlist(map(hex, parents), name='node', sep=', '))
1726 fm.formatlist(map(hex, parents), name='node', sep=', '))
1724 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1727 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1725 meta = marker.metadata().copy()
1728 meta = marker.metadata().copy()
1726 meta.pop('date', None)
1729 meta.pop('date', None)
1727 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1730 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1728 fm.plain('\n')
1731 fm.plain('\n')
1729
1732
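# Debug-output sketch (an assumption, not upstream code): render obsolescence
# markers, obtained elsewhere, through a formatter as the debug commands do.
def _example_showmarkers(ui, markers):
    fm = ui.formatter('debugobsolete', {})
    for i, m in enumerate(markers):
        showmarker(fm, m, index=i)
    fm.end()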
1730 def finddate(ui, repo, date):
1733 def finddate(ui, repo, date):
1731 """Find the tipmost changeset that matches the given date spec"""
1734 """Find the tipmost changeset that matches the given date spec"""
1732
1735
1733 df = util.matchdate(date)
1736 df = util.matchdate(date)
1734 m = scmutil.matchall(repo)
1737 m = scmutil.matchall(repo)
1735 results = {}
1738 results = {}
1736
1739
1737 def prep(ctx, fns):
1740 def prep(ctx, fns):
1738 d = ctx.date()
1741 d = ctx.date()
1739 if df(d[0]):
1742 if df(d[0]):
1740 results[ctx.rev()] = d
1743 results[ctx.rev()] = d
1741
1744
1742 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1745 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1743 rev = ctx.rev()
1746 rev = ctx.rev()
1744 if rev in results:
1747 if rev in results:
1745 ui.status(_("found revision %s from %s\n") %
1748 ui.status(_("found revision %s from %s\n") %
1746 (rev, util.datestr(results[rev])))
1749 (rev, util.datestr(results[rev])))
1747 return '%d' % rev
1750 return '%d' % rev
1748
1751
1749 raise error.Abort(_("revision matching date not found"))
1752 raise error.Abort(_("revision matching date not found"))
1750
1753
1751 def increasingwindows(windowsize=8, sizelimit=512):
1754 def increasingwindows(windowsize=8, sizelimit=512):
1752 while True:
1755 while True:
1753 yield windowsize
1756 yield windowsize
1754 if windowsize < sizelimit:
1757 if windowsize < sizelimit:
1755 windowsize *= 2
1758 windowsize *= 2
1756
1759
1757 class FileWalkError(Exception):
1760 class FileWalkError(Exception):
1758 pass
1761 pass
1759
1762
1760 def walkfilerevs(repo, match, follow, revs, fncache):
1763 def walkfilerevs(repo, match, follow, revs, fncache):
1761 '''Walks the file history for the matched files.
1764 '''Walks the file history for the matched files.
1762
1765
1763 Returns the changeset revs that are involved in the file history.
1766 Returns the changeset revs that are involved in the file history.
1764
1767
1765 Throws FileWalkError if the file history can't be walked using
1768 Throws FileWalkError if the file history can't be walked using
1766 filelogs alone.
1769 filelogs alone.
1767 '''
1770 '''
1768 wanted = set()
1771 wanted = set()
1769 copies = []
1772 copies = []
1770 minrev, maxrev = min(revs), max(revs)
1773 minrev, maxrev = min(revs), max(revs)
1771 def filerevgen(filelog, last):
1774 def filerevgen(filelog, last):
1772 """
1775 """
1773 Only files, no patterns. Check the history of each file.
1776 Only files, no patterns. Check the history of each file.
1774
1777
1775 Examines filelog entries within the minrev..maxrev linkrev range.
1778 Examines filelog entries within the minrev..maxrev linkrev range.
1776 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1779 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1777 tuples in backwards order
1780 tuples in backwards order
1778 """
1781 """
1779 cl_count = len(repo)
1782 cl_count = len(repo)
1780 revs = []
1783 revs = []
1781 for j in xrange(0, last + 1):
1784 for j in xrange(0, last + 1):
1782 linkrev = filelog.linkrev(j)
1785 linkrev = filelog.linkrev(j)
1783 if linkrev < minrev:
1786 if linkrev < minrev:
1784 continue
1787 continue
1785 # only yield revs for which we have the changelog; this can
1788 # only yield revs for which we have the changelog; this can
1786 # happen while doing "hg log" during a pull or commit
1789 # happen while doing "hg log" during a pull or commit
1787 if linkrev >= cl_count:
1790 if linkrev >= cl_count:
1788 break
1791 break
1789
1792
1790 parentlinkrevs = []
1793 parentlinkrevs = []
1791 for p in filelog.parentrevs(j):
1794 for p in filelog.parentrevs(j):
1792 if p != nullrev:
1795 if p != nullrev:
1793 parentlinkrevs.append(filelog.linkrev(p))
1796 parentlinkrevs.append(filelog.linkrev(p))
1794 n = filelog.node(j)
1797 n = filelog.node(j)
1795 revs.append((linkrev, parentlinkrevs,
1798 revs.append((linkrev, parentlinkrevs,
1796 follow and filelog.renamed(n)))
1799 follow and filelog.renamed(n)))
1797
1800
1798 return reversed(revs)
1801 return reversed(revs)
1799 def iterfiles():
1802 def iterfiles():
1800 pctx = repo['.']
1803 pctx = repo['.']
1801 for filename in match.files():
1804 for filename in match.files():
1802 if follow:
1805 if follow:
1803 if filename not in pctx:
1806 if filename not in pctx:
1804 raise error.Abort(_('cannot follow file not in parent '
1807 raise error.Abort(_('cannot follow file not in parent '
1805 'revision: "%s"') % filename)
1808 'revision: "%s"') % filename)
1806 yield filename, pctx[filename].filenode()
1809 yield filename, pctx[filename].filenode()
1807 else:
1810 else:
1808 yield filename, None
1811 yield filename, None
1809 for filename_node in copies:
1812 for filename_node in copies:
1810 yield filename_node
1813 yield filename_node
1811
1814
1812 for file_, node in iterfiles():
1815 for file_, node in iterfiles():
1813 filelog = repo.file(file_)
1816 filelog = repo.file(file_)
1814 if not len(filelog):
1817 if not len(filelog):
1815 if node is None:
1818 if node is None:
1816 # A zero count may be a directory or deleted file, so
1819 # A zero count may be a directory or deleted file, so
1817 # try to find matching entries on the slow path.
1820 # try to find matching entries on the slow path.
1818 if follow:
1821 if follow:
1819 raise error.Abort(
1822 raise error.Abort(
1820 _('cannot follow nonexistent file: "%s"') % file_)
1823 _('cannot follow nonexistent file: "%s"') % file_)
1821 raise FileWalkError("Cannot walk via filelog")
1824 raise FileWalkError("Cannot walk via filelog")
1822 else:
1825 else:
1823 continue
1826 continue
1824
1827
1825 if node is None:
1828 if node is None:
1826 last = len(filelog) - 1
1829 last = len(filelog) - 1
1827 else:
1830 else:
1828 last = filelog.rev(node)
1831 last = filelog.rev(node)
1829
1832
1830 # keep track of all ancestors of the file
1833 # keep track of all ancestors of the file
1831 ancestors = {filelog.linkrev(last)}
1834 ancestors = {filelog.linkrev(last)}
1832
1835
1833 # iterate from latest to oldest revision
1836 # iterate from latest to oldest revision
1834 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1837 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1835 if not follow:
1838 if not follow:
1836 if rev > maxrev:
1839 if rev > maxrev:
1837 continue
1840 continue
1838 else:
1841 else:
1839 # Note that last might not be the first interesting
1842 # Note that last might not be the first interesting
1840 # rev to us:
1843 # rev to us:
1841 # if the file has been changed after maxrev, we'll
1844 # if the file has been changed after maxrev, we'll
1842 # have linkrev(last) > maxrev, and we still need
1845 # have linkrev(last) > maxrev, and we still need
1843 # to explore the file graph
1846 # to explore the file graph
1844 if rev not in ancestors:
1847 if rev not in ancestors:
1845 continue
1848 continue
1846 # XXX insert 1327 fix here
1849 # XXX insert 1327 fix here
1847 if flparentlinkrevs:
1850 if flparentlinkrevs:
1848 ancestors.update(flparentlinkrevs)
1851 ancestors.update(flparentlinkrevs)
1849
1852
1850 fncache.setdefault(rev, []).append(file_)
1853 fncache.setdefault(rev, []).append(file_)
1851 wanted.add(rev)
1854 wanted.add(rev)
1852 if copied:
1855 if copied:
1853 copies.append(copied)
1856 copies.append(copied)
1854
1857
1855 return wanted
1858 return wanted
1856
1859
1857 class _followfilter(object):
1860 class _followfilter(object):
1858 def __init__(self, repo, onlyfirst=False):
1861 def __init__(self, repo, onlyfirst=False):
1859 self.repo = repo
1862 self.repo = repo
1860 self.startrev = nullrev
1863 self.startrev = nullrev
1861 self.roots = set()
1864 self.roots = set()
1862 self.onlyfirst = onlyfirst
1865 self.onlyfirst = onlyfirst
1863
1866
1864 def match(self, rev):
1867 def match(self, rev):
1865 def realparents(rev):
1868 def realparents(rev):
1866 if self.onlyfirst:
1869 if self.onlyfirst:
1867 return self.repo.changelog.parentrevs(rev)[0:1]
1870 return self.repo.changelog.parentrevs(rev)[0:1]
1868 else:
1871 else:
1869 return filter(lambda x: x != nullrev,
1872 return filter(lambda x: x != nullrev,
1870 self.repo.changelog.parentrevs(rev))
1873 self.repo.changelog.parentrevs(rev))
1871
1874
1872 if self.startrev == nullrev:
1875 if self.startrev == nullrev:
1873 self.startrev = rev
1876 self.startrev = rev
1874 return True
1877 return True
1875
1878
1876 if rev > self.startrev:
1879 if rev > self.startrev:
1877 # forward: all descendants
1880 # forward: all descendants
1878 if not self.roots:
1881 if not self.roots:
1879 self.roots.add(self.startrev)
1882 self.roots.add(self.startrev)
1880 for parent in realparents(rev):
1883 for parent in realparents(rev):
1881 if parent in self.roots:
1884 if parent in self.roots:
1882 self.roots.add(rev)
1885 self.roots.add(rev)
1883 return True
1886 return True
1884 else:
1887 else:
1885 # backwards: all parents
1888 # backwards: all parents
1886 if not self.roots:
1889 if not self.roots:
1887 self.roots.update(realparents(self.startrev))
1890 self.roots.update(realparents(self.startrev))
1888 if rev in self.roots:
1891 if rev in self.roots:
1889 self.roots.remove(rev)
1892 self.roots.remove(rev)
1890 self.roots.update(realparents(rev))
1893 self.roots.update(realparents(rev))
1891 return True
1894 return True
1892
1895
1893 return False
1896 return False
1894
1897
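# Illustrative sketch (not part of the original module): _followfilter answers
# "is this rev connected to the rev the filter was started on?" when fed
# revisions in a monotonic order, which is how walkchangerevs() drives it.
def _example_followfilter(repo):
    ff = _followfilter(repo, onlyfirst=False)
    connected = []
    # walk from tip down to revision 0; the first rev seeds the filter
    for rev in xrange(len(repo) - 1, -1, -1):
        if ff.match(rev):
            connected.append(rev)
    # 'connected' now holds the tip and its ancestors, in descending order
    return connected
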
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()

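# Illustrative sketch (not part of the original module): how a log-like
# command might drive walkchangerevs(). 'pats' and 'opts' are assumed to be
# the usual command arguments; scmutil.match builds the file matcher.
def _example_walkchangerevs(ui, repo, pats, opts):
    match = scmutil.match(repo[None], pats, opts)
    def prepare(ctx, fns):
        # called in forward order within each window, before ctx is yielded
        pass
    for ctx in walkchangerevs(repo, match, opts, prepare):
        ui.write('%d:%s\n' % (ctx.rev(), short(ctx.node())))
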
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating revs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    fcacheready = [False]
    pctx = repo['.']

    def populate():
        for fn in files:
            fctx = pctx[fn]
            fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
            for c in fctx.ancestors(followfirst=followfirst):
                fcache.setdefault(c.rev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher

def _makenofollowlogfilematcher(repo, pats, opts):
    '''hook for extensions to override the filematcher for non-follow cases'''
    return None

def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher

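# Illustrative sketch (not part of the original module): the expression built
# above is a plain revset string. For example, options roughly matching
# "hg log -k bug -u alice" yield "((keyword('bug')) and (user('alice')))"
# (the exact quoting comes from the %r substitutions). Assumes a non-empty
# repository, since the helper peeks at the first revision.
def _example_makelogrevset(repo):
    revs = smartset.spanset(repo)
    expr, filematcher = _makelogrevset(repo, [],
                                       {'keyword': ['bug'],
                                        'user': ['alice']}, revs)
    return expr
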
def _logrevs(repo, opts):
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and repo.dirstate.p1() == nullid:
        revs = smartset.baseset()
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        revs = smartset.spanset(repo)
        revs.reverse()
    return revs

def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher

def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        limitedrevs = []
        for idx, r in enumerate(revs):
            if limit <= idx:
                break
            limitedrevs.append(r)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher

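# Illustrative sketch (not part of the original module): callers get back a
# smartset of revisions, the revset expression actually used (or None), and a
# per-revision file matcher when --patch/--stat style options are present.
def _example_getlogrevs(ui, repo):
    opts = {'rev': ['tip'], 'patch': True}
    revs, expr, filematcher = getlogrevs(repo, [], opts)
    for rev in revs:
        match = filematcher(rev)
        ui.write('%d matches %r\n' % (rev, match.files()))
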
def _graphnodeformatter(ui, displayer):
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    templ = formatter.maketemplater(ui, 'graphnode', spec)
    cache = {}
    if isinstance(displayer, changeset_templater):
        cache = displayer.cache # reuse cache of slow templates
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache
    def formatnode(repo, ctx):
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templater.stringify(templ('graphnode', **props))
    return formatnode

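# Illustrative note (not part of the original module): the template read above
# comes from the [ui] section of an hgrc; the value shown here is only an
# example, any 'hg log --template' keyword or function may be used:
#
#   [ui]
#   graphnodetemplate = {ifeq(phase, 'secret', 'S', graphnode)}
#
# When the option is unset, the fast showgraphnode path above is taken.
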
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

        # experimental config: experimental.graphshorten
        state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, state, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()

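# Illustrative note (not part of the original module): the experimental knobs
# read above can be set in an hgrc to change how 'hg log -G' draws its edges;
# the values below are only example characters, not defaults:
#
#   [experimental]
#   graphstyle.parent = |
#   graphstyle.grandparent = :
#   graphstyle.missing =
#   graphshorten = true
#
# An empty value is normalized to None by the loop above, and HGPLAIN forces
# every edge style back to '|'.
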
def graphlog(ui, repo, pats, opts):
    # Parameters are identical to log command ones
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)

def checkunsupportedgraphflags(pats, opts):
    for op in ["newest_first"]:
        if op in opts and opts[op]:
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % op.replace("_", "-"))

def graphrevs(repo, nodes, opts):
    limit = loglimit(opts)
    nodes.reverse()
    if limit is not None:
        nodes = nodes[:limit]
    return graphmod.nodes(repo, nodes)

def add(ui, repo, match, prefix, explicitonly, **opts):
    join = lambda f: os.path.join(prefix, f)
    bad = []

    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad

def addwebdirpath(repo, serverpath, webconf):
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    for r in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[r]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)

def forget(ui, repo, match, prefix, explicitonly):
    join = lambda f: os.path.join(prefix, f)
    bad = []
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot

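# Illustrative sketch (not part of the original module): how a caller such as
# the 'hg forget' command might consume the (bad, forgot) pair returned above;
# a non-empty 'bad' list maps to a non-zero exit status.
def _example_forget(ui, repo, pats, opts):
    match = scmutil.match(repo[None], pats, opts)
    bad, forgot = forget(ui, repo, match, prefix="", explicitonly=False)
    return 1 if bad else 0
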
def files(ui, ctx, m, fm, fmt, subrepos):
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret

def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                                % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    ui.progress(_('deleting'), None)

    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret

def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    err = 1

    def write(path):
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
        with formatter.maybereopen(basefm, filename, opts) as fm:
            data = ctx[path].data()
            if opts.get('decode'):
                data = repo.wwritedata(path, data)
            fm.startitem()
            fm.write('data', '%s', data)
            fm.data(abspath=path, path=matcher.rel(path))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                write(file)
                return 0
        except KeyError:
            pass

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path), **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err

2683 def commit(ui, repo, commitfunc, pats, opts):
2686 def commit(ui, repo, commitfunc, pats, opts):
2684 '''commit the specified files or all outstanding changes'''
2687 '''commit the specified files or all outstanding changes'''
2685 date = opts.get('date')
2688 date = opts.get('date')
2686 if date:
2689 if date:
2687 opts['date'] = util.parsedate(date)
2690 opts['date'] = util.parsedate(date)
2688 message = logmessage(ui, opts)
2691 message = logmessage(ui, opts)
2689 matcher = scmutil.match(repo[None], pats, opts)
2692 matcher = scmutil.match(repo[None], pats, opts)
2690
2693
2691 # extract addremove carefully -- this function can be called from a command
2694 # extract addremove carefully -- this function can be called from a command
2692 # that doesn't support addremove
2695 # that doesn't support addremove
2693 if opts.get('addremove'):
2696 if opts.get('addremove'):
2694 if scmutil.addremove(repo, matcher, "", opts) != 0:
2697 if scmutil.addremove(repo, matcher, "", opts) != 0:
2695 raise error.Abort(
2698 raise error.Abort(
2696 _("failed to mark all new/missing files as added/removed"))
2699 _("failed to mark all new/missing files as added/removed"))
2697
2700
2698 return commitfunc(ui, repo, message, matcher, opts)
2701 return commitfunc(ui, repo, message, matcher, opts)
2699
2702
2700 def samefile(f, ctx1, ctx2):
2703 def samefile(f, ctx1, ctx2):
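    '''return True if file f is unchanged between ctx1 and ctx2
    (same contents and flags), or is present in neither'''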
    if f in ctx1.manifest():
        a = ctx1.filectx(f)
        if f in ctx2.manifest():
            b = ctx2.filectx(f)
            return (not a.cmp(b)
                    and a.flags() == b.flags())
        else:
            return False
    else:
        return f not in ctx2.manifest()

def amend(ui, repo, commitfunc, old, extra, pats, opts):
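    '''rewrite changeset old, folding in any pending working directory
    changes, and return the node of the resulting changeset'''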
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or, if there were
            # no changes, the parent of the working directory as the version
            # of the files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                if old.p2:
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())
                files = [f for f in files if not samefile(f, ctx, base)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

            user = opts.get('user') or old.user()
            date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This is not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
            # commit the whole amend process
            if createmarkers:
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs, operation='amend')
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, wlock)
    return newid

def commiteditor(repo, ctx, subs, editform=''):
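    '''return the description of ctx, launching the commit editor only
    when ctx has no description yet'''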
    if ctx.description():
        return ctx.description()
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)

def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
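    '''unconditionally open the commit message editor for ctx and return the
    edited text, aborting on an empty (or, when requested, unchanged) message'''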
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            tmpl = templater.unquotestring(tmpl)
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text

def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
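    '''render the committemplate tmpl for ctx and return the resulting text'''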
    ui = repo.ui
    tmpl, mapfile = _lookuplogtemplate(ui, tmpl, None)

    t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)

    for k, v in repo.ui.configitems('committemplate'):
        if k != 'changeset':
            t.t.cache[k] = v

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()

def hgprefix(msg):
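    '''prefix every non-empty line of msg with "HG: "'''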
    return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])

def buildcommittext(repo, ctx, subs, extramsg):
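    '''build the default editor text for ctx: its current description
    followed by "HG:" comment lines summarizing the pending commit'''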
    edittext = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        edittext.append(ctx.description())
    edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(hgprefix(_("Enter commit message."
                               " Lines beginning with 'HG:' are removed.")))
    edittext.append(hgprefix(extramsg))
    edittext.append("HG: --")
    edittext.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        edittext.append(hgprefix(_("branch merge")))
    if ctx.branch():
        edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
    edittext.extend([hgprefix(_("added %s") % f) for f in added])
    edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
    edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
    if not added and not modified and not removed:
        edittext.append(hgprefix(_("no files changed")))
    edittext.append("")

    return "\n".join(edittext)

def commitstatus(repo, node, branch, bheads=None, opts=None):
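    '''show post-commit messages for node: "created new head", reopened
    branch heads, and the committed changeset under --verbose/--debug'''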
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))

def postcommitstatus(repo, pats, opts):
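    '''return the working directory status for the files matched by pats'''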
    return repo.status(match=scmutil.match(repo[None], pats, opts))

def revert(ui, repo, ctx, parents, *pats, **opts):
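    '''revert the files matched by pats to their state in ctx

    Classifies every matched file by how it differs from the target revision
    and from the dirstate, then dispatches the corresponding action (revert,
    add, remove, forget, undelete, ...), backing files up as configured.
    '''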
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find the status of all files in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take removes between wc and target into account
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinguish between dirstate removes and the others
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified; we need to post-process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted files
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between files to forget and the others
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present
        # at the same path. If such a file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of files>, <message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                   }

        # "constants" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set to avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that will result in a file change on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since target, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets do not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touched on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))

def _revertprefetch(repo, ctx, *files):
    """Let extensions that change the storage layer prefetch content"""
    pass

def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """function that actually performs all the actions computed for revert

    This is an independent function to let extensions plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    excluded_files = []
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        try:
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'revert'
            reversehunks = repo.ui.configbool('experimental',
                                              'revertalternateinteractivemode',
                                              True)
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)

class command(registrar.command):
    def _doregister(self, func, name, *args, **kwargs):
        func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
        return super(command, self)._doregister(func, name, *args, **kwargs)

# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
# (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
# - (sourceurl, sourcebranch, sourcepeer, incoming)
# - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# (state file, clearable, allowcommit, error, hint)
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]

def checkunfinished(repo, commit=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged().
    '''
    for f, clearable, allowcommit, msg, hint in unfinishedstates:
        if commit and allowcommit:
            continue
        if repo.vfs.exists(f):
            raise error.Abort(msg, hint=hint)

def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    for f, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(f):
            raise error.Abort(msg, hint=hint)
    for f, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(f):
            util.unlink(repo.vfs.join(f))

afterresolvedstates = [
    ('graftstate',
     _('hg graft --continue')),
    ]

def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    afterresolvedstates tuples define a .hg/{file} and the corresponding
3543 command needed to finish it.
3546 command needed to finish it.
3544
3547
3545 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3548 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3546 a boolean.
3549 a boolean.
3547 '''
3550 '''
3548 contmsg = _("continue: %s")
3551 contmsg = _("continue: %s")
3549 for f, msg in afterresolvedstates:
3552 for f, msg in afterresolvedstates:
3550 if repo.vfs.exists(f):
3553 if repo.vfs.exists(f):
3551 return contmsg % msg, True
3554 return contmsg % msg, True
3552 workingctx = repo[None]
3555 workingctx = repo[None]
3553 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3556 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3554 for s in workingctx.substate)
3557 for s in workingctx.substate)
3555 if dirty:
3558 if dirty:
3556 return contmsg % _("hg commit"), False
3559 return contmsg % _("hg commit"), False
3557 return None, None
3560 return None, None
3558
3561
3559 def checkafterresolved(repo):
3562 def checkafterresolved(repo):
3560 '''Inform the user about the next action after completing hg resolve
3563 '''Inform the user about the next action after completing hg resolve
3561
3564
3562 If there's a matching afterresolvedstates, howtocontinue will yield
3565 If there's a matching afterresolvedstates, howtocontinue will yield
3563 repo.ui.warn as the reporter.
3566 repo.ui.warn as the reporter.
3564
3567
3565 Otherwise, it will yield repo.ui.note.
3568 Otherwise, it will yield repo.ui.note.
3566 '''
3569 '''
3567 msg, warning = howtocontinue(repo)
3570 msg, warning = howtocontinue(repo)
3568 if msg is not None:
3571 if msg is not None:
3569 if warning:
3572 if warning:
3570 repo.ui.warn("%s\n" % msg)
3573 repo.ui.warn("%s\n" % msg)
3571 else:
3574 else:
3572 repo.ui.note("%s\n" % msg)
3575 repo.ui.note("%s\n" % msg)
3573
3576
3574 def wrongtooltocontinue(repo, task):
3577 def wrongtooltocontinue(repo, task):
3575 '''Raise an abort suggesting how to properly continue if there is an
3578 '''Raise an abort suggesting how to properly continue if there is an
3576 active task.
3579 active task.
3577
3580
3578 Uses howtocontinue() to find the active task.
3581 Uses howtocontinue() to find the active task.
3579
3582
3580 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3583 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3581 a hint.
3584 a hint.
3582 '''
3585 '''
3583 after = howtocontinue(repo)
3586 after = howtocontinue(repo)
3584 hint = None
3587 hint = None
3585 if after[1]:
3588 if after[1]:
3586 hint = after[0]
3589 hint = after[0]
3587 raise error.Abort(_('no %s in progress') % task, hint=hint)
3590 raise error.Abort(_('no %s in progress') % task, hint=hint)
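The unfinishedstates table above is the data that checkunfinished() and clearunfinished() walk, and afterresolvedstates drives howtocontinue(). Extensions that add their own multistep commands typically append entries at load time. A minimal sketch of that pattern, assuming a hypothetical 'hg foo' command and 'foostate' file (none of these names exist in Mercurial itself):

    # hedged sketch: an extension registering its own unfinished state
    # ('foostate', the command names and the messages are hypothetical)
    from mercurial import cmdutil
    from mercurial.i18n import _

    cmdutil.unfinishedstates.append(
        ('foostate',   # state file kept under .hg/ while the operation runs
         False,        # clearable: 'hg update' must not silently discard it
         False,        # allowcommit: refuse 'hg commit' while it is in progress
         _('foo in progress'),
         _("use 'hg foo --continue' or 'hg foo --abort'")))

    cmdutil.afterresolvedstates.append(
        ('foostate', _('hg foo --continue')))

With such an entry, checkunfinished() aborts with the given message whenever .hg/foostate exists, clearunfinished() leaves the file alone because it is not clearable, and checkafterresolved() can suggest 'hg foo --continue' once conflicts are resolved.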
@@ -1,483 +1,486
1 # formatter.py - generic output formatting for mercurial
1 # formatter.py - generic output formatting for mercurial
2 #
2 #
3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """Generic output formatting for Mercurial
8 """Generic output formatting for Mercurial
9
9
10 The formatter provides an API to show data in various ways. The following
10 The formatter provides an API to show data in various ways. The following
11 functions should be used in place of ui.write():
11 functions should be used in place of ui.write():
12
12
13 - fm.write() for unconditional output
13 - fm.write() for unconditional output
14 - fm.condwrite() to show some extra data conditionally in plain output
14 - fm.condwrite() to show some extra data conditionally in plain output
15 - fm.context() to provide changectx to template output
15 - fm.context() to provide changectx to template output
16 - fm.data() to provide extra data to JSON or template output
16 - fm.data() to provide extra data to JSON or template output
17 - fm.plain() to show raw text that isn't provided to JSON or template output
17 - fm.plain() to show raw text that isn't provided to JSON or template output
18
18
19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
20 beforehand so the data is converted to the appropriate data type. Use
20 beforehand so the data is converted to the appropriate data type. Use
21 fm.isplain() if you need to convert or format data conditionally which isn't
21 fm.isplain() if you need to convert or format data conditionally which isn't
22 supported by the formatter API.
22 supported by the formatter API.
23
23
24 To build nested structure (i.e. a list of dicts), use fm.nested().
24 To build nested structure (i.e. a list of dicts), use fm.nested().
25
25
26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
27
27
28 fm.condwrite() vs 'if cond:':
28 fm.condwrite() vs 'if cond:':
29
29
30 In most cases, use fm.condwrite() so users can selectively show the data
30 In most cases, use fm.condwrite() so users can selectively show the data
31 in template output. If it's costly to build data, use plain 'if cond:' with
31 in template output. If it's costly to build data, use plain 'if cond:' with
32 fm.write().
32 fm.write().
33
33
34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
35
35
36 fm.nested() should be used to form a tree structure (a list of dicts of
36 fm.nested() should be used to form a tree structure (a list of dicts of
37 lists of dicts...) which can be accessed through template keywords, e.g.
37 lists of dicts...) which can be accessed through template keywords, e.g.
38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
39 exports a dict-type object to template, which can be accessed by e.g.
39 exports a dict-type object to template, which can be accessed by e.g.
40 "{get(foo, key)}" function.
40 "{get(foo, key)}" function.
41
41
42 Doctest helper:
42 Doctest helper:
43
43
44 >>> def show(fn, verbose=False, **opts):
44 >>> def show(fn, verbose=False, **opts):
45 ... import sys
45 ... import sys
46 ... from . import ui as uimod
46 ... from . import ui as uimod
47 ... ui = uimod.ui()
47 ... ui = uimod.ui()
48 ... ui.fout = sys.stdout # redirect to doctest
48 ... ui.fout = sys.stdout # redirect to doctest
49 ... ui.verbose = verbose
49 ... ui.verbose = verbose
50 ... return fn(ui, ui.formatter(fn.__name__, opts))
50 ... return fn(ui, ui.formatter(fn.__name__, opts))
51
51
52 Basic example:
52 Basic example:
53
53
54 >>> def files(ui, fm):
54 >>> def files(ui, fm):
55 ... files = [('foo', 123, (0, 0)), ('bar', 456, (1, 0))]
55 ... files = [('foo', 123, (0, 0)), ('bar', 456, (1, 0))]
56 ... for f in files:
56 ... for f in files:
57 ... fm.startitem()
57 ... fm.startitem()
58 ... fm.write('path', '%s', f[0])
58 ... fm.write('path', '%s', f[0])
59 ... fm.condwrite(ui.verbose, 'date', ' %s',
59 ... fm.condwrite(ui.verbose, 'date', ' %s',
60 ... fm.formatdate(f[2], '%Y-%m-%d %H:%M:%S'))
60 ... fm.formatdate(f[2], '%Y-%m-%d %H:%M:%S'))
61 ... fm.data(size=f[1])
61 ... fm.data(size=f[1])
62 ... fm.plain('\\n')
62 ... fm.plain('\\n')
63 ... fm.end()
63 ... fm.end()
64 >>> show(files)
64 >>> show(files)
65 foo
65 foo
66 bar
66 bar
67 >>> show(files, verbose=True)
67 >>> show(files, verbose=True)
68 foo 1970-01-01 00:00:00
68 foo 1970-01-01 00:00:00
69 bar 1970-01-01 00:00:01
69 bar 1970-01-01 00:00:01
70 >>> show(files, template='json')
70 >>> show(files, template='json')
71 [
71 [
72 {
72 {
73 "date": [0, 0],
73 "date": [0, 0],
74 "path": "foo",
74 "path": "foo",
75 "size": 123
75 "size": 123
76 },
76 },
77 {
77 {
78 "date": [1, 0],
78 "date": [1, 0],
79 "path": "bar",
79 "path": "bar",
80 "size": 456
80 "size": 456
81 }
81 }
82 ]
82 ]
83 >>> show(files, template='path: {path}\\ndate: {date|rfc3339date}\\n')
83 >>> show(files, template='path: {path}\\ndate: {date|rfc3339date}\\n')
84 path: foo
84 path: foo
85 date: 1970-01-01T00:00:00+00:00
85 date: 1970-01-01T00:00:00+00:00
86 path: bar
86 path: bar
87 date: 1970-01-01T00:00:01+00:00
87 date: 1970-01-01T00:00:01+00:00
88
88
89 Nested example:
89 Nested example:
90
90
91 >>> def subrepos(ui, fm):
91 >>> def subrepos(ui, fm):
92 ... fm.startitem()
92 ... fm.startitem()
93 ... fm.write('repo', '[%s]\\n', 'baz')
93 ... fm.write('repo', '[%s]\\n', 'baz')
94 ... files(ui, fm.nested('files'))
94 ... files(ui, fm.nested('files'))
95 ... fm.end()
95 ... fm.end()
96 >>> show(subrepos)
96 >>> show(subrepos)
97 [baz]
97 [baz]
98 foo
98 foo
99 bar
99 bar
100 >>> show(subrepos, template='{repo}: {join(files % "{path}", ", ")}\\n')
100 >>> show(subrepos, template='{repo}: {join(files % "{path}", ", ")}\\n')
101 baz: foo, bar
101 baz: foo, bar
102 """
102 """
103
103
104 from __future__ import absolute_import
104 from __future__ import absolute_import
105
105
106 import collections
106 import contextlib
107 import contextlib
107 import itertools
108 import itertools
108 import os
109 import os
109
110
110 from .i18n import _
111 from .i18n import _
111 from .node import (
112 from .node import (
112 hex,
113 hex,
113 short,
114 short,
114 )
115 )
115
116
116 from . import (
117 from . import (
117 error,
118 error,
118 pycompat,
119 pycompat,
119 templatefilters,
120 templatefilters,
120 templatekw,
121 templatekw,
121 templater,
122 templater,
122 util,
123 util,
123 )
124 )
124
125
125 pickle = util.pickle
126 pickle = util.pickle
126
127
127 class _nullconverter(object):
128 class _nullconverter(object):
128 '''convert non-primitive data types to be processed by formatter'''
129 '''convert non-primitive data types to be processed by formatter'''
129 @staticmethod
130 @staticmethod
130 def formatdate(date, fmt):
131 def formatdate(date, fmt):
131 '''convert date tuple to appropriate format'''
132 '''convert date tuple to appropriate format'''
132 return date
133 return date
133 @staticmethod
134 @staticmethod
134 def formatdict(data, key, value, fmt, sep):
135 def formatdict(data, key, value, fmt, sep):
135 '''convert dict or key-value pairs to appropriate dict format'''
136 '''convert dict or key-value pairs to appropriate dict format'''
136 # use plain dict instead of util.sortdict so that data can be
137 # use plain dict instead of util.sortdict so that data can be
137 # serialized as a builtin dict in pickle output
138 # serialized as a builtin dict in pickle output
138 return dict(data)
139 return dict(data)
139 @staticmethod
140 @staticmethod
140 def formatlist(data, name, fmt, sep):
141 def formatlist(data, name, fmt, sep):
141 '''convert iterable to appropriate list format'''
142 '''convert iterable to appropriate list format'''
142 return list(data)
143 return list(data)
143
144
144 class baseformatter(object):
145 class baseformatter(object):
145 def __init__(self, ui, topic, opts, converter):
146 def __init__(self, ui, topic, opts, converter):
146 self._ui = ui
147 self._ui = ui
147 self._topic = topic
148 self._topic = topic
148 self._style = opts.get("style")
149 self._style = opts.get("style")
149 self._template = opts.get("template")
150 self._template = opts.get("template")
150 self._converter = converter
151 self._converter = converter
151 self._item = None
152 self._item = None
152 # function to convert node to string suitable for this output
153 # function to convert node to string suitable for this output
153 self.hexfunc = hex
154 self.hexfunc = hex
154 def __enter__(self):
155 def __enter__(self):
155 return self
156 return self
156 def __exit__(self, exctype, excvalue, traceback):
157 def __exit__(self, exctype, excvalue, traceback):
157 if exctype is None:
158 if exctype is None:
158 self.end()
159 self.end()
159 def _showitem(self):
160 def _showitem(self):
160 '''show a formatted item once all data is collected'''
161 '''show a formatted item once all data is collected'''
161 pass
162 pass
162 def startitem(self):
163 def startitem(self):
163 '''begin an item in the format list'''
164 '''begin an item in the format list'''
164 if self._item is not None:
165 if self._item is not None:
165 self._showitem()
166 self._showitem()
166 self._item = {}
167 self._item = {}
167 def formatdate(self, date, fmt='%a %b %d %H:%M:%S %Y %1%2'):
168 def formatdate(self, date, fmt='%a %b %d %H:%M:%S %Y %1%2'):
168 '''convert date tuple to appropriate format'''
169 '''convert date tuple to appropriate format'''
169 return self._converter.formatdate(date, fmt)
170 return self._converter.formatdate(date, fmt)
170 def formatdict(self, data, key='key', value='value', fmt='%s=%s', sep=' '):
171 def formatdict(self, data, key='key', value='value', fmt='%s=%s', sep=' '):
171 '''convert dict or key-value pairs to appropriate dict format'''
172 '''convert dict or key-value pairs to appropriate dict format'''
172 return self._converter.formatdict(data, key, value, fmt, sep)
173 return self._converter.formatdict(data, key, value, fmt, sep)
173 def formatlist(self, data, name, fmt='%s', sep=' '):
174 def formatlist(self, data, name, fmt='%s', sep=' '):
174 '''convert iterable to appropriate list format'''
175 '''convert iterable to appropriate list format'''
175 # name is mandatory argument for now, but it could be optional if
176 # name is mandatory argument for now, but it could be optional if
176 # we have default template keyword, e.g. {item}
177 # we have default template keyword, e.g. {item}
177 return self._converter.formatlist(data, name, fmt, sep)
178 return self._converter.formatlist(data, name, fmt, sep)
178 def context(self, **ctxs):
179 def context(self, **ctxs):
179 '''insert context objects to be used to render template keywords'''
180 '''insert context objects to be used to render template keywords'''
180 pass
181 pass
181 def data(self, **data):
182 def data(self, **data):
182 '''insert data into item that's not shown in default output'''
183 '''insert data into item that's not shown in default output'''
183 data = pycompat.byteskwargs(data)
184 data = pycompat.byteskwargs(data)
184 self._item.update(data)
185 self._item.update(data)
185 def write(self, fields, deftext, *fielddata, **opts):
186 def write(self, fields, deftext, *fielddata, **opts):
186 '''do default text output while assigning data to item'''
187 '''do default text output while assigning data to item'''
187 fieldkeys = fields.split()
188 fieldkeys = fields.split()
188 assert len(fieldkeys) == len(fielddata)
189 assert len(fieldkeys) == len(fielddata)
189 self._item.update(zip(fieldkeys, fielddata))
190 self._item.update(zip(fieldkeys, fielddata))
190 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
191 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
191 '''do conditional write (primarily for plain formatter)'''
192 '''do conditional write (primarily for plain formatter)'''
192 fieldkeys = fields.split()
193 fieldkeys = fields.split()
193 assert len(fieldkeys) == len(fielddata)
194 assert len(fieldkeys) == len(fielddata)
194 self._item.update(zip(fieldkeys, fielddata))
195 self._item.update(zip(fieldkeys, fielddata))
195 def plain(self, text, **opts):
196 def plain(self, text, **opts):
196 '''show raw text for non-templated mode'''
197 '''show raw text for non-templated mode'''
197 pass
198 pass
198 def isplain(self):
199 def isplain(self):
199 '''check for plain formatter usage'''
200 '''check for plain formatter usage'''
200 return False
201 return False
201 def nested(self, field):
202 def nested(self, field):
202 '''sub formatter to store nested data in the specified field'''
203 '''sub formatter to store nested data in the specified field'''
203 self._item[field] = data = []
204 self._item[field] = data = []
204 return _nestedformatter(self._ui, self._converter, data)
205 return _nestedformatter(self._ui, self._converter, data)
205 def end(self):
206 def end(self):
206 '''end output for the formatter'''
207 '''end output for the formatter'''
207 if self._item is not None:
208 if self._item is not None:
208 self._showitem()
209 self._showitem()
209
210
210 def nullformatter(ui, topic):
211 def nullformatter(ui, topic):
211 '''formatter that prints nothing'''
212 '''formatter that prints nothing'''
212 return baseformatter(ui, topic, opts={}, converter=_nullconverter)
213 return baseformatter(ui, topic, opts={}, converter=_nullconverter)
213
214
214 class _nestedformatter(baseformatter):
215 class _nestedformatter(baseformatter):
215 '''build sub items and store them in the parent formatter'''
216 '''build sub items and store them in the parent formatter'''
216 def __init__(self, ui, converter, data):
217 def __init__(self, ui, converter, data):
217 baseformatter.__init__(self, ui, topic='', opts={}, converter=converter)
218 baseformatter.__init__(self, ui, topic='', opts={}, converter=converter)
218 self._data = data
219 self._data = data
219 def _showitem(self):
220 def _showitem(self):
220 self._data.append(self._item)
221 self._data.append(self._item)
221
222
222 def _iteritems(data):
223 def _iteritems(data):
223 '''iterate key-value pairs in stable order'''
224 '''iterate key-value pairs in stable order'''
224 if isinstance(data, dict):
225 if isinstance(data, dict):
225 return sorted(data.iteritems())
226 return sorted(data.iteritems())
226 return data
227 return data
227
228
228 class _plainconverter(object):
229 class _plainconverter(object):
229 '''convert non-primitive data types to text'''
230 '''convert non-primitive data types to text'''
230 @staticmethod
231 @staticmethod
231 def formatdate(date, fmt):
232 def formatdate(date, fmt):
232 '''stringify date tuple in the given format'''
233 '''stringify date tuple in the given format'''
233 return util.datestr(date, fmt)
234 return util.datestr(date, fmt)
234 @staticmethod
235 @staticmethod
235 def formatdict(data, key, value, fmt, sep):
236 def formatdict(data, key, value, fmt, sep):
236 '''stringify key-value pairs separated by sep'''
237 '''stringify key-value pairs separated by sep'''
237 return sep.join(fmt % (k, v) for k, v in _iteritems(data))
238 return sep.join(fmt % (k, v) for k, v in _iteritems(data))
238 @staticmethod
239 @staticmethod
239 def formatlist(data, name, fmt, sep):
240 def formatlist(data, name, fmt, sep):
240 '''stringify iterable separated by sep'''
241 '''stringify iterable separated by sep'''
241 return sep.join(fmt % e for e in data)
242 return sep.join(fmt % e for e in data)
242
243
243 class plainformatter(baseformatter):
244 class plainformatter(baseformatter):
244 '''the default text output scheme'''
245 '''the default text output scheme'''
245 def __init__(self, ui, out, topic, opts):
246 def __init__(self, ui, out, topic, opts):
246 baseformatter.__init__(self, ui, topic, opts, _plainconverter)
247 baseformatter.__init__(self, ui, topic, opts, _plainconverter)
247 if ui.debugflag:
248 if ui.debugflag:
248 self.hexfunc = hex
249 self.hexfunc = hex
249 else:
250 else:
250 self.hexfunc = short
251 self.hexfunc = short
251 if ui is out:
252 if ui is out:
252 self._write = ui.write
253 self._write = ui.write
253 else:
254 else:
254 self._write = lambda s, **opts: out.write(s)
255 self._write = lambda s, **opts: out.write(s)
255 def startitem(self):
256 def startitem(self):
256 pass
257 pass
257 def data(self, **data):
258 def data(self, **data):
258 pass
259 pass
259 def write(self, fields, deftext, *fielddata, **opts):
260 def write(self, fields, deftext, *fielddata, **opts):
260 self._write(deftext % fielddata, **opts)
261 self._write(deftext % fielddata, **opts)
261 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
262 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
262 '''do conditional write'''
263 '''do conditional write'''
263 if cond:
264 if cond:
264 self._write(deftext % fielddata, **opts)
265 self._write(deftext % fielddata, **opts)
265 def plain(self, text, **opts):
266 def plain(self, text, **opts):
266 self._write(text, **opts)
267 self._write(text, **opts)
267 def isplain(self):
268 def isplain(self):
268 return True
269 return True
269 def nested(self, field):
270 def nested(self, field):
270 # nested data will be directly written to ui
271 # nested data will be directly written to ui
271 return self
272 return self
272 def end(self):
273 def end(self):
273 pass
274 pass
274
275
275 class debugformatter(baseformatter):
276 class debugformatter(baseformatter):
276 def __init__(self, ui, out, topic, opts):
277 def __init__(self, ui, out, topic, opts):
277 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
278 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
278 self._out = out
279 self._out = out
279 self._out.write("%s = [\n" % self._topic)
280 self._out.write("%s = [\n" % self._topic)
280 def _showitem(self):
281 def _showitem(self):
281 self._out.write(" " + repr(self._item) + ",\n")
282 self._out.write(" " + repr(self._item) + ",\n")
282 def end(self):
283 def end(self):
283 baseformatter.end(self)
284 baseformatter.end(self)
284 self._out.write("]\n")
285 self._out.write("]\n")
285
286
286 class pickleformatter(baseformatter):
287 class pickleformatter(baseformatter):
287 def __init__(self, ui, out, topic, opts):
288 def __init__(self, ui, out, topic, opts):
288 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
289 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
289 self._out = out
290 self._out = out
290 self._data = []
291 self._data = []
291 def _showitem(self):
292 def _showitem(self):
292 self._data.append(self._item)
293 self._data.append(self._item)
293 def end(self):
294 def end(self):
294 baseformatter.end(self)
295 baseformatter.end(self)
295 self._out.write(pickle.dumps(self._data))
296 self._out.write(pickle.dumps(self._data))
296
297
297 class jsonformatter(baseformatter):
298 class jsonformatter(baseformatter):
298 def __init__(self, ui, out, topic, opts):
299 def __init__(self, ui, out, topic, opts):
299 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
300 baseformatter.__init__(self, ui, topic, opts, _nullconverter)
300 self._out = out
301 self._out = out
301 self._out.write("[")
302 self._out.write("[")
302 self._first = True
303 self._first = True
303 def _showitem(self):
304 def _showitem(self):
304 if self._first:
305 if self._first:
305 self._first = False
306 self._first = False
306 else:
307 else:
307 self._out.write(",")
308 self._out.write(",")
308
309
309 self._out.write("\n {\n")
310 self._out.write("\n {\n")
310 first = True
311 first = True
311 for k, v in sorted(self._item.items()):
312 for k, v in sorted(self._item.items()):
312 if first:
313 if first:
313 first = False
314 first = False
314 else:
315 else:
315 self._out.write(",\n")
316 self._out.write(",\n")
316 u = templatefilters.json(v, paranoid=False)
317 u = templatefilters.json(v, paranoid=False)
317 self._out.write(' "%s": %s' % (k, u))
318 self._out.write(' "%s": %s' % (k, u))
318 self._out.write("\n }")
319 self._out.write("\n }")
319 def end(self):
320 def end(self):
320 baseformatter.end(self)
321 baseformatter.end(self)
321 self._out.write("\n]\n")
322 self._out.write("\n]\n")
322
323
323 class _templateconverter(object):
324 class _templateconverter(object):
324 '''convert non-primitive data types to be processed by templater'''
325 '''convert non-primitive data types to be processed by templater'''
325 @staticmethod
326 @staticmethod
326 def formatdate(date, fmt):
327 def formatdate(date, fmt):
327 '''return date tuple'''
328 '''return date tuple'''
328 return date
329 return date
329 @staticmethod
330 @staticmethod
330 def formatdict(data, key, value, fmt, sep):
331 def formatdict(data, key, value, fmt, sep):
331 '''build object that can be evaluated as either plain string or dict'''
332 '''build object that can be evaluated as either plain string or dict'''
332 data = util.sortdict(_iteritems(data))
333 data = util.sortdict(_iteritems(data))
333 def f():
334 def f():
334 yield _plainconverter.formatdict(data, key, value, fmt, sep)
335 yield _plainconverter.formatdict(data, key, value, fmt, sep)
335 return templatekw.hybriddict(data, key=key, value=value, fmt=fmt,
336 return templatekw.hybriddict(data, key=key, value=value, fmt=fmt,
336 gen=f())
337 gen=f())
337 @staticmethod
338 @staticmethod
338 def formatlist(data, name, fmt, sep):
339 def formatlist(data, name, fmt, sep):
339 '''build object that can be evaluated as either plain string or list'''
340 '''build object that can be evaluated as either plain string or list'''
340 data = list(data)
341 data = list(data)
341 def f():
342 def f():
342 yield _plainconverter.formatlist(data, name, fmt, sep)
343 yield _plainconverter.formatlist(data, name, fmt, sep)
343 return templatekw.hybridlist(data, name=name, fmt=fmt, gen=f())
344 return templatekw.hybridlist(data, name=name, fmt=fmt, gen=f())
344
345
345 class templateformatter(baseformatter):
346 class templateformatter(baseformatter):
346 def __init__(self, ui, out, topic, opts):
347 def __init__(self, ui, out, topic, opts):
347 baseformatter.__init__(self, ui, topic, opts, _templateconverter)
348 baseformatter.__init__(self, ui, topic, opts, _templateconverter)
348 self._out = out
349 self._out = out
349 self._topic = topic
350 self._topic = topic
350 spec = lookuptemplate(ui, topic, opts.get('template', ''))
351 spec = lookuptemplate(ui, topic, opts.get('template', ''))
351 self._t = loadtemplater(ui, topic, spec, cache=templatekw.defaulttempl)
352 self._t = loadtemplater(ui, topic, spec, cache=templatekw.defaulttempl)
352 self._counter = itertools.count()
353 self._counter = itertools.count()
353 self._cache = {} # for templatekw/funcs to store reusable data
354 self._cache = {} # for templatekw/funcs to store reusable data
354 def context(self, **ctxs):
355 def context(self, **ctxs):
355 '''insert context objects to be used to render template keywords'''
356 '''insert context objects to be used to render template keywords'''
356 assert all(k == 'ctx' for k in ctxs)
357 assert all(k == 'ctx' for k in ctxs)
357 self._item.update(ctxs)
358 self._item.update(ctxs)
358 def _showitem(self):
359 def _showitem(self):
359 # TODO: add support for filectx. probably each template keyword or
360 # TODO: add support for filectx. probably each template keyword or
360 # function will have to declare dependent resources. e.g.
361 # function will have to declare dependent resources. e.g.
361 # @templatekeyword(..., requires=('ctx',))
362 # @templatekeyword(..., requires=('ctx',))
362 props = {}
363 props = {}
363 if 'ctx' in self._item:
364 if 'ctx' in self._item:
364 props.update(templatekw.keywords)
365 props.update(templatekw.keywords)
365 props['index'] = next(self._counter)
366 props['index'] = next(self._counter)
366 # explicitly-defined fields precede templatekw
367 # explicitly-defined fields precede templatekw
367 props.update(self._item)
368 props.update(self._item)
368 if 'ctx' in self._item:
369 if 'ctx' in self._item:
369 # but template resources must be always available
370 # but template resources must be always available
370 props['templ'] = self._t
371 props['templ'] = self._t
371 props['repo'] = props['ctx'].repo()
372 props['repo'] = props['ctx'].repo()
372 props['revcache'] = {}
373 props['revcache'] = {}
373 g = self._t(self._topic, ui=self._ui, cache=self._cache, **props)
374 g = self._t(self._topic, ui=self._ui, cache=self._cache, **props)
374 self._out.write(templater.stringify(g))
375 self._out.write(templater.stringify(g))
375
376
377 templatespec = collections.namedtuple(r'templatespec',
378 r'tmpl mapfile')
379
376 def lookuptemplate(ui, topic, tmpl):
380 def lookuptemplate(ui, topic, tmpl):
377 """Find the template matching the given -T/--template spec 'tmpl'
381 """Find the template matching the given -T/--template spec 'tmpl'
378
382
379 'tmpl' can be any of the following:
383 'tmpl' can be any of the following:
380
384
381 - a literal template (e.g. '{rev}')
385 - a literal template (e.g. '{rev}')
382 - a map-file name or path (e.g. 'changelog')
386 - a map-file name or path (e.g. 'changelog')
383 - a reference to [templates] in config file
387 - a reference to [templates] in config file
384 - a path to raw template file
388 - a path to raw template file
385
389
386 A map file defines a stand-alone template environment. If a map file
390 A map file defines a stand-alone template environment. If a map file
387 is selected, all templates defined in the file will be loaded, and the
391 is selected, all templates defined in the file will be loaded, and the
388 template matching the given topic will be rendered. No aliases will be
392 template matching the given topic will be rendered. No aliases will be
389 loaded from user config.
393 loaded from user config.
390 """
394 """
391
395
392 # looks like a literal template?
396 # looks like a literal template?
393 if '{' in tmpl:
397 if '{' in tmpl:
394 return tmpl, None
398 return templatespec(tmpl, None)
395
399
396 # perhaps a stock style?
400 # perhaps a stock style?
397 if not os.path.split(tmpl)[0]:
401 if not os.path.split(tmpl)[0]:
398 mapname = (templater.templatepath('map-cmdline.' + tmpl)
402 mapname = (templater.templatepath('map-cmdline.' + tmpl)
399 or templater.templatepath(tmpl))
403 or templater.templatepath(tmpl))
400 if mapname and os.path.isfile(mapname):
404 if mapname and os.path.isfile(mapname):
401 return None, mapname
405 return templatespec(None, mapname)
402
406
403 # perhaps it's a reference to [templates]
407 # perhaps it's a reference to [templates]
404 t = ui.config('templates', tmpl)
408 t = ui.config('templates', tmpl)
405 if t:
409 if t:
406 return templater.unquotestring(t), None
410 return templatespec(templater.unquotestring(t), None)
407
411
408 if tmpl == 'list':
412 if tmpl == 'list':
409 ui.write(_("available styles: %s\n") % templater.stylelist())
413 ui.write(_("available styles: %s\n") % templater.stylelist())
410 raise error.Abort(_("specify a template"))
414 raise error.Abort(_("specify a template"))
411
415
412 # perhaps it's a path to a map or a template
416 # perhaps it's a path to a map or a template
413 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
417 if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
414 # is it a mapfile for a style?
418 # is it a mapfile for a style?
415 if os.path.basename(tmpl).startswith("map-"):
419 if os.path.basename(tmpl).startswith("map-"):
416 return None, os.path.realpath(tmpl)
420 return templatespec(None, os.path.realpath(tmpl))
417 with util.posixfile(tmpl, 'rb') as f:
421 with util.posixfile(tmpl, 'rb') as f:
418 tmpl = f.read()
422 tmpl = f.read()
419 return tmpl, None
423 return templatespec(tmpl, None)
420
424
421 # constant string?
425 # constant string?
422 return tmpl, None
426 return templatespec(tmpl, None)
423
427
424 def loadtemplater(ui, topic, spec, cache=None):
428 def loadtemplater(ui, topic, spec, cache=None):
425 """Create a templater from either a literal template or loading from
429 """Create a templater from either a literal template or loading from
426 a map file"""
430 a map file"""
427 tmpl, mapfile = spec
431 assert not (spec.tmpl and spec.mapfile)
428 assert not (tmpl and mapfile)
432 if spec.mapfile:
429 if mapfile:
433 return templater.templater.frommapfile(spec.mapfile, cache=cache)
430 return templater.templater.frommapfile(mapfile, cache=cache)
434 return maketemplater(ui, topic, spec.tmpl, cache=cache)
431 return maketemplater(ui, topic, tmpl, cache=cache)
432
435
433 def maketemplater(ui, topic, tmpl, cache=None):
436 def maketemplater(ui, topic, tmpl, cache=None):
434 """Create a templater from a string template 'tmpl'"""
437 """Create a templater from a string template 'tmpl'"""
435 aliases = ui.configitems('templatealias')
438 aliases = ui.configitems('templatealias')
436 t = templater.templater(cache=cache, aliases=aliases)
439 t = templater.templater(cache=cache, aliases=aliases)
437 if tmpl:
440 if tmpl:
438 t.cache[topic] = tmpl
441 t.cache[topic] = tmpl
439 return t
442 return t
440
443
441 def formatter(ui, out, topic, opts):
444 def formatter(ui, out, topic, opts):
442 template = opts.get("template", "")
445 template = opts.get("template", "")
443 if template == "json":
446 if template == "json":
444 return jsonformatter(ui, out, topic, opts)
447 return jsonformatter(ui, out, topic, opts)
445 elif template == "pickle":
448 elif template == "pickle":
446 return pickleformatter(ui, out, topic, opts)
449 return pickleformatter(ui, out, topic, opts)
447 elif template == "debug":
450 elif template == "debug":
448 return debugformatter(ui, out, topic, opts)
451 return debugformatter(ui, out, topic, opts)
449 elif template != "":
452 elif template != "":
450 return templateformatter(ui, out, topic, opts)
453 return templateformatter(ui, out, topic, opts)
451 # developer config: ui.formatdebug
454 # developer config: ui.formatdebug
452 elif ui.configbool('ui', 'formatdebug'):
455 elif ui.configbool('ui', 'formatdebug'):
453 return debugformatter(ui, out, topic, opts)
456 return debugformatter(ui, out, topic, opts)
454 # deprecated config: ui.formatjson
457 # deprecated config: ui.formatjson
455 elif ui.configbool('ui', 'formatjson'):
458 elif ui.configbool('ui', 'formatjson'):
456 return jsonformatter(ui, out, topic, opts)
459 return jsonformatter(ui, out, topic, opts)
457 return plainformatter(ui, out, topic, opts)
460 return plainformatter(ui, out, topic, opts)
458
461
459 @contextlib.contextmanager
462 @contextlib.contextmanager
460 def openformatter(ui, filename, topic, opts):
463 def openformatter(ui, filename, topic, opts):
461 """Create a formatter that writes outputs to the specified file
464 """Create a formatter that writes outputs to the specified file
462
465
463 Must be invoked using the 'with' statement.
466 Must be invoked using the 'with' statement.
464 """
467 """
465 with util.posixfile(filename, 'wb') as out:
468 with util.posixfile(filename, 'wb') as out:
466 with formatter(ui, out, topic, opts) as fm:
469 with formatter(ui, out, topic, opts) as fm:
467 yield fm
470 yield fm
468
471
469 @contextlib.contextmanager
472 @contextlib.contextmanager
470 def _neverending(fm):
473 def _neverending(fm):
471 yield fm
474 yield fm
472
475
473 def maybereopen(fm, filename, opts):
476 def maybereopen(fm, filename, opts):
474 """Create a formatter backed by file if filename specified, else return
477 """Create a formatter backed by file if filename specified, else return
475 the given formatter
478 the given formatter
476
479
477 Must be invoked using the 'with' statement. This will never call fm.end()
480 Must be invoked using the 'with' statement. This will never call fm.end()
478 of the given formatter.
481 of the given formatter.
479 """
482 """
480 if filename:
483 if filename:
481 return openformatter(fm._ui, filename, fm._topic, opts)
484 return openformatter(fm._ui, filename, fm._topic, opts)
482 else:
485 else:
483 return _neverending(fm)
486 return _neverending(fm)
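The change in this file is summed up by the templatespec named tuple: lookuptemplate() now returns templatespec(tmpl, mapfile) instead of a bare (tmpl, mapfile) pair, so callers read the two fields by name rather than unpacking by position. A minimal sketch of the resulting call pattern, with an illustrative topic and opts dict (this mirrors what loadtemplater() above does internally):

    # hedged sketch: consuming the templatespec returned by lookuptemplate()
    spec = lookuptemplate(ui, 'files', opts.get('template', ''))
    assert not (spec.tmpl and spec.mapfile)  # at most one side is set
    if spec.mapfile:
        # a style map file (e.g. map-cmdline.default): load its whole environment
        t = templater.templater.frommapfile(spec.mapfile,
                                            cache=templatekw.defaulttempl)
    else:
        # a literal template string (possibly empty), compiled with user aliases
        t = maketemplater(ui, 'files', spec.tmpl, cache=templatekw.defaulttempl)

Naming the fields also leaves room to extend the spec later without revisiting every positional unpack at the call sites.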