scmutil: introduce binnode(ctx) as paired function with intrev(ctx)...
Yuya Nishihara
r32656:55ff67ff default
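The sketch below is an editorial illustration of the pairing named in the commit message, not code taken from this diff: scmutil.binnode(ctx) is described as the node-returning counterpart of scmutil.intrev(ctx), which is consistent with the bin import being dropped from cmdutil.py below. It assumes a Mercurial tree containing this changeset is importable; the helper name revpair_for_display is made up for the example.

# Hedged illustration: how the paired helpers are meant to be used when a
# context may be the working directory (where ctx.rev()/ctx.node() are None).
from mercurial import scmutil

def revpair_for_display(ctx):
    rev = scmutil.intrev(ctx)    # integer revision, with a sentinel for wdir
    node = scmutil.binnode(ctx)  # binary node, with a sentinel for wdir
    return rev, node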
@@ -1,3586 +1,3585 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import itertools
11 import itertools
12 import os
12 import os
13 import re
13 import re
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 bin,
19 hex,
18 hex,
20 nullid,
19 nullid,
21 nullrev,
20 nullrev,
22 short,
21 short,
23 )
22 )
24
23
25 from . import (
24 from . import (
26 bookmarks,
25 bookmarks,
27 changelog,
26 changelog,
28 copies,
27 copies,
29 crecord as crecordmod,
28 crecord as crecordmod,
30 encoding,
29 encoding,
31 error,
30 error,
32 formatter,
31 formatter,
33 graphmod,
32 graphmod,
34 lock as lockmod,
33 lock as lockmod,
35 match as matchmod,
34 match as matchmod,
36 obsolete,
35 obsolete,
37 patch,
36 patch,
38 pathutil,
37 pathutil,
39 phases,
38 phases,
40 pycompat,
39 pycompat,
41 registrar,
40 registrar,
42 repair,
41 repair,
43 revlog,
42 revlog,
44 revset,
43 revset,
45 scmutil,
44 scmutil,
46 smartset,
45 smartset,
47 templatekw,
46 templatekw,
48 templater,
47 templater,
49 util,
48 util,
50 vfs as vfsmod,
49 vfs as vfsmod,
51 )
50 )
52 stringio = util.stringio
51 stringio = util.stringio
53
52
54 # templates of common command options
53 # templates of common command options
55
54
56 dryrunopts = [
55 dryrunopts = [
57 ('n', 'dry-run', None,
56 ('n', 'dry-run', None,
58 _('do not perform actions, just print output')),
57 _('do not perform actions, just print output')),
59 ]
58 ]
60
59
61 remoteopts = [
60 remoteopts = [
62 ('e', 'ssh', '',
61 ('e', 'ssh', '',
63 _('specify ssh command to use'), _('CMD')),
62 _('specify ssh command to use'), _('CMD')),
64 ('', 'remotecmd', '',
63 ('', 'remotecmd', '',
65 _('specify hg command to run on the remote side'), _('CMD')),
64 _('specify hg command to run on the remote side'), _('CMD')),
66 ('', 'insecure', None,
65 ('', 'insecure', None,
67 _('do not verify server certificate (ignoring web.cacerts config)')),
66 _('do not verify server certificate (ignoring web.cacerts config)')),
68 ]
67 ]
69
68
70 walkopts = [
69 walkopts = [
71 ('I', 'include', [],
70 ('I', 'include', [],
72 _('include names matching the given patterns'), _('PATTERN')),
71 _('include names matching the given patterns'), _('PATTERN')),
73 ('X', 'exclude', [],
72 ('X', 'exclude', [],
74 _('exclude names matching the given patterns'), _('PATTERN')),
73 _('exclude names matching the given patterns'), _('PATTERN')),
75 ]
74 ]
76
75
77 commitopts = [
76 commitopts = [
78 ('m', 'message', '',
77 ('m', 'message', '',
79 _('use text as commit message'), _('TEXT')),
78 _('use text as commit message'), _('TEXT')),
80 ('l', 'logfile', '',
79 ('l', 'logfile', '',
81 _('read commit message from file'), _('FILE')),
80 _('read commit message from file'), _('FILE')),
82 ]
81 ]
83
82
84 commitopts2 = [
83 commitopts2 = [
85 ('d', 'date', '',
84 ('d', 'date', '',
86 _('record the specified date as commit date'), _('DATE')),
85 _('record the specified date as commit date'), _('DATE')),
87 ('u', 'user', '',
86 ('u', 'user', '',
88 _('record the specified user as committer'), _('USER')),
87 _('record the specified user as committer'), _('USER')),
89 ]
88 ]
90
89
91 # hidden for now
90 # hidden for now
92 formatteropts = [
91 formatteropts = [
93 ('T', 'template', '',
92 ('T', 'template', '',
94 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
93 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
95 ]
94 ]
96
95
97 templateopts = [
96 templateopts = [
98 ('', 'style', '',
97 ('', 'style', '',
99 _('display using template map file (DEPRECATED)'), _('STYLE')),
98 _('display using template map file (DEPRECATED)'), _('STYLE')),
100 ('T', 'template', '',
99 ('T', 'template', '',
101 _('display with template'), _('TEMPLATE')),
100 _('display with template'), _('TEMPLATE')),
102 ]
101 ]
103
102
104 logopts = [
103 logopts = [
105 ('p', 'patch', None, _('show patch')),
104 ('p', 'patch', None, _('show patch')),
106 ('g', 'git', None, _('use git extended diff format')),
105 ('g', 'git', None, _('use git extended diff format')),
107 ('l', 'limit', '',
106 ('l', 'limit', '',
108 _('limit number of changes displayed'), _('NUM')),
107 _('limit number of changes displayed'), _('NUM')),
109 ('M', 'no-merges', None, _('do not show merges')),
108 ('M', 'no-merges', None, _('do not show merges')),
110 ('', 'stat', None, _('output diffstat-style summary of changes')),
109 ('', 'stat', None, _('output diffstat-style summary of changes')),
111 ('G', 'graph', None, _("show the revision DAG")),
110 ('G', 'graph', None, _("show the revision DAG")),
112 ] + templateopts
111 ] + templateopts
113
112
114 diffopts = [
113 diffopts = [
115 ('a', 'text', None, _('treat all files as text')),
114 ('a', 'text', None, _('treat all files as text')),
116 ('g', 'git', None, _('use git extended diff format')),
115 ('g', 'git', None, _('use git extended diff format')),
117 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
116 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
118 ('', 'nodates', None, _('omit dates from diff headers'))
117 ('', 'nodates', None, _('omit dates from diff headers'))
119 ]
118 ]
120
119
121 diffwsopts = [
120 diffwsopts = [
122 ('w', 'ignore-all-space', None,
121 ('w', 'ignore-all-space', None,
123 _('ignore white space when comparing lines')),
122 _('ignore white space when comparing lines')),
124 ('b', 'ignore-space-change', None,
123 ('b', 'ignore-space-change', None,
125 _('ignore changes in the amount of white space')),
124 _('ignore changes in the amount of white space')),
126 ('B', 'ignore-blank-lines', None,
125 ('B', 'ignore-blank-lines', None,
127 _('ignore changes whose lines are all blank')),
126 _('ignore changes whose lines are all blank')),
128 ]
127 ]
129
128
130 diffopts2 = [
129 diffopts2 = [
131 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
130 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
132 ('p', 'show-function', None, _('show which function each change is in')),
131 ('p', 'show-function', None, _('show which function each change is in')),
133 ('', 'reverse', None, _('produce a diff that undoes the changes')),
132 ('', 'reverse', None, _('produce a diff that undoes the changes')),
134 ] + diffwsopts + [
133 ] + diffwsopts + [
135 ('U', 'unified', '',
134 ('U', 'unified', '',
136 _('number of lines of context to show'), _('NUM')),
135 _('number of lines of context to show'), _('NUM')),
137 ('', 'stat', None, _('output diffstat-style summary of changes')),
136 ('', 'stat', None, _('output diffstat-style summary of changes')),
138 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
137 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
139 ]
138 ]
140
139
141 mergetoolopts = [
140 mergetoolopts = [
142 ('t', 'tool', '', _('specify merge tool')),
141 ('t', 'tool', '', _('specify merge tool')),
143 ]
142 ]
144
143
145 similarityopts = [
144 similarityopts = [
146 ('s', 'similarity', '',
145 ('s', 'similarity', '',
147 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
146 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
148 ]
147 ]
149
148
150 subrepoopts = [
149 subrepoopts = [
151 ('S', 'subrepos', None,
150 ('S', 'subrepos', None,
152 _('recurse into subrepositories'))
151 _('recurse into subrepositories'))
153 ]
152 ]
154
153
155 debugrevlogopts = [
154 debugrevlogopts = [
156 ('c', 'changelog', False, _('open changelog')),
155 ('c', 'changelog', False, _('open changelog')),
157 ('m', 'manifest', False, _('open manifest')),
156 ('m', 'manifest', False, _('open manifest')),
158 ('', 'dir', '', _('open directory manifest')),
157 ('', 'dir', '', _('open directory manifest')),
159 ]
158 ]
160
159
161 # special string such that everything below this line will be ignored in the
160 # special string such that everything below this line will be ignored in the
162 # editor text
161 # editor text
163 _linebelow = "^HG: ------------------------ >8 ------------------------$"
162 _linebelow = "^HG: ------------------------ >8 ------------------------$"
164
163
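# Editorial aside (hedged sketch, not part of the file being diffed): the
# _linebelow pattern above is a "scissors" marker; anything at or below a line
# matching it is meant to be dropped from the edited commit text. A minimal
# standalone way to apply such a pattern could look like this (stripbelowline
# is an invented name for illustration):
import re

scissors = "^HG: ------------------------ >8 ------------------------$"

def stripbelowline(text):
    # keep only the part of the edited message above the ">8" scissors line
    m = re.search(scissors, text, flags=re.MULTILINE)
    return text[:m.start()] if m else text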
165 def ishunk(x):
164 def ishunk(x):
166 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
165 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
167 return isinstance(x, hunkclasses)
166 return isinstance(x, hunkclasses)
168
167
169 def newandmodified(chunks, originalchunks):
168 def newandmodified(chunks, originalchunks):
170 newlyaddedandmodifiedfiles = set()
169 newlyaddedandmodifiedfiles = set()
171 for chunk in chunks:
170 for chunk in chunks:
172 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
171 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
173 originalchunks:
172 originalchunks:
174 newlyaddedandmodifiedfiles.add(chunk.header.filename())
173 newlyaddedandmodifiedfiles.add(chunk.header.filename())
175 return newlyaddedandmodifiedfiles
174 return newlyaddedandmodifiedfiles
176
175
177 def parsealiases(cmd):
176 def parsealiases(cmd):
178 return cmd.lstrip("^").split("|")
177 return cmd.lstrip("^").split("|")
179
178
180 def setupwrapcolorwrite(ui):
179 def setupwrapcolorwrite(ui):
181 # wrap ui.write so diff output can be labeled/colorized
180 # wrap ui.write so diff output can be labeled/colorized
182 def wrapwrite(orig, *args, **kw):
181 def wrapwrite(orig, *args, **kw):
183 label = kw.pop('label', '')
182 label = kw.pop('label', '')
184 for chunk, l in patch.difflabel(lambda: args):
183 for chunk, l in patch.difflabel(lambda: args):
185 orig(chunk, label=label + l)
184 orig(chunk, label=label + l)
186
185
187 oldwrite = ui.write
186 oldwrite = ui.write
188 def wrap(*args, **kwargs):
187 def wrap(*args, **kwargs):
189 return wrapwrite(oldwrite, *args, **kwargs)
188 return wrapwrite(oldwrite, *args, **kwargs)
190 setattr(ui, 'write', wrap)
189 setattr(ui, 'write', wrap)
191 return oldwrite
190 return oldwrite
192
191
193 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
192 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
194 if usecurses:
193 if usecurses:
195 if testfile:
194 if testfile:
196 recordfn = crecordmod.testdecorator(testfile,
195 recordfn = crecordmod.testdecorator(testfile,
197 crecordmod.testchunkselector)
196 crecordmod.testchunkselector)
198 else:
197 else:
199 recordfn = crecordmod.chunkselector
198 recordfn = crecordmod.chunkselector
200
199
201 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
200 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
202
201
203 else:
202 else:
204 return patch.filterpatch(ui, originalhunks, operation)
203 return patch.filterpatch(ui, originalhunks, operation)
205
204
206 def recordfilter(ui, originalhunks, operation=None):
205 def recordfilter(ui, originalhunks, operation=None):
207 """ Prompts the user to filter the originalhunks and return a list of
206 """ Prompts the user to filter the originalhunks and return a list of
208 selected hunks.
207 selected hunks.
209 *operation* is used to build ui messages to indicate to the user what
208 *operation* is used to build ui messages to indicate to the user what
210 kind of filtering they are doing: reverting, committing, shelving, etc.
209 kind of filtering they are doing: reverting, committing, shelving, etc.
211 (see patch.filterpatch).
210 (see patch.filterpatch).
212 """
211 """
213 usecurses = crecordmod.checkcurses(ui)
212 usecurses = crecordmod.checkcurses(ui)
214 testfile = ui.config('experimental', 'crecordtest', None)
213 testfile = ui.config('experimental', 'crecordtest', None)
215 oldwrite = setupwrapcolorwrite(ui)
214 oldwrite = setupwrapcolorwrite(ui)
216 try:
215 try:
217 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
216 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
218 testfile, operation)
217 testfile, operation)
219 finally:
218 finally:
220 ui.write = oldwrite
219 ui.write = oldwrite
221 return newchunks, newopts
220 return newchunks, newopts
222
221
223 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
222 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
224 filterfn, *pats, **opts):
223 filterfn, *pats, **opts):
225 from . import merge as mergemod
224 from . import merge as mergemod
226 opts = pycompat.byteskwargs(opts)
225 opts = pycompat.byteskwargs(opts)
227 if not ui.interactive():
226 if not ui.interactive():
228 if cmdsuggest:
227 if cmdsuggest:
229 msg = _('running non-interactively, use %s instead') % cmdsuggest
228 msg = _('running non-interactively, use %s instead') % cmdsuggest
230 else:
229 else:
231 msg = _('running non-interactively')
230 msg = _('running non-interactively')
232 raise error.Abort(msg)
231 raise error.Abort(msg)
233
232
234 # make sure username is set before going interactive
233 # make sure username is set before going interactive
235 if not opts.get('user'):
234 if not opts.get('user'):
236 ui.username() # raise exception, username not provided
235 ui.username() # raise exception, username not provided
237
236
238 def recordfunc(ui, repo, message, match, opts):
237 def recordfunc(ui, repo, message, match, opts):
239 """This is the generic record driver.
238 """This is the generic record driver.
240
239
241 Its job is to interactively filter local changes, and
240 Its job is to interactively filter local changes, and
242 accordingly prepare working directory into a state in which the
241 accordingly prepare working directory into a state in which the
243 job can be delegated to a non-interactive commit command such as
242 job can be delegated to a non-interactive commit command such as
244 'commit' or 'qrefresh'.
243 'commit' or 'qrefresh'.
245
244
246 After the actual job is done by non-interactive command, the
245 After the actual job is done by non-interactive command, the
247 working directory is restored to its original state.
246 working directory is restored to its original state.
248
247
249 In the end we'll record interesting changes, and everything else
248 In the end we'll record interesting changes, and everything else
250 will be left in place, so the user can continue working.
249 will be left in place, so the user can continue working.
251 """
250 """
252
251
253 checkunfinished(repo, commit=True)
252 checkunfinished(repo, commit=True)
254 wctx = repo[None]
253 wctx = repo[None]
255 merge = len(wctx.parents()) > 1
254 merge = len(wctx.parents()) > 1
256 if merge:
255 if merge:
257 raise error.Abort(_('cannot partially commit a merge '
256 raise error.Abort(_('cannot partially commit a merge '
258 '(use "hg commit" instead)'))
257 '(use "hg commit" instead)'))
259
258
260 def fail(f, msg):
259 def fail(f, msg):
261 raise error.Abort('%s: %s' % (f, msg))
260 raise error.Abort('%s: %s' % (f, msg))
262
261
263 force = opts.get('force')
262 force = opts.get('force')
264 if not force:
263 if not force:
265 vdirs = []
264 vdirs = []
266 match.explicitdir = vdirs.append
265 match.explicitdir = vdirs.append
267 match.bad = fail
266 match.bad = fail
268
267
269 status = repo.status(match=match)
268 status = repo.status(match=match)
270 if not force:
269 if not force:
271 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
270 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
272 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
271 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
273 diffopts.nodates = True
272 diffopts.nodates = True
274 diffopts.git = True
273 diffopts.git = True
275 diffopts.showfunc = True
274 diffopts.showfunc = True
276 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
275 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
277 originalchunks = patch.parsepatch(originaldiff)
276 originalchunks = patch.parsepatch(originaldiff)
278
277
279 # 1. filter patch, since we are intending to apply subset of it
278 # 1. filter patch, since we are intending to apply subset of it
280 try:
279 try:
281 chunks, newopts = filterfn(ui, originalchunks)
280 chunks, newopts = filterfn(ui, originalchunks)
282 except patch.PatchError as err:
281 except patch.PatchError as err:
283 raise error.Abort(_('error parsing patch: %s') % err)
282 raise error.Abort(_('error parsing patch: %s') % err)
284 opts.update(newopts)
283 opts.update(newopts)
285
284
286 # We need to keep a backup of files that have been newly added and
285 # We need to keep a backup of files that have been newly added and
287 # modified during the recording process because there is a previous
286 # modified during the recording process because there is a previous
288 # version without the edit in the workdir
287 # version without the edit in the workdir
289 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
288 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
290 contenders = set()
289 contenders = set()
291 for h in chunks:
290 for h in chunks:
292 try:
291 try:
293 contenders.update(set(h.files()))
292 contenders.update(set(h.files()))
294 except AttributeError:
293 except AttributeError:
295 pass
294 pass
296
295
297 changed = status.modified + status.added + status.removed
296 changed = status.modified + status.added + status.removed
298 newfiles = [f for f in changed if f in contenders]
297 newfiles = [f for f in changed if f in contenders]
299 if not newfiles:
298 if not newfiles:
300 ui.status(_('no changes to record\n'))
299 ui.status(_('no changes to record\n'))
301 return 0
300 return 0
302
301
303 modified = set(status.modified)
302 modified = set(status.modified)
304
303
305 # 2. backup changed files, so we can restore them in the end
304 # 2. backup changed files, so we can restore them in the end
306
305
307 if backupall:
306 if backupall:
308 tobackup = changed
307 tobackup = changed
309 else:
308 else:
310 tobackup = [f for f in newfiles if f in modified or f in \
309 tobackup = [f for f in newfiles if f in modified or f in \
311 newlyaddedandmodifiedfiles]
310 newlyaddedandmodifiedfiles]
312 backups = {}
311 backups = {}
313 if tobackup:
312 if tobackup:
314 backupdir = repo.vfs.join('record-backups')
313 backupdir = repo.vfs.join('record-backups')
315 try:
314 try:
316 os.mkdir(backupdir)
315 os.mkdir(backupdir)
317 except OSError as err:
316 except OSError as err:
318 if err.errno != errno.EEXIST:
317 if err.errno != errno.EEXIST:
319 raise
318 raise
320 try:
319 try:
321 # backup continues
320 # backup continues
322 for f in tobackup:
321 for f in tobackup:
323 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
322 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
324 dir=backupdir)
323 dir=backupdir)
325 os.close(fd)
324 os.close(fd)
326 ui.debug('backup %r as %r\n' % (f, tmpname))
325 ui.debug('backup %r as %r\n' % (f, tmpname))
327 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
326 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
328 backups[f] = tmpname
327 backups[f] = tmpname
329
328
330 fp = stringio()
329 fp = stringio()
331 for c in chunks:
330 for c in chunks:
332 fname = c.filename()
331 fname = c.filename()
333 if fname in backups:
332 if fname in backups:
334 c.write(fp)
333 c.write(fp)
335 dopatch = fp.tell()
334 dopatch = fp.tell()
336 fp.seek(0)
335 fp.seek(0)
337
336
338 # 2.5 optionally review / modify patch in text editor
337 # 2.5 optionally review / modify patch in text editor
339 if opts.get('review', False):
338 if opts.get('review', False):
340 patchtext = (crecordmod.diffhelptext
339 patchtext = (crecordmod.diffhelptext
341 + crecordmod.patchhelptext
340 + crecordmod.patchhelptext
342 + fp.read())
341 + fp.read())
343 reviewedpatch = ui.edit(patchtext, "",
342 reviewedpatch = ui.edit(patchtext, "",
344 extra={"suffix": ".diff"},
343 extra={"suffix": ".diff"},
345 repopath=repo.path)
344 repopath=repo.path)
346 fp.truncate(0)
345 fp.truncate(0)
347 fp.write(reviewedpatch)
346 fp.write(reviewedpatch)
348 fp.seek(0)
347 fp.seek(0)
349
348
350 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
349 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
351 # 3a. apply filtered patch to clean repo (clean)
350 # 3a. apply filtered patch to clean repo (clean)
352 if backups:
351 if backups:
353 # Equivalent to hg.revert
352 # Equivalent to hg.revert
354 m = scmutil.matchfiles(repo, backups.keys())
353 m = scmutil.matchfiles(repo, backups.keys())
355 mergemod.update(repo, repo.dirstate.p1(),
354 mergemod.update(repo, repo.dirstate.p1(),
356 False, True, matcher=m)
355 False, True, matcher=m)
357
356
358 # 3b. (apply)
357 # 3b. (apply)
359 if dopatch:
358 if dopatch:
360 try:
359 try:
361 ui.debug('applying patch\n')
360 ui.debug('applying patch\n')
362 ui.debug(fp.getvalue())
361 ui.debug(fp.getvalue())
363 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
362 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
364 except patch.PatchError as err:
363 except patch.PatchError as err:
365 raise error.Abort(str(err))
364 raise error.Abort(str(err))
366 del fp
365 del fp
367
366
368 # 4. We prepared working directory according to filtered
367 # 4. We prepared working directory according to filtered
369 # patch. Now is the time to delegate the job to
368 # patch. Now is the time to delegate the job to
370 # commit/qrefresh or the like!
369 # commit/qrefresh or the like!
371
370
372 # Make all of the pathnames absolute.
371 # Make all of the pathnames absolute.
373 newfiles = [repo.wjoin(nf) for nf in newfiles]
372 newfiles = [repo.wjoin(nf) for nf in newfiles]
374 return commitfunc(ui, repo, *newfiles, **opts)
373 return commitfunc(ui, repo, *newfiles, **opts)
375 finally:
374 finally:
376 # 5. finally restore backed-up files
375 # 5. finally restore backed-up files
377 try:
376 try:
378 dirstate = repo.dirstate
377 dirstate = repo.dirstate
379 for realname, tmpname in backups.iteritems():
378 for realname, tmpname in backups.iteritems():
380 ui.debug('restoring %r to %r\n' % (tmpname, realname))
379 ui.debug('restoring %r to %r\n' % (tmpname, realname))
381
380
382 if dirstate[realname] == 'n':
381 if dirstate[realname] == 'n':
383 # without normallookup, restoring timestamp
382 # without normallookup, restoring timestamp
384 # may cause partially committed files
383 # may cause partially committed files
385 # to be treated as unmodified
384 # to be treated as unmodified
386 dirstate.normallookup(realname)
385 dirstate.normallookup(realname)
387
386
388 # copystat=True here and above are a hack to trick any
387 # copystat=True here and above are a hack to trick any
389 # editors that have f open into thinking we haven't modified them.
388 # editors that have f open into thinking we haven't modified them.
390 #
389 #
391 # Also note that this is racy as an editor could notice the
390 # Also note that this is racy as an editor could notice the
392 # file's mtime before we've finished writing it.
391 # file's mtime before we've finished writing it.
393 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
392 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
394 os.unlink(tmpname)
393 os.unlink(tmpname)
395 if tobackup:
394 if tobackup:
396 os.rmdir(backupdir)
395 os.rmdir(backupdir)
397 except OSError:
396 except OSError:
398 pass
397 pass
399
398
400 def recordinwlock(ui, repo, message, match, opts):
399 def recordinwlock(ui, repo, message, match, opts):
401 with repo.wlock():
400 with repo.wlock():
402 return recordfunc(ui, repo, message, match, opts)
401 return recordfunc(ui, repo, message, match, opts)
403
402
404 return commit(ui, repo, recordinwlock, pats, opts)
403 return commit(ui, repo, recordinwlock, pats, opts)
405
404
406 def findpossible(cmd, table, strict=False):
405 def findpossible(cmd, table, strict=False):
407 """
406 """
408 Return cmd -> (aliases, command table entry)
407 Return cmd -> (aliases, command table entry)
409 for each matching command.
408 for each matching command.
410 Return debug commands (or their aliases) only if no normal command matches.
409 Return debug commands (or their aliases) only if no normal command matches.
411 """
410 """
412 choice = {}
411 choice = {}
413 debugchoice = {}
412 debugchoice = {}
414
413
415 if cmd in table:
414 if cmd in table:
416 # short-circuit exact matches, "log" alias beats "^log|history"
415 # short-circuit exact matches, "log" alias beats "^log|history"
417 keys = [cmd]
416 keys = [cmd]
418 else:
417 else:
419 keys = table.keys()
418 keys = table.keys()
420
419
421 allcmds = []
420 allcmds = []
422 for e in keys:
421 for e in keys:
423 aliases = parsealiases(e)
422 aliases = parsealiases(e)
424 allcmds.extend(aliases)
423 allcmds.extend(aliases)
425 found = None
424 found = None
426 if cmd in aliases:
425 if cmd in aliases:
427 found = cmd
426 found = cmd
428 elif not strict:
427 elif not strict:
429 for a in aliases:
428 for a in aliases:
430 if a.startswith(cmd):
429 if a.startswith(cmd):
431 found = a
430 found = a
432 break
431 break
433 if found is not None:
432 if found is not None:
434 if aliases[0].startswith("debug") or found.startswith("debug"):
433 if aliases[0].startswith("debug") or found.startswith("debug"):
435 debugchoice[found] = (aliases, table[e])
434 debugchoice[found] = (aliases, table[e])
436 else:
435 else:
437 choice[found] = (aliases, table[e])
436 choice[found] = (aliases, table[e])
438
437
439 if not choice and debugchoice:
438 if not choice and debugchoice:
440 choice = debugchoice
439 choice = debugchoice
441
440
442 return choice, allcmds
441 return choice, allcmds
443
442
444 def findcmd(cmd, table, strict=True):
443 def findcmd(cmd, table, strict=True):
445 """Return (aliases, command table entry) for command string."""
444 """Return (aliases, command table entry) for command string."""
446 choice, allcmds = findpossible(cmd, table, strict)
445 choice, allcmds = findpossible(cmd, table, strict)
447
446
448 if cmd in choice:
447 if cmd in choice:
449 return choice[cmd]
448 return choice[cmd]
450
449
451 if len(choice) > 1:
450 if len(choice) > 1:
452 clist = sorted(choice)
451 clist = sorted(choice)
453 raise error.AmbiguousCommand(cmd, clist)
452 raise error.AmbiguousCommand(cmd, clist)
454
453
455 if choice:
454 if choice:
456 return choice.values()[0]
455 return choice.values()[0]
457
456
458 raise error.UnknownCommand(cmd, allcmds)
457 raise error.UnknownCommand(cmd, allcmds)
459
458
460 def findrepo(p):
459 def findrepo(p):
461 while not os.path.isdir(os.path.join(p, ".hg")):
460 while not os.path.isdir(os.path.join(p, ".hg")):
462 oldp, p = p, os.path.dirname(p)
461 oldp, p = p, os.path.dirname(p)
463 if p == oldp:
462 if p == oldp:
464 return None
463 return None
465
464
466 return p
465 return p
467
466
468 def bailifchanged(repo, merge=True, hint=None):
467 def bailifchanged(repo, merge=True, hint=None):
469 """ enforce the precondition that working directory must be clean.
468 """ enforce the precondition that working directory must be clean.
470
469
471 'merge' can be set to false if a pending uncommitted merge should be
470 'merge' can be set to false if a pending uncommitted merge should be
472 ignored (such as when 'update --check' runs).
471 ignored (such as when 'update --check' runs).
473
472
474 'hint' is the usual hint given to Abort exception.
473 'hint' is the usual hint given to Abort exception.
475 """
474 """
476
475
477 if merge and repo.dirstate.p2() != nullid:
476 if merge and repo.dirstate.p2() != nullid:
478 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
477 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
479 modified, added, removed, deleted = repo.status()[:4]
478 modified, added, removed, deleted = repo.status()[:4]
480 if modified or added or removed or deleted:
479 if modified or added or removed or deleted:
481 raise error.Abort(_('uncommitted changes'), hint=hint)
480 raise error.Abort(_('uncommitted changes'), hint=hint)
482 ctx = repo[None]
481 ctx = repo[None]
483 for s in sorted(ctx.substate):
482 for s in sorted(ctx.substate):
484 ctx.sub(s).bailifchanged(hint=hint)
483 ctx.sub(s).bailifchanged(hint=hint)
485
484
486 def logmessage(ui, opts):
485 def logmessage(ui, opts):
487 """ get the log message according to -m and -l option """
486 """ get the log message according to -m and -l option """
488 message = opts.get('message')
487 message = opts.get('message')
489 logfile = opts.get('logfile')
488 logfile = opts.get('logfile')
490
489
491 if message and logfile:
490 if message and logfile:
492 raise error.Abort(_('options --message and --logfile are mutually '
491 raise error.Abort(_('options --message and --logfile are mutually '
493 'exclusive'))
492 'exclusive'))
494 if not message and logfile:
493 if not message and logfile:
495 try:
494 try:
496 if isstdiofilename(logfile):
495 if isstdiofilename(logfile):
497 message = ui.fin.read()
496 message = ui.fin.read()
498 else:
497 else:
499 message = '\n'.join(util.readfile(logfile).splitlines())
498 message = '\n'.join(util.readfile(logfile).splitlines())
500 except IOError as inst:
499 except IOError as inst:
501 raise error.Abort(_("can't read commit message '%s': %s") %
500 raise error.Abort(_("can't read commit message '%s': %s") %
502 (logfile, inst.strerror))
501 (logfile, inst.strerror))
503 return message
502 return message
504
503
505 def mergeeditform(ctxorbool, baseformname):
504 def mergeeditform(ctxorbool, baseformname):
506 """return appropriate editform name (referencing a committemplate)
505 """return appropriate editform name (referencing a committemplate)
507
506
508 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
507 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
509 a merge is being committed.
508 a merge is being committed.
510
509
511 This returns baseformname with '.merge' appended if it is a merge,
510 This returns baseformname with '.merge' appended if it is a merge,
512 otherwise '.normal' is appended.
511 otherwise '.normal' is appended.
513 """
512 """
514 if isinstance(ctxorbool, bool):
513 if isinstance(ctxorbool, bool):
515 if ctxorbool:
514 if ctxorbool:
516 return baseformname + ".merge"
515 return baseformname + ".merge"
517 elif 1 < len(ctxorbool.parents()):
516 elif 1 < len(ctxorbool.parents()):
518 return baseformname + ".merge"
517 return baseformname + ".merge"
519
518
520 return baseformname + ".normal"
519 return baseformname + ".normal"
521
520
522 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
521 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
523 editform='', **opts):
522 editform='', **opts):
524 """get appropriate commit message editor according to '--edit' option
523 """get appropriate commit message editor according to '--edit' option
525
524
526 'finishdesc' is a function to be called with the edited commit message
525 'finishdesc' is a function to be called with the edited commit message
527 (= 'description' of the new changeset) just after editing, but
526 (= 'description' of the new changeset) just after editing, but
528 before checking emptiness. It should return the actual text to be
527 before checking emptiness. It should return the actual text to be
529 stored into history. This allows changing the description before
528 stored into history. This allows changing the description before
530 storing.
529 storing.
531
530
532 'extramsg' is an extra message to be shown in the editor instead of
531 'extramsg' is an extra message to be shown in the editor instead of
533 the 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
532 the 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
534 are automatically added.
533 are automatically added.
535
534
536 'editform' is a dot-separated list of names, to distinguish
535 'editform' is a dot-separated list of names, to distinguish
537 the purpose of commit text editing.
536 the purpose of commit text editing.
538
537
539 'getcommiteditor' returns 'commitforceeditor' regardless of
538 'getcommiteditor' returns 'commitforceeditor' regardless of
540 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
539 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
541 they are specific to usage in MQ.
540 they are specific to usage in MQ.
542 """
541 """
543 if edit or finishdesc or extramsg:
542 if edit or finishdesc or extramsg:
544 return lambda r, c, s: commitforceeditor(r, c, s,
543 return lambda r, c, s: commitforceeditor(r, c, s,
545 finishdesc=finishdesc,
544 finishdesc=finishdesc,
546 extramsg=extramsg,
545 extramsg=extramsg,
547 editform=editform)
546 editform=editform)
548 elif editform:
547 elif editform:
549 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
548 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
550 else:
549 else:
551 return commiteditor
550 return commiteditor
552
551
553 def loglimit(opts):
552 def loglimit(opts):
554 """get the log limit according to option -l/--limit"""
553 """get the log limit according to option -l/--limit"""
555 limit = opts.get('limit')
554 limit = opts.get('limit')
556 if limit:
555 if limit:
557 try:
556 try:
558 limit = int(limit)
557 limit = int(limit)
559 except ValueError:
558 except ValueError:
560 raise error.Abort(_('limit must be a positive integer'))
559 raise error.Abort(_('limit must be a positive integer'))
561 if limit <= 0:
560 if limit <= 0:
562 raise error.Abort(_('limit must be positive'))
561 raise error.Abort(_('limit must be positive'))
563 else:
562 else:
564 limit = None
563 limit = None
565 return limit
564 return limit
566
565
567 def makefilename(repo, pat, node, desc=None,
566 def makefilename(repo, pat, node, desc=None,
568 total=None, seqno=None, revwidth=None, pathname=None):
567 total=None, seqno=None, revwidth=None, pathname=None):
569 node_expander = {
568 node_expander = {
570 'H': lambda: hex(node),
569 'H': lambda: hex(node),
571 'R': lambda: str(repo.changelog.rev(node)),
570 'R': lambda: str(repo.changelog.rev(node)),
572 'h': lambda: short(node),
571 'h': lambda: short(node),
573 'm': lambda: re.sub('[^\w]', '_', str(desc))
572 'm': lambda: re.sub('[^\w]', '_', str(desc))
574 }
573 }
575 expander = {
574 expander = {
576 '%': lambda: '%',
575 '%': lambda: '%',
577 'b': lambda: os.path.basename(repo.root),
576 'b': lambda: os.path.basename(repo.root),
578 }
577 }
579
578
580 try:
579 try:
581 if node:
580 if node:
582 expander.update(node_expander)
581 expander.update(node_expander)
583 if node:
582 if node:
584 expander['r'] = (lambda:
583 expander['r'] = (lambda:
585 str(repo.changelog.rev(node)).zfill(revwidth or 0))
584 str(repo.changelog.rev(node)).zfill(revwidth or 0))
586 if total is not None:
585 if total is not None:
587 expander['N'] = lambda: str(total)
586 expander['N'] = lambda: str(total)
588 if seqno is not None:
587 if seqno is not None:
589 expander['n'] = lambda: str(seqno)
588 expander['n'] = lambda: str(seqno)
590 if total is not None and seqno is not None:
589 if total is not None and seqno is not None:
591 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
590 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
592 if pathname is not None:
591 if pathname is not None:
593 expander['s'] = lambda: os.path.basename(pathname)
592 expander['s'] = lambda: os.path.basename(pathname)
594 expander['d'] = lambda: os.path.dirname(pathname) or '.'
593 expander['d'] = lambda: os.path.dirname(pathname) or '.'
595 expander['p'] = lambda: pathname
594 expander['p'] = lambda: pathname
596
595
597 newname = []
596 newname = []
598 patlen = len(pat)
597 patlen = len(pat)
599 i = 0
598 i = 0
600 while i < patlen:
599 while i < patlen:
601 c = pat[i:i + 1]
600 c = pat[i:i + 1]
602 if c == '%':
601 if c == '%':
603 i += 1
602 i += 1
604 c = pat[i:i + 1]
603 c = pat[i:i + 1]
605 c = expander[c]()
604 c = expander[c]()
606 newname.append(c)
605 newname.append(c)
607 i += 1
606 i += 1
608 return ''.join(newname)
607 return ''.join(newname)
609 except KeyError as inst:
608 except KeyError as inst:
610 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
609 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
611 inst.args[0])
610 inst.args[0])
612
611
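# Editorial aside (hedged usage sketch, not part of the diffed file): the
# expander table in makefilename() implements printf-like escapes such as %h
# (short node), %R (revision number), %b (repo basename), %n/%N (sequence
# number and total) and %s/%d/%p (pathname pieces). A call typically looks
# like the following; 'repo' and 'node' here are placeholders, not values
# taken from this file:
#
#   fn = makefilename(repo, 'export-%R-%h.patch', node, seqno=1, total=3)
#   # -> e.g. 'export-42-1234567890ab.patch'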
613 def isstdiofilename(pat):
612 def isstdiofilename(pat):
614 """True if the given pat looks like a filename denoting stdin/stdout"""
613 """True if the given pat looks like a filename denoting stdin/stdout"""
615 return not pat or pat == '-'
614 return not pat or pat == '-'
616
615
617 class _unclosablefile(object):
616 class _unclosablefile(object):
618 def __init__(self, fp):
617 def __init__(self, fp):
619 self._fp = fp
618 self._fp = fp
620
619
621 def close(self):
620 def close(self):
622 pass
621 pass
623
622
624 def __iter__(self):
623 def __iter__(self):
625 return iter(self._fp)
624 return iter(self._fp)
626
625
627 def __getattr__(self, attr):
626 def __getattr__(self, attr):
628 return getattr(self._fp, attr)
627 return getattr(self._fp, attr)
629
628
630 def __enter__(self):
629 def __enter__(self):
631 return self
630 return self
632
631
633 def __exit__(self, exc_type, exc_value, exc_tb):
632 def __exit__(self, exc_type, exc_value, exc_tb):
634 pass
633 pass
635
634
636 def makefileobj(repo, pat, node=None, desc=None, total=None,
635 def makefileobj(repo, pat, node=None, desc=None, total=None,
637 seqno=None, revwidth=None, mode='wb', modemap=None,
636 seqno=None, revwidth=None, mode='wb', modemap=None,
638 pathname=None):
637 pathname=None):
639
638
640 writable = mode not in ('r', 'rb')
639 writable = mode not in ('r', 'rb')
641
640
642 if isstdiofilename(pat):
641 if isstdiofilename(pat):
643 if writable:
642 if writable:
644 fp = repo.ui.fout
643 fp = repo.ui.fout
645 else:
644 else:
646 fp = repo.ui.fin
645 fp = repo.ui.fin
647 return _unclosablefile(fp)
646 return _unclosablefile(fp)
648 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
647 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
649 if modemap is not None:
648 if modemap is not None:
650 mode = modemap.get(fn, mode)
649 mode = modemap.get(fn, mode)
651 if mode == 'wb':
650 if mode == 'wb':
652 modemap[fn] = 'ab'
651 modemap[fn] = 'ab'
653 return open(fn, mode)
652 return open(fn, mode)
654
653
655 def openrevlog(repo, cmd, file_, opts):
654 def openrevlog(repo, cmd, file_, opts):
656 """opens the changelog, manifest, a filelog or a given revlog"""
655 """opens the changelog, manifest, a filelog or a given revlog"""
657 cl = opts['changelog']
656 cl = opts['changelog']
658 mf = opts['manifest']
657 mf = opts['manifest']
659 dir = opts['dir']
658 dir = opts['dir']
660 msg = None
659 msg = None
661 if cl and mf:
660 if cl and mf:
662 msg = _('cannot specify --changelog and --manifest at the same time')
661 msg = _('cannot specify --changelog and --manifest at the same time')
663 elif cl and dir:
662 elif cl and dir:
664 msg = _('cannot specify --changelog and --dir at the same time')
663 msg = _('cannot specify --changelog and --dir at the same time')
665 elif cl or mf or dir:
664 elif cl or mf or dir:
666 if file_:
665 if file_:
667 msg = _('cannot specify filename with --changelog or --manifest')
666 msg = _('cannot specify filename with --changelog or --manifest')
668 elif not repo:
667 elif not repo:
669 msg = _('cannot specify --changelog or --manifest or --dir '
668 msg = _('cannot specify --changelog or --manifest or --dir '
670 'without a repository')
669 'without a repository')
671 if msg:
670 if msg:
672 raise error.Abort(msg)
671 raise error.Abort(msg)
673
672
674 r = None
673 r = None
675 if repo:
674 if repo:
676 if cl:
675 if cl:
677 r = repo.unfiltered().changelog
676 r = repo.unfiltered().changelog
678 elif dir:
677 elif dir:
679 if 'treemanifest' not in repo.requirements:
678 if 'treemanifest' not in repo.requirements:
680 raise error.Abort(_("--dir can only be used on repos with "
679 raise error.Abort(_("--dir can only be used on repos with "
681 "treemanifest enabled"))
680 "treemanifest enabled"))
682 dirlog = repo.manifestlog._revlog.dirlog(dir)
681 dirlog = repo.manifestlog._revlog.dirlog(dir)
683 if len(dirlog):
682 if len(dirlog):
684 r = dirlog
683 r = dirlog
685 elif mf:
684 elif mf:
686 r = repo.manifestlog._revlog
685 r = repo.manifestlog._revlog
687 elif file_:
686 elif file_:
688 filelog = repo.file(file_)
687 filelog = repo.file(file_)
689 if len(filelog):
688 if len(filelog):
690 r = filelog
689 r = filelog
691 if not r:
690 if not r:
692 if not file_:
691 if not file_:
693 raise error.CommandError(cmd, _('invalid arguments'))
692 raise error.CommandError(cmd, _('invalid arguments'))
694 if not os.path.isfile(file_):
693 if not os.path.isfile(file_):
695 raise error.Abort(_("revlog '%s' not found") % file_)
694 raise error.Abort(_("revlog '%s' not found") % file_)
696 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
695 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
697 file_[:-2] + ".i")
696 file_[:-2] + ".i")
698 return r
697 return r
699
698
700 def copy(ui, repo, pats, opts, rename=False):
699 def copy(ui, repo, pats, opts, rename=False):
701 # called with the repo lock held
700 # called with the repo lock held
702 #
701 #
703 # hgsep => pathname that uses "/" to separate directories
702 # hgsep => pathname that uses "/" to separate directories
704 # ossep => pathname that uses os.sep to separate directories
703 # ossep => pathname that uses os.sep to separate directories
705 cwd = repo.getcwd()
704 cwd = repo.getcwd()
706 targets = {}
705 targets = {}
707 after = opts.get("after")
706 after = opts.get("after")
708 dryrun = opts.get("dry_run")
707 dryrun = opts.get("dry_run")
709 wctx = repo[None]
708 wctx = repo[None]
710
709
711 def walkpat(pat):
710 def walkpat(pat):
712 srcs = []
711 srcs = []
713 if after:
712 if after:
714 badstates = '?'
713 badstates = '?'
715 else:
714 else:
716 badstates = '?r'
715 badstates = '?r'
717 m = scmutil.match(wctx, [pat], opts, globbed=True)
716 m = scmutil.match(wctx, [pat], opts, globbed=True)
718 for abs in wctx.walk(m):
717 for abs in wctx.walk(m):
719 state = repo.dirstate[abs]
718 state = repo.dirstate[abs]
720 rel = m.rel(abs)
719 rel = m.rel(abs)
721 exact = m.exact(abs)
720 exact = m.exact(abs)
722 if state in badstates:
721 if state in badstates:
723 if exact and state == '?':
722 if exact and state == '?':
724 ui.warn(_('%s: not copying - file is not managed\n') % rel)
723 ui.warn(_('%s: not copying - file is not managed\n') % rel)
725 if exact and state == 'r':
724 if exact and state == 'r':
726 ui.warn(_('%s: not copying - file has been marked for'
725 ui.warn(_('%s: not copying - file has been marked for'
727 ' remove\n') % rel)
726 ' remove\n') % rel)
728 continue
727 continue
729 # abs: hgsep
728 # abs: hgsep
730 # rel: ossep
729 # rel: ossep
731 srcs.append((abs, rel, exact))
730 srcs.append((abs, rel, exact))
732 return srcs
731 return srcs
733
732
734 # abssrc: hgsep
733 # abssrc: hgsep
735 # relsrc: ossep
734 # relsrc: ossep
736 # otarget: ossep
735 # otarget: ossep
737 def copyfile(abssrc, relsrc, otarget, exact):
736 def copyfile(abssrc, relsrc, otarget, exact):
738 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
737 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
739 if '/' in abstarget:
738 if '/' in abstarget:
740 # We cannot normalize abstarget itself, this would prevent
739 # We cannot normalize abstarget itself, this would prevent
741 # case only renames, like a => A.
740 # case only renames, like a => A.
742 abspath, absname = abstarget.rsplit('/', 1)
741 abspath, absname = abstarget.rsplit('/', 1)
743 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
742 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
744 reltarget = repo.pathto(abstarget, cwd)
743 reltarget = repo.pathto(abstarget, cwd)
745 target = repo.wjoin(abstarget)
744 target = repo.wjoin(abstarget)
746 src = repo.wjoin(abssrc)
745 src = repo.wjoin(abssrc)
747 state = repo.dirstate[abstarget]
746 state = repo.dirstate[abstarget]
748
747
749 scmutil.checkportable(ui, abstarget)
748 scmutil.checkportable(ui, abstarget)
750
749
751 # check for collisions
750 # check for collisions
752 prevsrc = targets.get(abstarget)
751 prevsrc = targets.get(abstarget)
753 if prevsrc is not None:
752 if prevsrc is not None:
754 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
753 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
755 (reltarget, repo.pathto(abssrc, cwd),
754 (reltarget, repo.pathto(abssrc, cwd),
756 repo.pathto(prevsrc, cwd)))
755 repo.pathto(prevsrc, cwd)))
757 return
756 return
758
757
759 # check for overwrites
758 # check for overwrites
760 exists = os.path.lexists(target)
759 exists = os.path.lexists(target)
761 samefile = False
760 samefile = False
762 if exists and abssrc != abstarget:
761 if exists and abssrc != abstarget:
763 if (repo.dirstate.normalize(abssrc) ==
762 if (repo.dirstate.normalize(abssrc) ==
764 repo.dirstate.normalize(abstarget)):
763 repo.dirstate.normalize(abstarget)):
765 if not rename:
764 if not rename:
766 ui.warn(_("%s: can't copy - same file\n") % reltarget)
765 ui.warn(_("%s: can't copy - same file\n") % reltarget)
767 return
766 return
768 exists = False
767 exists = False
769 samefile = True
768 samefile = True
770
769
771 if not after and exists or after and state in 'mn':
770 if not after and exists or after and state in 'mn':
772 if not opts['force']:
771 if not opts['force']:
773 if state in 'mn':
772 if state in 'mn':
774 msg = _('%s: not overwriting - file already committed\n')
773 msg = _('%s: not overwriting - file already committed\n')
775 if after:
774 if after:
776 flags = '--after --force'
775 flags = '--after --force'
777 else:
776 else:
778 flags = '--force'
777 flags = '--force'
779 if rename:
778 if rename:
780 hint = _('(hg rename %s to replace the file by '
779 hint = _('(hg rename %s to replace the file by '
781 'recording a rename)\n') % flags
780 'recording a rename)\n') % flags
782 else:
781 else:
783 hint = _('(hg copy %s to replace the file by '
782 hint = _('(hg copy %s to replace the file by '
784 'recording a copy)\n') % flags
783 'recording a copy)\n') % flags
785 else:
784 else:
786 msg = _('%s: not overwriting - file exists\n')
785 msg = _('%s: not overwriting - file exists\n')
787 if rename:
786 if rename:
788 hint = _('(hg rename --after to record the rename)\n')
787 hint = _('(hg rename --after to record the rename)\n')
789 else:
788 else:
790 hint = _('(hg copy --after to record the copy)\n')
789 hint = _('(hg copy --after to record the copy)\n')
791 ui.warn(msg % reltarget)
790 ui.warn(msg % reltarget)
792 ui.warn(hint)
791 ui.warn(hint)
793 return
792 return
794
793
795 if after:
794 if after:
796 if not exists:
795 if not exists:
797 if rename:
796 if rename:
798 ui.warn(_('%s: not recording move - %s does not exist\n') %
797 ui.warn(_('%s: not recording move - %s does not exist\n') %
799 (relsrc, reltarget))
798 (relsrc, reltarget))
800 else:
799 else:
801 ui.warn(_('%s: not recording copy - %s does not exist\n') %
800 ui.warn(_('%s: not recording copy - %s does not exist\n') %
802 (relsrc, reltarget))
801 (relsrc, reltarget))
803 return
802 return
804 elif not dryrun:
803 elif not dryrun:
805 try:
804 try:
806 if exists:
805 if exists:
807 os.unlink(target)
806 os.unlink(target)
808 targetdir = os.path.dirname(target) or '.'
807 targetdir = os.path.dirname(target) or '.'
809 if not os.path.isdir(targetdir):
808 if not os.path.isdir(targetdir):
810 os.makedirs(targetdir)
809 os.makedirs(targetdir)
811 if samefile:
810 if samefile:
812 tmp = target + "~hgrename"
811 tmp = target + "~hgrename"
813 os.rename(src, tmp)
812 os.rename(src, tmp)
814 os.rename(tmp, target)
813 os.rename(tmp, target)
815 else:
814 else:
816 util.copyfile(src, target)
815 util.copyfile(src, target)
817 srcexists = True
816 srcexists = True
818 except IOError as inst:
817 except IOError as inst:
819 if inst.errno == errno.ENOENT:
818 if inst.errno == errno.ENOENT:
820 ui.warn(_('%s: deleted in working directory\n') % relsrc)
819 ui.warn(_('%s: deleted in working directory\n') % relsrc)
821 srcexists = False
820 srcexists = False
822 else:
821 else:
823 ui.warn(_('%s: cannot copy - %s\n') %
822 ui.warn(_('%s: cannot copy - %s\n') %
824 (relsrc, inst.strerror))
823 (relsrc, inst.strerror))
825 return True # report a failure
824 return True # report a failure
826
825
827 if ui.verbose or not exact:
826 if ui.verbose or not exact:
828 if rename:
827 if rename:
829 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
828 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
830 else:
829 else:
831 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
830 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
832
831
833 targets[abstarget] = abssrc
832 targets[abstarget] = abssrc
834
833
835 # fix up dirstate
834 # fix up dirstate
836 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
835 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
837 dryrun=dryrun, cwd=cwd)
836 dryrun=dryrun, cwd=cwd)
838 if rename and not dryrun:
837 if rename and not dryrun:
839 if not after and srcexists and not samefile:
838 if not after and srcexists and not samefile:
840 repo.wvfs.unlinkpath(abssrc)
839 repo.wvfs.unlinkpath(abssrc)
841 wctx.forget([abssrc])
840 wctx.forget([abssrc])
842
841
843 # pat: ossep
842 # pat: ossep
844 # dest ossep
843 # dest ossep
845 # srcs: list of (hgsep, hgsep, ossep, bool)
844 # srcs: list of (hgsep, hgsep, ossep, bool)
846 # return: function that takes hgsep and returns ossep
845 # return: function that takes hgsep and returns ossep
847 def targetpathfn(pat, dest, srcs):
846 def targetpathfn(pat, dest, srcs):
848 if os.path.isdir(pat):
847 if os.path.isdir(pat):
849 abspfx = pathutil.canonpath(repo.root, cwd, pat)
848 abspfx = pathutil.canonpath(repo.root, cwd, pat)
850 abspfx = util.localpath(abspfx)
849 abspfx = util.localpath(abspfx)
851 if destdirexists:
850 if destdirexists:
852 striplen = len(os.path.split(abspfx)[0])
851 striplen = len(os.path.split(abspfx)[0])
853 else:
852 else:
854 striplen = len(abspfx)
853 striplen = len(abspfx)
855 if striplen:
854 if striplen:
856 striplen += len(pycompat.ossep)
855 striplen += len(pycompat.ossep)
857 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
856 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
858 elif destdirexists:
857 elif destdirexists:
859 res = lambda p: os.path.join(dest,
858 res = lambda p: os.path.join(dest,
860 os.path.basename(util.localpath(p)))
859 os.path.basename(util.localpath(p)))
861 else:
860 else:
862 res = lambda p: dest
861 res = lambda p: dest
863 return res
862 return res
864
863
865 # pat: ossep
864 # pat: ossep
866 # dest ossep
865 # dest ossep
867 # srcs: list of (hgsep, hgsep, ossep, bool)
866 # srcs: list of (hgsep, hgsep, ossep, bool)
868 # return: function that takes hgsep and returns ossep
867 # return: function that takes hgsep and returns ossep
869 def targetpathafterfn(pat, dest, srcs):
868 def targetpathafterfn(pat, dest, srcs):
870 if matchmod.patkind(pat):
869 if matchmod.patkind(pat):
871 # a mercurial pattern
870 # a mercurial pattern
872 res = lambda p: os.path.join(dest,
871 res = lambda p: os.path.join(dest,
873 os.path.basename(util.localpath(p)))
872 os.path.basename(util.localpath(p)))
874 else:
873 else:
875 abspfx = pathutil.canonpath(repo.root, cwd, pat)
874 abspfx = pathutil.canonpath(repo.root, cwd, pat)
876 if len(abspfx) < len(srcs[0][0]):
875 if len(abspfx) < len(srcs[0][0]):
877 # A directory. Either the target path contains the last
876 # A directory. Either the target path contains the last
878 # component of the source path or it does not.
877 # component of the source path or it does not.
879 def evalpath(striplen):
878 def evalpath(striplen):
880 score = 0
879 score = 0
881 for s in srcs:
880 for s in srcs:
882 t = os.path.join(dest, util.localpath(s[0])[striplen:])
881 t = os.path.join(dest, util.localpath(s[0])[striplen:])
883 if os.path.lexists(t):
882 if os.path.lexists(t):
884 score += 1
883 score += 1
885 return score
884 return score
886
885
887 abspfx = util.localpath(abspfx)
886 abspfx = util.localpath(abspfx)
888 striplen = len(abspfx)
887 striplen = len(abspfx)
889 if striplen:
888 if striplen:
890 striplen += len(pycompat.ossep)
889 striplen += len(pycompat.ossep)
891 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
890 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
892 score = evalpath(striplen)
891 score = evalpath(striplen)
893 striplen1 = len(os.path.split(abspfx)[0])
892 striplen1 = len(os.path.split(abspfx)[0])
894 if striplen1:
893 if striplen1:
895 striplen1 += len(pycompat.ossep)
894 striplen1 += len(pycompat.ossep)
896 if evalpath(striplen1) > score:
895 if evalpath(striplen1) > score:
897 striplen = striplen1
896 striplen = striplen1
898 res = lambda p: os.path.join(dest,
897 res = lambda p: os.path.join(dest,
899 util.localpath(p)[striplen:])
898 util.localpath(p)[striplen:])
900 else:
899 else:
901 # a file
900 # a file
902 if destdirexists:
901 if destdirexists:
903 res = lambda p: os.path.join(dest,
902 res = lambda p: os.path.join(dest,
904 os.path.basename(util.localpath(p)))
903 os.path.basename(util.localpath(p)))
905 else:
904 else:
906 res = lambda p: dest
905 res = lambda p: dest
907 return res
906 return res
908
907
909 pats = scmutil.expandpats(pats)
908 pats = scmutil.expandpats(pats)
910 if not pats:
909 if not pats:
911 raise error.Abort(_('no source or destination specified'))
910 raise error.Abort(_('no source or destination specified'))
912 if len(pats) == 1:
911 if len(pats) == 1:
913 raise error.Abort(_('no destination specified'))
912 raise error.Abort(_('no destination specified'))
914 dest = pats.pop()
913 dest = pats.pop()
915 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
914 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
916 if not destdirexists:
915 if not destdirexists:
917 if len(pats) > 1 or matchmod.patkind(pats[0]):
916 if len(pats) > 1 or matchmod.patkind(pats[0]):
918 raise error.Abort(_('with multiple sources, destination must be an '
917 raise error.Abort(_('with multiple sources, destination must be an '
919 'existing directory'))
918 'existing directory'))
920 if util.endswithsep(dest):
919 if util.endswithsep(dest):
921 raise error.Abort(_('destination %s is not a directory') % dest)
920 raise error.Abort(_('destination %s is not a directory') % dest)
922
921
923 tfn = targetpathfn
922 tfn = targetpathfn
924 if after:
923 if after:
925 tfn = targetpathafterfn
924 tfn = targetpathafterfn
926 copylist = []
925 copylist = []
927 for pat in pats:
926 for pat in pats:
928 srcs = walkpat(pat)
927 srcs = walkpat(pat)
929 if not srcs:
928 if not srcs:
930 continue
929 continue
931 copylist.append((tfn(pat, dest, srcs), srcs))
930 copylist.append((tfn(pat, dest, srcs), srcs))
932 if not copylist:
931 if not copylist:
933 raise error.Abort(_('no files to copy'))
932 raise error.Abort(_('no files to copy'))
934
933
935 errors = 0
934 errors = 0
936 for targetpath, srcs in copylist:
935 for targetpath, srcs in copylist:
937 for abssrc, relsrc, exact in srcs:
936 for abssrc, relsrc, exact in srcs:
938 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
937 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
939 errors += 1
938 errors += 1
940
939
941 if errors:
940 if errors:
942 ui.warn(_('(consider using --after)\n'))
941 ui.warn(_('(consider using --after)\n'))
943
942
944 return errors != 0
943 return errors != 0
945
944
946 ## facility to let extension process additional data into an import patch
945 ## facility to let extension process additional data into an import patch
947 # list of identifier to be executed in order
946 # list of identifier to be executed in order
948 extrapreimport = [] # run before commit
947 extrapreimport = [] # run before commit
949 extrapostimport = [] # run after commit
948 extrapostimport = [] # run after commit
950 # mapping from identifier to actual import function
949 # mapping from identifier to actual import function
951 #
950 #
952 # 'preimport' functions are run before the commit is made and are provided the following
951 # 'preimport' functions are run before the commit is made and are provided the following
953 # arguments:
952 # arguments:
954 # - repo: the localrepository instance,
953 # - repo: the localrepository instance,
955 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
954 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
956 # - extra: the future extra dictionary of the changeset, please mutate it,
955 # - extra: the future extra dictionary of the changeset, please mutate it,
957 # - opts: the import options.
956 # - opts: the import options.
958 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
957 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
959 # mutation of the in-memory commit and more. Feel free to rework the code to get
958 # mutation of the in-memory commit and more. Feel free to rework the code to get
960 # there.
959 # there.
961 extrapreimportmap = {}
960 extrapreimportmap = {}
962 # 'postimport' functions are run after the commit is made and are provided the following
961 # 'postimport' functions are run after the commit is made and are provided the following
963 # argument:
962 # argument:
964 # - ctx: the changectx created by import.
963 # - ctx: the changectx created by import.
965 extrapostimportmap = {}
964 extrapostimportmap = {}
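
# Example: a minimal sketch of how an extension could plug into these hooks.
# The 'myext' identifier and the extra key below are illustrative assumptions,
# not part of any real extension.
def _myextpreimport(repo, patchdata, extra, opts):
    # stash the node advertised in the patch header, if any, into the extras
    if patchdata.get('nodeid'):
        extra['myext_source'] = patchdata['nodeid']

def _myextpostimport(ctx):
    # report the freshly created changeset
    ctx.repo().ui.note('myext: imported %s\n' % ctx.hex())

extrapreimport.append('myext')
extrapreimportmap['myext'] = _myextpreimport
extrapostimport.append('myext')
extrapostimportmap['myext'] = _myextpostimport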
966
965
967 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
966 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
968 """Utility function used by commands.import to import a single patch
967 """Utility function used by commands.import to import a single patch
969
968
970 This function is explicitly defined here to help the evolve extension to
969 This function is explicitly defined here to help the evolve extension to
971 wrap this part of the import logic.
970 wrap this part of the import logic.
972
971
973 The API is currently a bit ugly because it is a simple code translation from
972 The API is currently a bit ugly because it is a simple code translation from
974 the import command. Feel free to make it better.
973 the import command. Feel free to make it better.
975
974
976 :hunk: a patch (as a binary string)
975 :hunk: a patch (as a binary string)
977 :parents: nodes that will be the parents of the created commit
976 :parents: nodes that will be the parents of the created commit
978 :opts: the full dict of options passed to the import command
977 :opts: the full dict of options passed to the import command
979 :msgs: list to save the commit message to.
978 :msgs: list to save the commit message to.
980 (used in case we need to save it when failing)
979 (used in case we need to save it when failing)
981 :updatefunc: a function that updates the repo to a given node
980 :updatefunc: a function that updates the repo to a given node
982 updatefunc(<repo>, <node>)
981 updatefunc(<repo>, <node>)
983 """
982 """
984 # avoid cycle context -> subrepo -> cmdutil
983 # avoid cycle context -> subrepo -> cmdutil
985 from . import context
984 from . import context
986 extractdata = patch.extract(ui, hunk)
985 extractdata = patch.extract(ui, hunk)
987 tmpname = extractdata.get('filename')
986 tmpname = extractdata.get('filename')
988 message = extractdata.get('message')
987 message = extractdata.get('message')
989 user = opts.get('user') or extractdata.get('user')
988 user = opts.get('user') or extractdata.get('user')
990 date = opts.get('date') or extractdata.get('date')
989 date = opts.get('date') or extractdata.get('date')
991 branch = extractdata.get('branch')
990 branch = extractdata.get('branch')
992 nodeid = extractdata.get('nodeid')
991 nodeid = extractdata.get('nodeid')
993 p1 = extractdata.get('p1')
992 p1 = extractdata.get('p1')
994 p2 = extractdata.get('p2')
993 p2 = extractdata.get('p2')
995
994
996 nocommit = opts.get('no_commit')
995 nocommit = opts.get('no_commit')
997 importbranch = opts.get('import_branch')
996 importbranch = opts.get('import_branch')
998 update = not opts.get('bypass')
997 update = not opts.get('bypass')
999 strip = opts["strip"]
998 strip = opts["strip"]
1000 prefix = opts["prefix"]
999 prefix = opts["prefix"]
1001 sim = float(opts.get('similarity') or 0)
1000 sim = float(opts.get('similarity') or 0)
1002 if not tmpname:
1001 if not tmpname:
1003 return (None, None, False)
1002 return (None, None, False)
1004
1003
1005 rejects = False
1004 rejects = False
1006
1005
1007 try:
1006 try:
1008 cmdline_message = logmessage(ui, opts)
1007 cmdline_message = logmessage(ui, opts)
1009 if cmdline_message:
1008 if cmdline_message:
1010 # pickup the cmdline msg
1009 # pickup the cmdline msg
1011 message = cmdline_message
1010 message = cmdline_message
1012 elif message:
1011 elif message:
1013 # pickup the patch msg
1012 # pickup the patch msg
1014 message = message.strip()
1013 message = message.strip()
1015 else:
1014 else:
1016 # launch the editor
1015 # launch the editor
1017 message = None
1016 message = None
1018 ui.debug('message:\n%s\n' % message)
1017 ui.debug('message:\n%s\n' % message)
1019
1018
1020 if len(parents) == 1:
1019 if len(parents) == 1:
1021 parents.append(repo[nullid])
1020 parents.append(repo[nullid])
1022 if opts.get('exact'):
1021 if opts.get('exact'):
1023 if not nodeid or not p1:
1022 if not nodeid or not p1:
1024 raise error.Abort(_('not a Mercurial patch'))
1023 raise error.Abort(_('not a Mercurial patch'))
1025 p1 = repo[p1]
1024 p1 = repo[p1]
1026 p2 = repo[p2 or nullid]
1025 p2 = repo[p2 or nullid]
1027 elif p2:
1026 elif p2:
1028 try:
1027 try:
1029 p1 = repo[p1]
1028 p1 = repo[p1]
1030 p2 = repo[p2]
1029 p2 = repo[p2]
1031 # Without any options, consider p2 only if the
1030 # Without any options, consider p2 only if the
1032 # patch is being applied on top of the recorded
1031 # patch is being applied on top of the recorded
1033 # first parent.
1032 # first parent.
1034 if p1 != parents[0]:
1033 if p1 != parents[0]:
1035 p1 = parents[0]
1034 p1 = parents[0]
1036 p2 = repo[nullid]
1035 p2 = repo[nullid]
1037 except error.RepoError:
1036 except error.RepoError:
1038 p1, p2 = parents
1037 p1, p2 = parents
1039 if p2.node() == nullid:
1038 if p2.node() == nullid:
1040 ui.warn(_("warning: import the patch as a normal revision\n"
1039 ui.warn(_("warning: import the patch as a normal revision\n"
1041 "(use --exact to import the patch as a merge)\n"))
1040 "(use --exact to import the patch as a merge)\n"))
1042 else:
1041 else:
1043 p1, p2 = parents
1042 p1, p2 = parents
1044
1043
1045 n = None
1044 n = None
1046 if update:
1045 if update:
1047 if p1 != parents[0]:
1046 if p1 != parents[0]:
1048 updatefunc(repo, p1.node())
1047 updatefunc(repo, p1.node())
1049 if p2 != parents[1]:
1048 if p2 != parents[1]:
1050 repo.setparents(p1.node(), p2.node())
1049 repo.setparents(p1.node(), p2.node())
1051
1050
1052 if opts.get('exact') or importbranch:
1051 if opts.get('exact') or importbranch:
1053 repo.dirstate.setbranch(branch or 'default')
1052 repo.dirstate.setbranch(branch or 'default')
1054
1053
1055 partial = opts.get('partial', False)
1054 partial = opts.get('partial', False)
1056 files = set()
1055 files = set()
1057 try:
1056 try:
1058 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1057 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1059 files=files, eolmode=None, similarity=sim / 100.0)
1058 files=files, eolmode=None, similarity=sim / 100.0)
1060 except patch.PatchError as e:
1059 except patch.PatchError as e:
1061 if not partial:
1060 if not partial:
1062 raise error.Abort(str(e))
1061 raise error.Abort(str(e))
1063 if partial:
1062 if partial:
1064 rejects = True
1063 rejects = True
1065
1064
1066 files = list(files)
1065 files = list(files)
1067 if nocommit:
1066 if nocommit:
1068 if message:
1067 if message:
1069 msgs.append(message)
1068 msgs.append(message)
1070 else:
1069 else:
1071 if opts.get('exact') or p2:
1070 if opts.get('exact') or p2:
1072 # If you got here, you either used --force and know what
1071 # If you got here, you either used --force and know what
1073 # you are doing or used --exact or a merge patch while
1072 # you are doing or used --exact or a merge patch while
1074 # being updated to its first parent.
1073 # being updated to its first parent.
1075 m = None
1074 m = None
1076 else:
1075 else:
1077 m = scmutil.matchfiles(repo, files or [])
1076 m = scmutil.matchfiles(repo, files or [])
1078 editform = mergeeditform(repo[None], 'import.normal')
1077 editform = mergeeditform(repo[None], 'import.normal')
1079 if opts.get('exact'):
1078 if opts.get('exact'):
1080 editor = None
1079 editor = None
1081 else:
1080 else:
1082 editor = getcommiteditor(editform=editform, **opts)
1081 editor = getcommiteditor(editform=editform, **opts)
1083 extra = {}
1082 extra = {}
1084 for idfunc in extrapreimport:
1083 for idfunc in extrapreimport:
1085 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1084 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1086 overrides = {}
1085 overrides = {}
1087 if partial:
1086 if partial:
1088 overrides[('ui', 'allowemptycommit')] = True
1087 overrides[('ui', 'allowemptycommit')] = True
1089 with repo.ui.configoverride(overrides, 'import'):
1088 with repo.ui.configoverride(overrides, 'import'):
1090 n = repo.commit(message, user,
1089 n = repo.commit(message, user,
1091 date, match=m,
1090 date, match=m,
1092 editor=editor, extra=extra)
1091 editor=editor, extra=extra)
1093 for idfunc in extrapostimport:
1092 for idfunc in extrapostimport:
1094 extrapostimportmap[idfunc](repo[n])
1093 extrapostimportmap[idfunc](repo[n])
1095 else:
1094 else:
1096 if opts.get('exact') or importbranch:
1095 if opts.get('exact') or importbranch:
1097 branch = branch or 'default'
1096 branch = branch or 'default'
1098 else:
1097 else:
1099 branch = p1.branch()
1098 branch = p1.branch()
1100 store = patch.filestore()
1099 store = patch.filestore()
1101 try:
1100 try:
1102 files = set()
1101 files = set()
1103 try:
1102 try:
1104 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1103 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1105 files, eolmode=None)
1104 files, eolmode=None)
1106 except patch.PatchError as e:
1105 except patch.PatchError as e:
1107 raise error.Abort(str(e))
1106 raise error.Abort(str(e))
1108 if opts.get('exact'):
1107 if opts.get('exact'):
1109 editor = None
1108 editor = None
1110 else:
1109 else:
1111 editor = getcommiteditor(editform='import.bypass')
1110 editor = getcommiteditor(editform='import.bypass')
1112 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1111 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1113 message,
1112 message,
1114 user,
1113 user,
1115 date,
1114 date,
1116 branch, files, store,
1115 branch, files, store,
1117 editor=editor)
1116 editor=editor)
1118 n = memctx.commit()
1117 n = memctx.commit()
1119 finally:
1118 finally:
1120 store.close()
1119 store.close()
1121 if opts.get('exact') and nocommit:
1120 if opts.get('exact') and nocommit:
1122 # --exact with --no-commit is still useful in that it does merge
1121 # --exact with --no-commit is still useful in that it does merge
1123 # and branch bits
1122 # and branch bits
1124 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1123 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1125 elif opts.get('exact') and hex(n) != nodeid:
1124 elif opts.get('exact') and hex(n) != nodeid:
1126 raise error.Abort(_('patch is damaged or loses information'))
1125 raise error.Abort(_('patch is damaged or loses information'))
1127 msg = _('applied to working directory')
1126 msg = _('applied to working directory')
1128 if n:
1127 if n:
1129 # i18n: refers to a short changeset id
1128 # i18n: refers to a short changeset id
1130 msg = _('created %s') % short(n)
1129 msg = _('created %s') % short(n)
1131 return (msg, n, rejects)
1130 return (msg, n, rejects)
1132 finally:
1131 finally:
1133 os.unlink(tmpname)
1132 os.unlink(tmpname)
1134
1133
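# Example: a usage sketch of tryimportone() modelled on the import command.
# Assumptions: an open ui/repo pair is in scope, the patch lives at the
# hypothetical path 'fix.patch', hg.clean serves as the update callback, and
# locking/transaction handling is omitted.
from mercurial import hg  # in an extension or script, not in cmdutil itself

msgs = []
opts = {'strip': 1, 'prefix': '', 'similarity': 0}
parents = repo[None].parents()
patchfile = hg.openpath(ui, 'fix.patch')
for hunk in patch.split(patchfile):
    msg, node, rejects = tryimportone(ui, repo, hunk, parents,
                                      opts, msgs, hg.clean)
    if msg:
        ui.status('%s\n' % msg)
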
1135 # facility to let extensions include additional data in an exported patch
1134 # facility to let extensions include additional data in an exported patch
1136 # list of identifiers to be executed in order
1135 # list of identifiers to be executed in order
1137 extraexport = []
1136 extraexport = []
1138 # mapping from identifier to actual export function
1137 # mapping from identifier to actual export function
1139 # the function has to return a string to be added to the header or None
1138 # the function has to return a string to be added to the header or None
1140 # it is given two arguments (sequencenumber, changectx)
1139 # it is given two arguments (sequencenumber, changectx)
1141 extraexportmap = {}
1140 extraexportmap = {}
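
# Example: a sketch of an extension adding its own header line to exported
# patches (the 'seqinfo' identifier is hypothetical; returning None skips
# the extra header for that changeset).
def _seqinfoheader(seqno, ctx):
    if len(ctx.parents()) > 1:
        return None                     # leave merge changesets alone
    return 'Sequence %d on branch %s' % (seqno, ctx.branch())

extraexport.append('seqinfo')
extraexportmap['seqinfo'] = _seqinfoheader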
1142
1141
1143 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1142 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1144 node = ctx.node()
1143 node = ctx.node()
1145 parents = [p.node() for p in ctx.parents() if p]
1144 parents = [p.node() for p in ctx.parents() if p]
1146 branch = ctx.branch()
1145 branch = ctx.branch()
1147 if switch_parent:
1146 if switch_parent:
1148 parents.reverse()
1147 parents.reverse()
1149
1148
1150 if parents:
1149 if parents:
1151 prev = parents[0]
1150 prev = parents[0]
1152 else:
1151 else:
1153 prev = nullid
1152 prev = nullid
1154
1153
1155 write("# HG changeset patch\n")
1154 write("# HG changeset patch\n")
1156 write("# User %s\n" % ctx.user())
1155 write("# User %s\n" % ctx.user())
1157 write("# Date %d %d\n" % ctx.date())
1156 write("# Date %d %d\n" % ctx.date())
1158 write("# %s\n" % util.datestr(ctx.date()))
1157 write("# %s\n" % util.datestr(ctx.date()))
1159 if branch and branch != 'default':
1158 if branch and branch != 'default':
1160 write("# Branch %s\n" % branch)
1159 write("# Branch %s\n" % branch)
1161 write("# Node ID %s\n" % hex(node))
1160 write("# Node ID %s\n" % hex(node))
1162 write("# Parent %s\n" % hex(prev))
1161 write("# Parent %s\n" % hex(prev))
1163 if len(parents) > 1:
1162 if len(parents) > 1:
1164 write("# Parent %s\n" % hex(parents[1]))
1163 write("# Parent %s\n" % hex(parents[1]))
1165
1164
1166 for headerid in extraexport:
1165 for headerid in extraexport:
1167 header = extraexportmap[headerid](seqno, ctx)
1166 header = extraexportmap[headerid](seqno, ctx)
1168 if header is not None:
1167 if header is not None:
1169 write('# %s\n' % header)
1168 write('# %s\n' % header)
1170 write(ctx.description().rstrip())
1169 write(ctx.description().rstrip())
1171 write("\n\n")
1170 write("\n\n")
1172
1171
1173 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1172 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1174 write(chunk, label=label)
1173 write(chunk, label=label)
1175
1174
1176 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1175 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1177 opts=None, match=None):
1176 opts=None, match=None):
1178 '''export changesets as hg patches
1177 '''export changesets as hg patches
1179
1178
1180 Args:
1179 Args:
1181 repo: The repository from which we're exporting revisions.
1180 repo: The repository from which we're exporting revisions.
1182 revs: A list of revisions to export as revision numbers.
1181 revs: A list of revisions to export as revision numbers.
1183 fntemplate: An optional string to use for generating patch file names.
1182 fntemplate: An optional string to use for generating patch file names.
1184 fp: An optional file-like object to which patches should be written.
1183 fp: An optional file-like object to which patches should be written.
1185 switch_parent: If True, show diffs against the second parent when it is not nullid.
1184 switch_parent: If True, show diffs against the second parent when it is not nullid.
1186 Default is False, which always shows the diff against p1.
1185 Default is False, which always shows the diff against p1.
1187 opts: diff options to use for generating the patch.
1186 opts: diff options to use for generating the patch.
1188 match: If specified, only export changes to files matching this matcher.
1187 match: If specified, only export changes to files matching this matcher.
1189
1188
1190 Returns:
1189 Returns:
1191 Nothing.
1190 Nothing.
1192
1191
1193 Side Effect:
1192 Side Effect:
1194 "HG Changeset Patch" data is emitted to one of the following
1193 "HG Changeset Patch" data is emitted to one of the following
1195 destinations:
1194 destinations:
1196 fp is specified: All revs are written to the specified
1195 fp is specified: All revs are written to the specified
1197 file-like object.
1196 file-like object.
1198 fntemplate specified: Each rev is written to a unique file named using
1197 fntemplate specified: Each rev is written to a unique file named using
1199 the given template.
1198 the given template.
1200 Neither fp nor template specified: All revs written to repo.ui.write()
1199 Neither fp nor template specified: All revs written to repo.ui.write()
1201 '''
1200 '''
1202
1201
1203 total = len(revs)
1202 total = len(revs)
1204 revwidth = max(len(str(rev)) for rev in revs)
1203 revwidth = max(len(str(rev)) for rev in revs)
1205 filemode = {}
1204 filemode = {}
1206
1205
1207 write = None
1206 write = None
1208 dest = '<unnamed>'
1207 dest = '<unnamed>'
1209 if fp:
1208 if fp:
1210 dest = getattr(fp, 'name', dest)
1209 dest = getattr(fp, 'name', dest)
1211 def write(s, **kw):
1210 def write(s, **kw):
1212 fp.write(s)
1211 fp.write(s)
1213 elif not fntemplate:
1212 elif not fntemplate:
1214 write = repo.ui.write
1213 write = repo.ui.write
1215
1214
1216 for seqno, rev in enumerate(revs, 1):
1215 for seqno, rev in enumerate(revs, 1):
1217 ctx = repo[rev]
1216 ctx = repo[rev]
1218 fo = None
1217 fo = None
1219 if not fp and fntemplate:
1218 if not fp and fntemplate:
1220 desc_lines = ctx.description().rstrip().split('\n')
1219 desc_lines = ctx.description().rstrip().split('\n')
1221 desc = desc_lines[0] #Commit always has a first line.
1220 desc = desc_lines[0] #Commit always has a first line.
1222 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1221 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1223 total=total, seqno=seqno, revwidth=revwidth,
1222 total=total, seqno=seqno, revwidth=revwidth,
1224 mode='wb', modemap=filemode)
1223 mode='wb', modemap=filemode)
1225 dest = fo.name
1224 dest = fo.name
1226 def write(s, **kw):
1225 def write(s, **kw):
1227 fo.write(s)
1226 fo.write(s)
1228 if not dest.startswith('<'):
1227 if not dest.startswith('<'):
1229 repo.ui.note("%s\n" % dest)
1228 repo.ui.note("%s\n" % dest)
1230 _exportsingle(
1229 _exportsingle(
1231 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1230 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1232 if fo is not None:
1231 if fo is not None:
1233 fo.close()
1232 fo.close()
1234
1233
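# Example: a minimal sketch of calling export() (assumes ui/repo in scope).
# This writes the working directory parent to a file named from the default
# 'hg-%h.patch' template.
export(repo, [repo['.'].rev()], fntemplate='hg-%h.patch',
       opts=patch.diffallopts(ui))
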
1235 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1234 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1236 changes=None, stat=False, fp=None, prefix='',
1235 changes=None, stat=False, fp=None, prefix='',
1237 root='', listsubrepos=False):
1236 root='', listsubrepos=False):
1238 '''show diff or diffstat.'''
1237 '''show diff or diffstat.'''
1239 if fp is None:
1238 if fp is None:
1240 write = ui.write
1239 write = ui.write
1241 else:
1240 else:
1242 def write(s, **kw):
1241 def write(s, **kw):
1243 fp.write(s)
1242 fp.write(s)
1244
1243
1245 if root:
1244 if root:
1246 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1245 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1247 else:
1246 else:
1248 relroot = ''
1247 relroot = ''
1249 if relroot != '':
1248 if relroot != '':
1250 # XXX relative roots currently don't work if the root is within a
1249 # XXX relative roots currently don't work if the root is within a
1251 # subrepo
1250 # subrepo
1252 uirelroot = match.uipath(relroot)
1251 uirelroot = match.uipath(relroot)
1253 relroot += '/'
1252 relroot += '/'
1254 for matchroot in match.files():
1253 for matchroot in match.files():
1255 if not matchroot.startswith(relroot):
1254 if not matchroot.startswith(relroot):
1256 ui.warn(_('warning: %s not inside relative root %s\n') % (
1255 ui.warn(_('warning: %s not inside relative root %s\n') % (
1257 match.uipath(matchroot), uirelroot))
1256 match.uipath(matchroot), uirelroot))
1258
1257
1259 if stat:
1258 if stat:
1260 diffopts = diffopts.copy(context=0)
1259 diffopts = diffopts.copy(context=0)
1261 width = 80
1260 width = 80
1262 if not ui.plain():
1261 if not ui.plain():
1263 width = ui.termwidth()
1262 width = ui.termwidth()
1264 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1263 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1265 prefix=prefix, relroot=relroot)
1264 prefix=prefix, relroot=relroot)
1266 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1265 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1267 width=width):
1266 width=width):
1268 write(chunk, label=label)
1267 write(chunk, label=label)
1269 else:
1268 else:
1270 for chunk, label in patch.diffui(repo, node1, node2, match,
1269 for chunk, label in patch.diffui(repo, node1, node2, match,
1271 changes, diffopts, prefix=prefix,
1270 changes, diffopts, prefix=prefix,
1272 relroot=relroot):
1271 relroot=relroot):
1273 write(chunk, label=label)
1272 write(chunk, label=label)
1274
1273
1275 if listsubrepos:
1274 if listsubrepos:
1276 ctx1 = repo[node1]
1275 ctx1 = repo[node1]
1277 ctx2 = repo[node2]
1276 ctx2 = repo[node2]
1278 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1277 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1279 tempnode2 = node2
1278 tempnode2 = node2
1280 try:
1279 try:
1281 if node2 is not None:
1280 if node2 is not None:
1282 tempnode2 = ctx2.substate[subpath][1]
1281 tempnode2 = ctx2.substate[subpath][1]
1283 except KeyError:
1282 except KeyError:
1284 # A subrepo that existed in node1 was deleted between node1 and
1283 # A subrepo that existed in node1 was deleted between node1 and
1285 # node2 (inclusive). Thus, ctx2's substate won't contain that
1284 # node2 (inclusive). Thus, ctx2's substate won't contain that
1286 # subpath. The best we can do is to ignore it.
1285 # subpath. The best we can do is to ignore it.
1287 tempnode2 = None
1286 tempnode2 = None
1288 submatch = matchmod.subdirmatcher(subpath, match)
1287 submatch = matchmod.subdirmatcher(subpath, match)
1289 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1288 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1290 stat=stat, fp=fp, prefix=prefix)
1289 stat=stat, fp=fp, prefix=prefix)
1291
1290
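# Example: a quick sketch of both output modes (assumes ui/repo in scope).
m = scmutil.matchall(repo)
diffopts = patch.diffallopts(ui)
# diffstat of the working directory against its first parent
diffordiffstat(ui, repo, diffopts, repo['.'].node(), None, m, stat=True)
# full patch text between '.' and its first parent
p1node = repo['.'].p1().node()
diffordiffstat(ui, repo, diffopts, p1node, repo['.'].node(), m)
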
1292 def _changesetlabels(ctx):
1291 def _changesetlabels(ctx):
1293 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1292 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1294 if ctx.obsolete():
1293 if ctx.obsolete():
1295 labels.append('changeset.obsolete')
1294 labels.append('changeset.obsolete')
1296 if ctx.troubled():
1295 if ctx.troubled():
1297 labels.append('changeset.troubled')
1296 labels.append('changeset.troubled')
1298 for trouble in ctx.troubles():
1297 for trouble in ctx.troubles():
1299 labels.append('trouble.%s' % trouble)
1298 labels.append('trouble.%s' % trouble)
1300 return ' '.join(labels)
1299 return ' '.join(labels)
1301
1300
1302 class changeset_printer(object):
1301 class changeset_printer(object):
1303 '''show changeset information when templating not requested.'''
1302 '''show changeset information when templating not requested.'''
1304
1303
1305 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1304 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1306 self.ui = ui
1305 self.ui = ui
1307 self.repo = repo
1306 self.repo = repo
1308 self.buffered = buffered
1307 self.buffered = buffered
1309 self.matchfn = matchfn
1308 self.matchfn = matchfn
1310 self.diffopts = diffopts
1309 self.diffopts = diffopts
1311 self.header = {}
1310 self.header = {}
1312 self.hunk = {}
1311 self.hunk = {}
1313 self.lastheader = None
1312 self.lastheader = None
1314 self.footer = None
1313 self.footer = None
1315
1314
1316 def flush(self, ctx):
1315 def flush(self, ctx):
1317 rev = ctx.rev()
1316 rev = ctx.rev()
1318 if rev in self.header:
1317 if rev in self.header:
1319 h = self.header[rev]
1318 h = self.header[rev]
1320 if h != self.lastheader:
1319 if h != self.lastheader:
1321 self.lastheader = h
1320 self.lastheader = h
1322 self.ui.write(h)
1321 self.ui.write(h)
1323 del self.header[rev]
1322 del self.header[rev]
1324 if rev in self.hunk:
1323 if rev in self.hunk:
1325 self.ui.write(self.hunk[rev])
1324 self.ui.write(self.hunk[rev])
1326 del self.hunk[rev]
1325 del self.hunk[rev]
1327 return 1
1326 return 1
1328 return 0
1327 return 0
1329
1328
1330 def close(self):
1329 def close(self):
1331 if self.footer:
1330 if self.footer:
1332 self.ui.write(self.footer)
1331 self.ui.write(self.footer)
1333
1332
1334 def show(self, ctx, copies=None, matchfn=None, **props):
1333 def show(self, ctx, copies=None, matchfn=None, **props):
1335 if self.buffered:
1334 if self.buffered:
1336 self.ui.pushbuffer(labeled=True)
1335 self.ui.pushbuffer(labeled=True)
1337 self._show(ctx, copies, matchfn, props)
1336 self._show(ctx, copies, matchfn, props)
1338 self.hunk[ctx.rev()] = self.ui.popbuffer()
1337 self.hunk[ctx.rev()] = self.ui.popbuffer()
1339 else:
1338 else:
1340 self._show(ctx, copies, matchfn, props)
1339 self._show(ctx, copies, matchfn, props)
1341
1340
1342 def _show(self, ctx, copies, matchfn, props):
1341 def _show(self, ctx, copies, matchfn, props):
1343 '''show a single changeset or file revision'''
1342 '''show a single changeset or file revision'''
1344 changenode = ctx.node()
1343 changenode = ctx.node()
1345 rev = ctx.rev()
1344 rev = ctx.rev()
1346 if self.ui.debugflag:
1345 if self.ui.debugflag:
1347 hexfunc = hex
1346 hexfunc = hex
1348 else:
1347 else:
1349 hexfunc = short
1348 hexfunc = short
1350 # as of now, wctx.node() and wctx.rev() return None, but we want to
1349 # as of now, wctx.node() and wctx.rev() return None, but we want to
1351 # show the same values as {node} and {rev} templatekw
1350 # show the same values as {node} and {rev} templatekw
1352 revnode = (scmutil.intrev(ctx), hexfunc(bin(ctx.hex())))
1351 revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
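# (binnode() is the node-side counterpart of intrev(): for the working
# context it yields the 'wdir' placeholder node, so the pair written below
# matches the {rev} and {node} template keywords)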
1353
1352
1354 if self.ui.quiet:
1353 if self.ui.quiet:
1355 self.ui.write("%d:%s\n" % revnode, label='log.node')
1354 self.ui.write("%d:%s\n" % revnode, label='log.node')
1356 return
1355 return
1357
1356
1358 date = util.datestr(ctx.date())
1357 date = util.datestr(ctx.date())
1359
1358
1360 # i18n: column positioning for "hg log"
1359 # i18n: column positioning for "hg log"
1361 self.ui.write(_("changeset: %d:%s\n") % revnode,
1360 self.ui.write(_("changeset: %d:%s\n") % revnode,
1362 label=_changesetlabels(ctx))
1361 label=_changesetlabels(ctx))
1363
1362
1364 # branches are shown first before any other names due to backwards
1363 # branches are shown first before any other names due to backwards
1365 # compatibility
1364 # compatibility
1366 branch = ctx.branch()
1365 branch = ctx.branch()
1367 # don't show the default branch name
1366 # don't show the default branch name
1368 if branch != 'default':
1367 if branch != 'default':
1369 # i18n: column positioning for "hg log"
1368 # i18n: column positioning for "hg log"
1370 self.ui.write(_("branch: %s\n") % branch,
1369 self.ui.write(_("branch: %s\n") % branch,
1371 label='log.branch')
1370 label='log.branch')
1372
1371
1373 for nsname, ns in self.repo.names.iteritems():
1372 for nsname, ns in self.repo.names.iteritems():
1374 # branches has special logic already handled above, so here we just
1373 # branches has special logic already handled above, so here we just
1375 # skip it
1374 # skip it
1376 if nsname == 'branches':
1375 if nsname == 'branches':
1377 continue
1376 continue
1378 # we will use the templatename as the color name since those two
1377 # we will use the templatename as the color name since those two
1379 # should be the same
1378 # should be the same
1380 for name in ns.names(self.repo, changenode):
1379 for name in ns.names(self.repo, changenode):
1381 self.ui.write(ns.logfmt % name,
1380 self.ui.write(ns.logfmt % name,
1382 label='log.%s' % ns.colorname)
1381 label='log.%s' % ns.colorname)
1383 if self.ui.debugflag:
1382 if self.ui.debugflag:
1384 # i18n: column positioning for "hg log"
1383 # i18n: column positioning for "hg log"
1385 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1384 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1386 label='log.phase')
1385 label='log.phase')
1387 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1386 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1388 label = 'log.parent changeset.%s' % pctx.phasestr()
1387 label = 'log.parent changeset.%s' % pctx.phasestr()
1389 # i18n: column positioning for "hg log"
1388 # i18n: column positioning for "hg log"
1390 self.ui.write(_("parent: %d:%s\n")
1389 self.ui.write(_("parent: %d:%s\n")
1391 % (pctx.rev(), hexfunc(pctx.node())),
1390 % (pctx.rev(), hexfunc(pctx.node())),
1392 label=label)
1391 label=label)
1393
1392
1394 if self.ui.debugflag and rev is not None:
1393 if self.ui.debugflag and rev is not None:
1395 mnode = ctx.manifestnode()
1394 mnode = ctx.manifestnode()
1396 # i18n: column positioning for "hg log"
1395 # i18n: column positioning for "hg log"
1397 self.ui.write(_("manifest: %d:%s\n") %
1396 self.ui.write(_("manifest: %d:%s\n") %
1398 (self.repo.manifestlog._revlog.rev(mnode),
1397 (self.repo.manifestlog._revlog.rev(mnode),
1399 hex(mnode)),
1398 hex(mnode)),
1400 label='ui.debug log.manifest')
1399 label='ui.debug log.manifest')
1401 # i18n: column positioning for "hg log"
1400 # i18n: column positioning for "hg log"
1402 self.ui.write(_("user: %s\n") % ctx.user(),
1401 self.ui.write(_("user: %s\n") % ctx.user(),
1403 label='log.user')
1402 label='log.user')
1404 # i18n: column positioning for "hg log"
1403 # i18n: column positioning for "hg log"
1405 self.ui.write(_("date: %s\n") % date,
1404 self.ui.write(_("date: %s\n") % date,
1406 label='log.date')
1405 label='log.date')
1407
1406
1408 if ctx.troubled():
1407 if ctx.troubled():
1409 # i18n: column positioning for "hg log"
1408 # i18n: column positioning for "hg log"
1410 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1409 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1411 label='log.trouble')
1410 label='log.trouble')
1412
1411
1413 if self.ui.debugflag:
1412 if self.ui.debugflag:
1414 files = ctx.p1().status(ctx)[:3]
1413 files = ctx.p1().status(ctx)[:3]
1415 for key, value in zip([# i18n: column positioning for "hg log"
1414 for key, value in zip([# i18n: column positioning for "hg log"
1416 _("files:"),
1415 _("files:"),
1417 # i18n: column positioning for "hg log"
1416 # i18n: column positioning for "hg log"
1418 _("files+:"),
1417 _("files+:"),
1419 # i18n: column positioning for "hg log"
1418 # i18n: column positioning for "hg log"
1420 _("files-:")], files):
1419 _("files-:")], files):
1421 if value:
1420 if value:
1422 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1421 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1423 label='ui.debug log.files')
1422 label='ui.debug log.files')
1424 elif ctx.files() and self.ui.verbose:
1423 elif ctx.files() and self.ui.verbose:
1425 # i18n: column positioning for "hg log"
1424 # i18n: column positioning for "hg log"
1426 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1425 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1427 label='ui.note log.files')
1426 label='ui.note log.files')
1428 if copies and self.ui.verbose:
1427 if copies and self.ui.verbose:
1429 copies = ['%s (%s)' % c for c in copies]
1428 copies = ['%s (%s)' % c for c in copies]
1430 # i18n: column positioning for "hg log"
1429 # i18n: column positioning for "hg log"
1431 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1430 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1432 label='ui.note log.copies')
1431 label='ui.note log.copies')
1433
1432
1434 extra = ctx.extra()
1433 extra = ctx.extra()
1435 if extra and self.ui.debugflag:
1434 if extra and self.ui.debugflag:
1436 for key, value in sorted(extra.items()):
1435 for key, value in sorted(extra.items()):
1437 # i18n: column positioning for "hg log"
1436 # i18n: column positioning for "hg log"
1438 self.ui.write(_("extra: %s=%s\n")
1437 self.ui.write(_("extra: %s=%s\n")
1439 % (key, util.escapestr(value)),
1438 % (key, util.escapestr(value)),
1440 label='ui.debug log.extra')
1439 label='ui.debug log.extra')
1441
1440
1442 description = ctx.description().strip()
1441 description = ctx.description().strip()
1443 if description:
1442 if description:
1444 if self.ui.verbose:
1443 if self.ui.verbose:
1445 self.ui.write(_("description:\n"),
1444 self.ui.write(_("description:\n"),
1446 label='ui.note log.description')
1445 label='ui.note log.description')
1447 self.ui.write(description,
1446 self.ui.write(description,
1448 label='ui.note log.description')
1447 label='ui.note log.description')
1449 self.ui.write("\n\n")
1448 self.ui.write("\n\n")
1450 else:
1449 else:
1451 # i18n: column positioning for "hg log"
1450 # i18n: column positioning for "hg log"
1452 self.ui.write(_("summary: %s\n") %
1451 self.ui.write(_("summary: %s\n") %
1453 description.splitlines()[0],
1452 description.splitlines()[0],
1454 label='log.summary')
1453 label='log.summary')
1455 self.ui.write("\n")
1454 self.ui.write("\n")
1456
1455
1457 self.showpatch(ctx, matchfn)
1456 self.showpatch(ctx, matchfn)
1458
1457
1459 def showpatch(self, ctx, matchfn):
1458 def showpatch(self, ctx, matchfn):
1460 if not matchfn:
1459 if not matchfn:
1461 matchfn = self.matchfn
1460 matchfn = self.matchfn
1462 if matchfn:
1461 if matchfn:
1463 stat = self.diffopts.get('stat')
1462 stat = self.diffopts.get('stat')
1464 diff = self.diffopts.get('patch')
1463 diff = self.diffopts.get('patch')
1465 diffopts = patch.diffallopts(self.ui, self.diffopts)
1464 diffopts = patch.diffallopts(self.ui, self.diffopts)
1466 node = ctx.node()
1465 node = ctx.node()
1467 prev = ctx.p1().node()
1466 prev = ctx.p1().node()
1468 if stat:
1467 if stat:
1469 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1468 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1470 match=matchfn, stat=True)
1469 match=matchfn, stat=True)
1471 if diff:
1470 if diff:
1472 if stat:
1471 if stat:
1473 self.ui.write("\n")
1472 self.ui.write("\n")
1474 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1473 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1475 match=matchfn, stat=False)
1474 match=matchfn, stat=False)
1476 self.ui.write("\n")
1475 self.ui.write("\n")
1477
1476
1478 class jsonchangeset(changeset_printer):
1477 class jsonchangeset(changeset_printer):
1479 '''format changeset information.'''
1478 '''format changeset information.'''
1480
1479
1481 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1480 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1482 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1481 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1483 self.cache = {}
1482 self.cache = {}
1484 self._first = True
1483 self._first = True
1485
1484
1486 def close(self):
1485 def close(self):
1487 if not self._first:
1486 if not self._first:
1488 self.ui.write("\n]\n")
1487 self.ui.write("\n]\n")
1489 else:
1488 else:
1490 self.ui.write("[]\n")
1489 self.ui.write("[]\n")
1491
1490
1492 def _show(self, ctx, copies, matchfn, props):
1491 def _show(self, ctx, copies, matchfn, props):
1493 '''show a single changeset or file revision'''
1492 '''show a single changeset or file revision'''
1494 rev = ctx.rev()
1493 rev = ctx.rev()
1495 if rev is None:
1494 if rev is None:
1496 jrev = jnode = 'null'
1495 jrev = jnode = 'null'
1497 else:
1496 else:
1498 jrev = '%d' % rev
1497 jrev = '%d' % rev
1499 jnode = '"%s"' % hex(ctx.node())
1498 jnode = '"%s"' % hex(ctx.node())
1500 j = encoding.jsonescape
1499 j = encoding.jsonescape
1501
1500
1502 if self._first:
1501 if self._first:
1503 self.ui.write("[\n {")
1502 self.ui.write("[\n {")
1504 self._first = False
1503 self._first = False
1505 else:
1504 else:
1506 self.ui.write(",\n {")
1505 self.ui.write(",\n {")
1507
1506
1508 if self.ui.quiet:
1507 if self.ui.quiet:
1509 self.ui.write(('\n "rev": %s') % jrev)
1508 self.ui.write(('\n "rev": %s') % jrev)
1510 self.ui.write((',\n "node": %s') % jnode)
1509 self.ui.write((',\n "node": %s') % jnode)
1511 self.ui.write('\n }')
1510 self.ui.write('\n }')
1512 return
1511 return
1513
1512
1514 self.ui.write(('\n "rev": %s') % jrev)
1513 self.ui.write(('\n "rev": %s') % jrev)
1515 self.ui.write((',\n "node": %s') % jnode)
1514 self.ui.write((',\n "node": %s') % jnode)
1516 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1515 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1517 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1516 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1518 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1517 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1519 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1518 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1520 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1519 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1521
1520
1522 self.ui.write((',\n "bookmarks": [%s]') %
1521 self.ui.write((',\n "bookmarks": [%s]') %
1523 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1522 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1524 self.ui.write((',\n "tags": [%s]') %
1523 self.ui.write((',\n "tags": [%s]') %
1525 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1524 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1526 self.ui.write((',\n "parents": [%s]') %
1525 self.ui.write((',\n "parents": [%s]') %
1527 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1526 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1528
1527
1529 if self.ui.debugflag:
1528 if self.ui.debugflag:
1530 if rev is None:
1529 if rev is None:
1531 jmanifestnode = 'null'
1530 jmanifestnode = 'null'
1532 else:
1531 else:
1533 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1532 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1534 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1533 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1535
1534
1536 self.ui.write((',\n "extra": {%s}') %
1535 self.ui.write((',\n "extra": {%s}') %
1537 ", ".join('"%s": "%s"' % (j(k), j(v))
1536 ", ".join('"%s": "%s"' % (j(k), j(v))
1538 for k, v in ctx.extra().items()))
1537 for k, v in ctx.extra().items()))
1539
1538
1540 files = ctx.p1().status(ctx)
1539 files = ctx.p1().status(ctx)
1541 self.ui.write((',\n "modified": [%s]') %
1540 self.ui.write((',\n "modified": [%s]') %
1542 ", ".join('"%s"' % j(f) for f in files[0]))
1541 ", ".join('"%s"' % j(f) for f in files[0]))
1543 self.ui.write((',\n "added": [%s]') %
1542 self.ui.write((',\n "added": [%s]') %
1544 ", ".join('"%s"' % j(f) for f in files[1]))
1543 ", ".join('"%s"' % j(f) for f in files[1]))
1545 self.ui.write((',\n "removed": [%s]') %
1544 self.ui.write((',\n "removed": [%s]') %
1546 ", ".join('"%s"' % j(f) for f in files[2]))
1545 ", ".join('"%s"' % j(f) for f in files[2]))
1547
1546
1548 elif self.ui.verbose:
1547 elif self.ui.verbose:
1549 self.ui.write((',\n "files": [%s]') %
1548 self.ui.write((',\n "files": [%s]') %
1550 ", ".join('"%s"' % j(f) for f in ctx.files()))
1549 ", ".join('"%s"' % j(f) for f in ctx.files()))
1551
1550
1552 if copies:
1551 if copies:
1553 self.ui.write((',\n "copies": {%s}') %
1552 self.ui.write((',\n "copies": {%s}') %
1554 ", ".join('"%s": "%s"' % (j(k), j(v))
1553 ", ".join('"%s": "%s"' % (j(k), j(v))
1555 for k, v in copies))
1554 for k, v in copies))
1556
1555
1557 matchfn = self.matchfn
1556 matchfn = self.matchfn
1558 if matchfn:
1557 if matchfn:
1559 stat = self.diffopts.get('stat')
1558 stat = self.diffopts.get('stat')
1560 diff = self.diffopts.get('patch')
1559 diff = self.diffopts.get('patch')
1561 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1560 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1562 node, prev = ctx.node(), ctx.p1().node()
1561 node, prev = ctx.node(), ctx.p1().node()
1563 if stat:
1562 if stat:
1564 self.ui.pushbuffer()
1563 self.ui.pushbuffer()
1565 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1564 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1566 match=matchfn, stat=True)
1565 match=matchfn, stat=True)
1567 self.ui.write((',\n "diffstat": "%s"')
1566 self.ui.write((',\n "diffstat": "%s"')
1568 % j(self.ui.popbuffer()))
1567 % j(self.ui.popbuffer()))
1569 if diff:
1568 if diff:
1570 self.ui.pushbuffer()
1569 self.ui.pushbuffer()
1571 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1570 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1572 match=matchfn, stat=False)
1571 match=matchfn, stat=False)
1573 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1572 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1574
1573
1575 self.ui.write("\n }")
1574 self.ui.write("\n }")
1576
1575
1577 class changeset_templater(changeset_printer):
1576 class changeset_templater(changeset_printer):
1578 '''format changeset information.'''
1577 '''format changeset information.'''
1579
1578
1580 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1579 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1581 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1580 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1582 assert not (tmpl and mapfile)
1581 assert not (tmpl and mapfile)
1583 defaulttempl = templatekw.defaulttempl
1582 defaulttempl = templatekw.defaulttempl
1584 if mapfile:
1583 if mapfile:
1585 self.t = templater.templater.frommapfile(mapfile,
1584 self.t = templater.templater.frommapfile(mapfile,
1586 cache=defaulttempl)
1585 cache=defaulttempl)
1587 else:
1586 else:
1588 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1587 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1589 cache=defaulttempl)
1588 cache=defaulttempl)
1590
1589
1591 self._counter = itertools.count()
1590 self._counter = itertools.count()
1592 self.cache = {}
1591 self.cache = {}
1593
1592
1594 # find correct templates for current mode
1593 # find correct templates for current mode
1595 tmplmodes = [
1594 tmplmodes = [
1596 (True, None),
1595 (True, None),
1597 (self.ui.verbose, 'verbose'),
1596 (self.ui.verbose, 'verbose'),
1598 (self.ui.quiet, 'quiet'),
1597 (self.ui.quiet, 'quiet'),
1599 (self.ui.debugflag, 'debug'),
1598 (self.ui.debugflag, 'debug'),
1600 ]
1599 ]
1601
1600
1602 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1601 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1603 'docheader': '', 'docfooter': ''}
1602 'docheader': '', 'docfooter': ''}
1604 for mode, postfix in tmplmodes:
1603 for mode, postfix in tmplmodes:
1605 for t in self._parts:
1604 for t in self._parts:
1606 cur = t
1605 cur = t
1607 if postfix:
1606 if postfix:
1608 cur += "_" + postfix
1607 cur += "_" + postfix
1609 if mode and cur in self.t:
1608 if mode and cur in self.t:
1610 self._parts[t] = cur
1609 self._parts[t] = cur
1611
1610
1612 if self._parts['docheader']:
1611 if self._parts['docheader']:
1613 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1612 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1614
1613
1615 def close(self):
1614 def close(self):
1616 if self._parts['docfooter']:
1615 if self._parts['docfooter']:
1617 if not self.footer:
1616 if not self.footer:
1618 self.footer = ""
1617 self.footer = ""
1619 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1618 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1620 return super(changeset_templater, self).close()
1619 return super(changeset_templater, self).close()
1621
1620
1622 def _show(self, ctx, copies, matchfn, props):
1621 def _show(self, ctx, copies, matchfn, props):
1623 '''show a single changeset or file revision'''
1622 '''show a single changeset or file revision'''
1624 props = props.copy()
1623 props = props.copy()
1625 props.update(templatekw.keywords)
1624 props.update(templatekw.keywords)
1626 props['templ'] = self.t
1625 props['templ'] = self.t
1627 props['ctx'] = ctx
1626 props['ctx'] = ctx
1628 props['repo'] = self.repo
1627 props['repo'] = self.repo
1629 props['ui'] = self.repo.ui
1628 props['ui'] = self.repo.ui
1630 props['index'] = next(self._counter)
1629 props['index'] = next(self._counter)
1631 props['revcache'] = {'copies': copies}
1630 props['revcache'] = {'copies': copies}
1632 props['cache'] = self.cache
1631 props['cache'] = self.cache
1633 props = pycompat.strkwargs(props)
1632 props = pycompat.strkwargs(props)
1634
1633
1635 # write header
1634 # write header
1636 if self._parts['header']:
1635 if self._parts['header']:
1637 h = templater.stringify(self.t(self._parts['header'], **props))
1636 h = templater.stringify(self.t(self._parts['header'], **props))
1638 if self.buffered:
1637 if self.buffered:
1639 self.header[ctx.rev()] = h
1638 self.header[ctx.rev()] = h
1640 else:
1639 else:
1641 if self.lastheader != h:
1640 if self.lastheader != h:
1642 self.lastheader = h
1641 self.lastheader = h
1643 self.ui.write(h)
1642 self.ui.write(h)
1644
1643
1645 # write changeset metadata, then patch if requested
1644 # write changeset metadata, then patch if requested
1646 key = self._parts['changeset']
1645 key = self._parts['changeset']
1647 self.ui.write(templater.stringify(self.t(key, **props)))
1646 self.ui.write(templater.stringify(self.t(key, **props)))
1648 self.showpatch(ctx, matchfn)
1647 self.showpatch(ctx, matchfn)
1649
1648
1650 if self._parts['footer']:
1649 if self._parts['footer']:
1651 if not self.footer:
1650 if not self.footer:
1652 self.footer = templater.stringify(
1651 self.footer = templater.stringify(
1653 self.t(self._parts['footer'], **props))
1652 self.t(self._parts['footer'], **props))
1654
1653
1655 def gettemplate(ui, tmpl, style):
1654 def gettemplate(ui, tmpl, style):
1656 """
1655 """
1657 Find the template matching the given template spec or style.
1656 Find the template matching the given template spec or style.
1658 """
1657 """
1659
1658
1660 # ui settings
1659 # ui settings
1661 if not tmpl and not style: # templates are stronger than styles
1660 if not tmpl and not style: # templates are stronger than styles
1662 tmpl = ui.config('ui', 'logtemplate')
1661 tmpl = ui.config('ui', 'logtemplate')
1663 if tmpl:
1662 if tmpl:
1664 return templater.unquotestring(tmpl), None
1663 return templater.unquotestring(tmpl), None
1665 else:
1664 else:
1666 style = util.expandpath(ui.config('ui', 'style', ''))
1665 style = util.expandpath(ui.config('ui', 'style', ''))
1667
1666
1668 if not tmpl and style:
1667 if not tmpl and style:
1669 mapfile = style
1668 mapfile = style
1670 if not os.path.split(mapfile)[0]:
1669 if not os.path.split(mapfile)[0]:
1671 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1670 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1672 or templater.templatepath(mapfile))
1671 or templater.templatepath(mapfile))
1673 if mapname:
1672 if mapname:
1674 mapfile = mapname
1673 mapfile = mapname
1675 return None, mapfile
1674 return None, mapfile
1676
1675
1677 if not tmpl:
1676 if not tmpl:
1678 return None, None
1677 return None, None
1679
1678
1680 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1679 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1681
1680
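# Example: the two typical resolutions (the map file path is illustrative).
tmpl, mapfile = gettemplate(ui, '{rev}:{node|short}\n', None)
# -> ('{rev}:{node|short}\n', None): a literal template, no map file
tmpl, mapfile = gettemplate(ui, None, 'compact')
# -> (None, '.../map-cmdline.compact'): a stock style resolved to its map file
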
1682 def show_changeset(ui, repo, opts, buffered=False):
1681 def show_changeset(ui, repo, opts, buffered=False):
1683 """show one changeset using template or regular display.
1682 """show one changeset using template or regular display.
1684
1683
1685 Display format will be the first non-empty hit of:
1684 Display format will be the first non-empty hit of:
1686 1. option 'template'
1685 1. option 'template'
1687 2. option 'style'
1686 2. option 'style'
1688 3. [ui] setting 'logtemplate'
1687 3. [ui] setting 'logtemplate'
1689 4. [ui] setting 'style'
1688 4. [ui] setting 'style'
1690 If all of these values are either unset or the empty string,
1689 If all of these values are either unset or the empty string,
1691 regular display via changeset_printer() is done.
1690 regular display via changeset_printer() is done.
1692 """
1691 """
1693 # options
1692 # options
1694 matchfn = None
1693 matchfn = None
1695 if opts.get('patch') or opts.get('stat'):
1694 if opts.get('patch') or opts.get('stat'):
1696 matchfn = scmutil.matchall(repo)
1695 matchfn = scmutil.matchall(repo)
1697
1696
1698 if opts.get('template') == 'json':
1697 if opts.get('template') == 'json':
1699 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1698 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1700
1699
1701 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1700 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1702
1701
1703 if not tmpl and not mapfile:
1702 if not tmpl and not mapfile:
1704 return changeset_printer(ui, repo, matchfn, opts, buffered)
1703 return changeset_printer(ui, repo, matchfn, opts, buffered)
1705
1704
1706 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
1705 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
1707
1706
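# Example: a minimal sketch of driving a displayer (assumes ui/repo in scope;
# the opts mirror a plain "hg log -p" invocation for the working copy parent).
displayer = show_changeset(ui, repo, {'patch': True, 'template': '', 'style': ''})
displayer.show(repo['.'])
displayer.close()
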
1708 def showmarker(fm, marker, index=None):
1707 def showmarker(fm, marker, index=None):
1709 """utility function to display obsolescence marker in a readable way
1708 """utility function to display obsolescence marker in a readable way
1710
1709
1711 To be used by a debug function."""
1710 To be used by a debug function."""
1712 if index is not None:
1711 if index is not None:
1713 fm.write('index', '%i ', index)
1712 fm.write('index', '%i ', index)
1714 fm.write('precnode', '%s ', hex(marker.precnode()))
1713 fm.write('precnode', '%s ', hex(marker.precnode()))
1715 succs = marker.succnodes()
1714 succs = marker.succnodes()
1716 fm.condwrite(succs, 'succnodes', '%s ',
1715 fm.condwrite(succs, 'succnodes', '%s ',
1717 fm.formatlist(map(hex, succs), name='node'))
1716 fm.formatlist(map(hex, succs), name='node'))
1718 fm.write('flag', '%X ', marker.flags())
1717 fm.write('flag', '%X ', marker.flags())
1719 parents = marker.parentnodes()
1718 parents = marker.parentnodes()
1720 if parents is not None:
1719 if parents is not None:
1721 fm.write('parentnodes', '{%s} ',
1720 fm.write('parentnodes', '{%s} ',
1722 fm.formatlist(map(hex, parents), name='node', sep=', '))
1721 fm.formatlist(map(hex, parents), name='node', sep=', '))
1723 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1722 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1724 meta = marker.metadata().copy()
1723 meta = marker.metadata().copy()
1725 meta.pop('date', None)
1724 meta.pop('date', None)
1726 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1725 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1727 fm.plain('\n')
1726 fm.plain('\n')
1728
1727
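# Example: a sketch mirroring "hg debugobsolete" output (assumes ui/repo in
# scope and that the repository carries obsolescence markers).
fm = ui.formatter('debugobsolete', {})
for i, m in enumerate(obsolete.getmarkers(repo)):
    showmarker(fm, m, index=i)
fm.end()
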
1729 def finddate(ui, repo, date):
1728 def finddate(ui, repo, date):
1730 """Find the tipmost changeset that matches the given date spec"""
1729 """Find the tipmost changeset that matches the given date spec"""
1731
1730
1732 df = util.matchdate(date)
1731 df = util.matchdate(date)
1733 m = scmutil.matchall(repo)
1732 m = scmutil.matchall(repo)
1734 results = {}
1733 results = {}
1735
1734
1736 def prep(ctx, fns):
1735 def prep(ctx, fns):
1737 d = ctx.date()
1736 d = ctx.date()
1738 if df(d[0]):
1737 if df(d[0]):
1739 results[ctx.rev()] = d
1738 results[ctx.rev()] = d
1740
1739
1741 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1740 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1742 rev = ctx.rev()
1741 rev = ctx.rev()
1743 if rev in results:
1742 if rev in results:
1744 ui.status(_("found revision %s from %s\n") %
1743 ui.status(_("found revision %s from %s\n") %
1745 (rev, util.datestr(results[rev])))
1744 (rev, util.datestr(results[rev])))
1746 return '%d' % rev
1745 return '%d' % rev
1747
1746
1748 raise error.Abort(_("revision matching date not found"))
1747 raise error.Abort(_("revision matching date not found"))
1749
1748
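# Example (a sketch; the date spec follows the "hg log -d" syntax):
rev = finddate(ui, repo, '2017-05-01')    # returns e.g. '42' as a string
ctx = repo[rev]
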
1750 def increasingwindows(windowsize=8, sizelimit=512):
1749 def increasingwindows(windowsize=8, sizelimit=512):
1751 while True:
1750 while True:
1752 yield windowsize
1751 yield windowsize
1753 if windowsize < sizelimit:
1752 if windowsize < sizelimit:
1754 windowsize *= 2
1753 windowsize *= 2
1755
1754
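# Example: the window doubles until it reaches the size limit, then stays there.
assert (list(itertools.islice(increasingwindows(), 8)) ==
        [8, 16, 32, 64, 128, 256, 512, 512])
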
1756 class FileWalkError(Exception):
1755 class FileWalkError(Exception):
1757 pass
1756 pass
1758
1757
1759 def walkfilerevs(repo, match, follow, revs, fncache):
1758 def walkfilerevs(repo, match, follow, revs, fncache):
1760 '''Walks the file history for the matched files.
1759 '''Walks the file history for the matched files.
1761
1760
1762 Returns the changeset revs that are involved in the file history.
1761 Returns the changeset revs that are involved in the file history.
1763
1762
1764 Throws FileWalkError if the file history can't be walked using
1763 Throws FileWalkError if the file history can't be walked using
1765 filelogs alone.
1764 filelogs alone.
1766 '''
1765 '''
1767 wanted = set()
1766 wanted = set()
1768 copies = []
1767 copies = []
1769 minrev, maxrev = min(revs), max(revs)
1768 minrev, maxrev = min(revs), max(revs)
1770 def filerevgen(filelog, last):
1769 def filerevgen(filelog, last):
1771 """
1770 """
1772 Only files, no patterns. Check the history of each file.
1771 Only files, no patterns. Check the history of each file.
1773
1772
1774 Examines filelog entries within minrev, maxrev linkrev range
1773 Examines filelog entries within minrev, maxrev linkrev range
1775 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1774 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1776 tuples in backwards order
1775 tuples in backwards order
1777 """
1776 """
1778 cl_count = len(repo)
1777 cl_count = len(repo)
1779 revs = []
1778 revs = []
1780 for j in xrange(0, last + 1):
1779 for j in xrange(0, last + 1):
1781 linkrev = filelog.linkrev(j)
1780 linkrev = filelog.linkrev(j)
1782 if linkrev < minrev:
1781 if linkrev < minrev:
1783 continue
1782 continue
1786 # only yield revs for which we have the changelog entry; missing ones can
1785 # only yield revs for which we have the changelog entry; missing ones can
1787 # happen while doing "hg log" during a pull or commit
1786 # happen while doing "hg log" during a pull or commit
1786 if linkrev >= cl_count:
1785 if linkrev >= cl_count:
1787 break
1786 break
1788
1787
1789 parentlinkrevs = []
1788 parentlinkrevs = []
1790 for p in filelog.parentrevs(j):
1789 for p in filelog.parentrevs(j):
1791 if p != nullrev:
1790 if p != nullrev:
1792 parentlinkrevs.append(filelog.linkrev(p))
1791 parentlinkrevs.append(filelog.linkrev(p))
1793 n = filelog.node(j)
1792 n = filelog.node(j)
1794 revs.append((linkrev, parentlinkrevs,
1793 revs.append((linkrev, parentlinkrevs,
1795 follow and filelog.renamed(n)))
1794 follow and filelog.renamed(n)))
1796
1795
1797 return reversed(revs)
1796 return reversed(revs)
1798 def iterfiles():
1797 def iterfiles():
1799 pctx = repo['.']
1798 pctx = repo['.']
1800 for filename in match.files():
1799 for filename in match.files():
1801 if follow:
1800 if follow:
1802 if filename not in pctx:
1801 if filename not in pctx:
1803 raise error.Abort(_('cannot follow file not in parent '
1802 raise error.Abort(_('cannot follow file not in parent '
1804 'revision: "%s"') % filename)
1803 'revision: "%s"') % filename)
1805 yield filename, pctx[filename].filenode()
1804 yield filename, pctx[filename].filenode()
1806 else:
1805 else:
1807 yield filename, None
1806 yield filename, None
1808 for filename_node in copies:
1807 for filename_node in copies:
1809 yield filename_node
1808 yield filename_node
1810
1809
1811 for file_, node in iterfiles():
1810 for file_, node in iterfiles():
1812 filelog = repo.file(file_)
1811 filelog = repo.file(file_)
1813 if not len(filelog):
1812 if not len(filelog):
1814 if node is None:
1813 if node is None:
1815 # A zero count may be a directory or deleted file, so
1814 # A zero count may be a directory or deleted file, so
1816 # try to find matching entries on the slow path.
1815 # try to find matching entries on the slow path.
1817 if follow:
1816 if follow:
1818 raise error.Abort(
1817 raise error.Abort(
1819 _('cannot follow nonexistent file: "%s"') % file_)
1818 _('cannot follow nonexistent file: "%s"') % file_)
1820 raise FileWalkError("Cannot walk via filelog")
1819 raise FileWalkError("Cannot walk via filelog")
1821 else:
1820 else:
1822 continue
1821 continue
1823
1822
1824 if node is None:
1823 if node is None:
1825 last = len(filelog) - 1
1824 last = len(filelog) - 1
1826 else:
1825 else:
1827 last = filelog.rev(node)
1826 last = filelog.rev(node)
1828
1827
1829 # keep track of all ancestors of the file
1828 # keep track of all ancestors of the file
1830 ancestors = {filelog.linkrev(last)}
1829 ancestors = {filelog.linkrev(last)}
1831
1830
1832 # iterate from latest to oldest revision
1831 # iterate from latest to oldest revision
1833 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1832 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1834 if not follow:
1833 if not follow:
1835 if rev > maxrev:
1834 if rev > maxrev:
1836 continue
1835 continue
1837 else:
1836 else:
1838 # Note that last might not be the first interesting
1837 # Note that last might not be the first interesting
1839 # rev to us:
1838 # rev to us:
1840 # if the file has been changed after maxrev, we'll
1839 # if the file has been changed after maxrev, we'll
1841 # have linkrev(last) > maxrev, and we still need
1840 # have linkrev(last) > maxrev, and we still need
1842 # to explore the file graph
1841 # to explore the file graph
1843 if rev not in ancestors:
1842 if rev not in ancestors:
1844 continue
1843 continue
1845 # XXX insert 1327 fix here
1844 # XXX insert 1327 fix here
1846 if flparentlinkrevs:
1845 if flparentlinkrevs:
1847 ancestors.update(flparentlinkrevs)
1846 ancestors.update(flparentlinkrevs)
1848
1847
1849 fncache.setdefault(rev, []).append(file_)
1848 fncache.setdefault(rev, []).append(file_)
1850 wanted.add(rev)
1849 wanted.add(rev)
1851 if copied:
1850 if copied:
1852 copies.append(copied)
1851 copies.append(copied)
1853
1852
1854 return wanted
1853 return wanted
1855
1854
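# Illustrative sketch (editor's addition, not part of the original module):
# how a caller might drive walkfilerevs() directly.  The helper name and
# the 'path:README' pattern are hypothetical; everything else is defined
# above or imported at the top of this module.
def _walkfilerevs_example(repo):
    m = scmutil.match(repo[None], ['path:README'], {})
    revs = repo.revs('all()')
    fncache = {}
    try:
        wanted = walkfilerevs(repo, m, False, revs, fncache)
    except FileWalkError:
        # e.g. the pattern named a directory or a never-tracked file;
        # callers fall back to the changelog-scanning slow path (see
        # walkchangerevs() below)
        return set(), fncache
    # fncache now maps each wanted rev to the matched files it touches
    return wanted, fncache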
1856 class _followfilter(object):
1855 class _followfilter(object):
1857 def __init__(self, repo, onlyfirst=False):
1856 def __init__(self, repo, onlyfirst=False):
1858 self.repo = repo
1857 self.repo = repo
1859 self.startrev = nullrev
1858 self.startrev = nullrev
1860 self.roots = set()
1859 self.roots = set()
1861 self.onlyfirst = onlyfirst
1860 self.onlyfirst = onlyfirst
1862
1861
1863 def match(self, rev):
1862 def match(self, rev):
1864 def realparents(rev):
1863 def realparents(rev):
1865 if self.onlyfirst:
1864 if self.onlyfirst:
1866 return self.repo.changelog.parentrevs(rev)[0:1]
1865 return self.repo.changelog.parentrevs(rev)[0:1]
1867 else:
1866 else:
1868 return filter(lambda x: x != nullrev,
1867 return filter(lambda x: x != nullrev,
1869 self.repo.changelog.parentrevs(rev))
1868 self.repo.changelog.parentrevs(rev))
1870
1869
1871 if self.startrev == nullrev:
1870 if self.startrev == nullrev:
1872 self.startrev = rev
1871 self.startrev = rev
1873 return True
1872 return True
1874
1873
1875 if rev > self.startrev:
1874 if rev > self.startrev:
1876 # forward: all descendants
1875 # forward: all descendants
1877 if not self.roots:
1876 if not self.roots:
1878 self.roots.add(self.startrev)
1877 self.roots.add(self.startrev)
1879 for parent in realparents(rev):
1878 for parent in realparents(rev):
1880 if parent in self.roots:
1879 if parent in self.roots:
1881 self.roots.add(rev)
1880 self.roots.add(rev)
1882 return True
1881 return True
1883 else:
1882 else:
1884 # backwards: all parents
1883 # backwards: all parents
1885 if not self.roots:
1884 if not self.roots:
1886 self.roots.update(realparents(self.startrev))
1885 self.roots.update(realparents(self.startrev))
1887 if rev in self.roots:
1886 if rev in self.roots:
1888 self.roots.remove(rev)
1887 self.roots.remove(rev)
1889 self.roots.update(realparents(rev))
1888 self.roots.update(realparents(rev))
1890 return True
1889 return True
1891
1890
1892 return False
1891 return False
1893
1892
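# Illustrative sketch (editor's addition): _followfilter is stateful and is
# fed revisions one at a time in the direction the caller walks.  Walking
# backwards from a starting revision, match() answers "is this rev an
# ancestor of the revisions seen so far?", which is how the --prune
# handling in walkchangerevs() below uses it.  The helper name is
# hypothetical.
def _followfilter_example(repo, startrev):
    ff = _followfilter(repo, onlyfirst=False)
    ancestors = []
    for rev in xrange(startrev, -1, -1):
        if ff.match(rev):
            ancestors.append(rev)
    return ancestors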
1894 def walkchangerevs(repo, match, opts, prepare):
1893 def walkchangerevs(repo, match, opts, prepare):
1895 '''Iterate over files and the revs in which they changed.
1894 '''Iterate over files and the revs in which they changed.
1896
1895
1897 Callers most commonly need to iterate backwards over the history
1896 Callers most commonly need to iterate backwards over the history
1898 in which they are interested. Doing so has awful (quadratic-looking)
1897 in which they are interested. Doing so has awful (quadratic-looking)
1899 performance, so we use iterators in a "windowed" way.
1898 performance, so we use iterators in a "windowed" way.
1900
1899
1901 We walk a window of revisions in the desired order. Within the
1900 We walk a window of revisions in the desired order. Within the
1902 window, we first walk forwards to gather data, then in the desired
1901 window, we first walk forwards to gather data, then in the desired
1903 order (usually backwards) to display it.
1902 order (usually backwards) to display it.
1904
1903
1905 This function returns an iterator yielding contexts. Before
1904 This function returns an iterator yielding contexts. Before
1906 yielding each context, the iterator will first call the prepare
1905 yielding each context, the iterator will first call the prepare
1907 function on each context in the window in forward order.'''
1906 function on each context in the window in forward order.'''
1908
1907
1909 follow = opts.get('follow') or opts.get('follow_first')
1908 follow = opts.get('follow') or opts.get('follow_first')
1910 revs = _logrevs(repo, opts)
1909 revs = _logrevs(repo, opts)
1911 if not revs:
1910 if not revs:
1912 return []
1911 return []
1913 wanted = set()
1912 wanted = set()
1914 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1913 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1915 opts.get('removed'))
1914 opts.get('removed'))
1916 fncache = {}
1915 fncache = {}
1917 change = repo.changectx
1916 change = repo.changectx
1918
1917
1919 # First step is to fill wanted, the set of revisions that we want to yield.
1918 # First step is to fill wanted, the set of revisions that we want to yield.
1920 # When it does not induce extra cost, we also fill fncache for revisions in
1919 # When it does not induce extra cost, we also fill fncache for revisions in
1921 # wanted: a cache of filenames that were changed (ctx.files()) and that
1920 # wanted: a cache of filenames that were changed (ctx.files()) and that
1922 # match the file filtering conditions.
1921 # match the file filtering conditions.
1923
1922
1924 if match.always():
1923 if match.always():
1925 # No files, no patterns. Display all revs.
1924 # No files, no patterns. Display all revs.
1926 wanted = revs
1925 wanted = revs
1927 elif not slowpath:
1926 elif not slowpath:
1928 # We only have to read through the filelog to find wanted revisions
1927 # We only have to read through the filelog to find wanted revisions
1929
1928
1930 try:
1929 try:
1931 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1930 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1932 except FileWalkError:
1931 except FileWalkError:
1933 slowpath = True
1932 slowpath = True
1934
1933
1935 # We decided to fall back to the slowpath because at least one
1934 # We decided to fall back to the slowpath because at least one
1936 # of the paths was not a file. Check to see if at least one of them
1935 # of the paths was not a file. Check to see if at least one of them
1937 # existed in history, otherwise simply return
1936 # existed in history, otherwise simply return
1938 for path in match.files():
1937 for path in match.files():
1939 if path == '.' or path in repo.store:
1938 if path == '.' or path in repo.store:
1940 break
1939 break
1941 else:
1940 else:
1942 return []
1941 return []
1943
1942
1944 if slowpath:
1943 if slowpath:
1945 # We have to read the changelog to match filenames against
1944 # We have to read the changelog to match filenames against
1946 # changed files
1945 # changed files
1947
1946
1948 if follow:
1947 if follow:
1949 raise error.Abort(_('can only follow copies/renames for explicit '
1948 raise error.Abort(_('can only follow copies/renames for explicit '
1950 'filenames'))
1949 'filenames'))
1951
1950
1952 # The slow path checks files modified in every changeset.
1951 # The slow path checks files modified in every changeset.
1953 # This is really slow on large repos, so compute the set lazily.
1952 # This is really slow on large repos, so compute the set lazily.
1954 class lazywantedset(object):
1953 class lazywantedset(object):
1955 def __init__(self):
1954 def __init__(self):
1956 self.set = set()
1955 self.set = set()
1957 self.revs = set(revs)
1956 self.revs = set(revs)
1958
1957
1959 # No need to worry about locality here because it will be accessed
1958 # No need to worry about locality here because it will be accessed
1960 # in the same order as the increasing window below.
1959 # in the same order as the increasing window below.
1961 def __contains__(self, value):
1960 def __contains__(self, value):
1962 if value in self.set:
1961 if value in self.set:
1963 return True
1962 return True
1964 elif value not in self.revs:
1963 elif value not in self.revs:
1965 return False
1964 return False
1966 else:
1965 else:
1967 self.revs.discard(value)
1966 self.revs.discard(value)
1968 ctx = change(value)
1967 ctx = change(value)
1969 matches = filter(match, ctx.files())
1968 matches = filter(match, ctx.files())
1970 if matches:
1969 if matches:
1971 fncache[value] = matches
1970 fncache[value] = matches
1972 self.set.add(value)
1971 self.set.add(value)
1973 return True
1972 return True
1974 return False
1973 return False
1975
1974
1976 def discard(self, value):
1975 def discard(self, value):
1977 self.revs.discard(value)
1976 self.revs.discard(value)
1978 self.set.discard(value)
1977 self.set.discard(value)
1979
1978
1980 wanted = lazywantedset()
1979 wanted = lazywantedset()
1981
1980
1982 # it might be worthwhile to do this in the iterator if the rev range
1981 # it might be worthwhile to do this in the iterator if the rev range
1983 # is descending and the prune args are all within that range
1982 # is descending and the prune args are all within that range
1984 for rev in opts.get('prune', ()):
1983 for rev in opts.get('prune', ()):
1985 rev = repo[rev].rev()
1984 rev = repo[rev].rev()
1986 ff = _followfilter(repo)
1985 ff = _followfilter(repo)
1987 stop = min(revs[0], revs[-1])
1986 stop = min(revs[0], revs[-1])
1988 for x in xrange(rev, stop - 1, -1):
1987 for x in xrange(rev, stop - 1, -1):
1989 if ff.match(x):
1988 if ff.match(x):
1990 wanted = wanted - [x]
1989 wanted = wanted - [x]
1991
1990
1992 # Now that wanted is correctly initialized, we can iterate over the
1991 # Now that wanted is correctly initialized, we can iterate over the
1993 # revision range, yielding only revisions in wanted.
1992 # revision range, yielding only revisions in wanted.
1994 def iterate():
1993 def iterate():
1995 if follow and match.always():
1994 if follow and match.always():
1996 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1995 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1997 def want(rev):
1996 def want(rev):
1998 return ff.match(rev) and rev in wanted
1997 return ff.match(rev) and rev in wanted
1999 else:
1998 else:
2000 def want(rev):
1999 def want(rev):
2001 return rev in wanted
2000 return rev in wanted
2002
2001
2003 it = iter(revs)
2002 it = iter(revs)
2004 stopiteration = False
2003 stopiteration = False
2005 for windowsize in increasingwindows():
2004 for windowsize in increasingwindows():
2006 nrevs = []
2005 nrevs = []
2007 for i in xrange(windowsize):
2006 for i in xrange(windowsize):
2008 rev = next(it, None)
2007 rev = next(it, None)
2009 if rev is None:
2008 if rev is None:
2010 stopiteration = True
2009 stopiteration = True
2011 break
2010 break
2012 elif want(rev):
2011 elif want(rev):
2013 nrevs.append(rev)
2012 nrevs.append(rev)
2014 for rev in sorted(nrevs):
2013 for rev in sorted(nrevs):
2015 fns = fncache.get(rev)
2014 fns = fncache.get(rev)
2016 ctx = change(rev)
2015 ctx = change(rev)
2017 if not fns:
2016 if not fns:
2018 def fns_generator():
2017 def fns_generator():
2019 for f in ctx.files():
2018 for f in ctx.files():
2020 if match(f):
2019 if match(f):
2021 yield f
2020 yield f
2022 fns = fns_generator()
2021 fns = fns_generator()
2023 prepare(ctx, fns)
2022 prepare(ctx, fns)
2024 for rev in nrevs:
2023 for rev in nrevs:
2025 yield change(rev)
2024 yield change(rev)
2026
2025
2027 if stopiteration:
2026 if stopiteration:
2028 break
2027 break
2029
2028
2030 return iterate()
2029 return iterate()
2031
2030
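# Illustrative sketch (editor's addition): the expected calling pattern for
# walkchangerevs().  prepare() runs once per context in forward order
# within each window, before the contexts are yielded in the requested
# (usually reverse) order, so consumers can gather data ahead of display.
# The file name and options below are hypothetical.
def _walkchangerevs_example(ui, repo):
    matcher = scmutil.match(repo[None], ['README'], {})
    seen = {}

    def prepare(ctx, fns):
        # fns is the (possibly lazy) iterable of files in ctx that match
        seen[ctx.rev()] = list(fns)

    for ctx in walkchangerevs(repo, matcher, {'rev': ['tip:0']}, prepare):
        ui.write('%d: %s\n' % (ctx.rev(), ', '.join(seen[ctx.rev()])))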
2032 def _makefollowlogfilematcher(repo, files, followfirst):
2031 def _makefollowlogfilematcher(repo, files, followfirst):
2033 # When displaying a revision with --patch --follow FILE, we have
2032 # When displaying a revision with --patch --follow FILE, we have
2034 # to know which file of the revision must be diffed. With
2033 # to know which file of the revision must be diffed. With
2035 # --follow, we want the names of the ancestors of FILE in the
2034 # --follow, we want the names of the ancestors of FILE in the
2036 # revision, stored in "fcache". "fcache" is populated by
2035 # revision, stored in "fcache". "fcache" is populated by
2037 # reproducing the graph traversal already done by --follow revset
2036 # reproducing the graph traversal already done by --follow revset
2038 # and relating revs to file names (which is not "correct" but
2037 # and relating revs to file names (which is not "correct" but
2039 # good enough).
2038 # good enough).
2040 fcache = {}
2039 fcache = {}
2041 fcacheready = [False]
2040 fcacheready = [False]
2042 pctx = repo['.']
2041 pctx = repo['.']
2043
2042
2044 def populate():
2043 def populate():
2045 for fn in files:
2044 for fn in files:
2046 fctx = pctx[fn]
2045 fctx = pctx[fn]
2047 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2046 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2048 for c in fctx.ancestors(followfirst=followfirst):
2047 for c in fctx.ancestors(followfirst=followfirst):
2049 fcache.setdefault(c.rev(), set()).add(c.path())
2048 fcache.setdefault(c.rev(), set()).add(c.path())
2050
2049
2051 def filematcher(rev):
2050 def filematcher(rev):
2052 if not fcacheready[0]:
2051 if not fcacheready[0]:
2053 # Lazy initialization
2052 # Lazy initialization
2054 fcacheready[0] = True
2053 fcacheready[0] = True
2055 populate()
2054 populate()
2056 return scmutil.matchfiles(repo, fcache.get(rev, []))
2055 return scmutil.matchfiles(repo, fcache.get(rev, []))
2057
2056
2058 return filematcher
2057 return filematcher
2059
2058
2060 def _makenofollowlogfilematcher(repo, pats, opts):
2059 def _makenofollowlogfilematcher(repo, pats, opts):
2061 '''hook for extensions to override the filematcher for non-follow cases'''
2060 '''hook for extensions to override the filematcher for non-follow cases'''
2062 return None
2061 return None
2063
2062
2064 def _makelogrevset(repo, pats, opts, revs):
2063 def _makelogrevset(repo, pats, opts, revs):
2065 """Return (expr, filematcher) where expr is a revset string built
2064 """Return (expr, filematcher) where expr is a revset string built
2066 from log options and file patterns or None. If --stat or --patch
2065 from log options and file patterns or None. If --stat or --patch
2067 are not passed, filematcher is None. Otherwise it is a callable
2066 are not passed, filematcher is None. Otherwise it is a callable
2068 taking a revision number and returning a match object filtering
2067 taking a revision number and returning a match object filtering
2069 the files to be detailed when displaying the revision.
2068 the files to be detailed when displaying the revision.
2070 """
2069 """
2071 opt2revset = {
2070 opt2revset = {
2072 'no_merges': ('not merge()', None),
2071 'no_merges': ('not merge()', None),
2073 'only_merges': ('merge()', None),
2072 'only_merges': ('merge()', None),
2074 '_ancestors': ('ancestors(%(val)s)', None),
2073 '_ancestors': ('ancestors(%(val)s)', None),
2075 '_fancestors': ('_firstancestors(%(val)s)', None),
2074 '_fancestors': ('_firstancestors(%(val)s)', None),
2076 '_descendants': ('descendants(%(val)s)', None),
2075 '_descendants': ('descendants(%(val)s)', None),
2077 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2076 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2078 '_matchfiles': ('_matchfiles(%(val)s)', None),
2077 '_matchfiles': ('_matchfiles(%(val)s)', None),
2079 'date': ('date(%(val)r)', None),
2078 'date': ('date(%(val)r)', None),
2080 'branch': ('branch(%(val)r)', ' or '),
2079 'branch': ('branch(%(val)r)', ' or '),
2081 '_patslog': ('filelog(%(val)r)', ' or '),
2080 '_patslog': ('filelog(%(val)r)', ' or '),
2082 '_patsfollow': ('follow(%(val)r)', ' or '),
2081 '_patsfollow': ('follow(%(val)r)', ' or '),
2083 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2082 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2084 'keyword': ('keyword(%(val)r)', ' or '),
2083 'keyword': ('keyword(%(val)r)', ' or '),
2085 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2084 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2086 'user': ('user(%(val)r)', ' or '),
2085 'user': ('user(%(val)r)', ' or '),
2087 }
2086 }
2088
2087
2089 opts = dict(opts)
2088 opts = dict(opts)
2090 # follow or not follow?
2089 # follow or not follow?
2091 follow = opts.get('follow') or opts.get('follow_first')
2090 follow = opts.get('follow') or opts.get('follow_first')
2092 if opts.get('follow_first'):
2091 if opts.get('follow_first'):
2093 followfirst = 1
2092 followfirst = 1
2094 else:
2093 else:
2095 followfirst = 0
2094 followfirst = 0
2096 # --follow with FILE behavior depends on revs...
2095 # --follow with FILE behavior depends on revs...
2097 it = iter(revs)
2096 it = iter(revs)
2098 startrev = next(it)
2097 startrev = next(it)
2099 followdescendants = startrev < next(it, startrev)
2098 followdescendants = startrev < next(it, startrev)
2100
2099
2101 # branch and only_branch are really aliases and must be handled at
2100 # branch and only_branch are really aliases and must be handled at
2102 # the same time
2101 # the same time
2103 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2102 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2104 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2103 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2105 # pats/include/exclude are passed to match.match() directly in
2104 # pats/include/exclude are passed to match.match() directly in
2106 # _matchfiles() revset but walkchangerevs() builds its matcher with
2105 # _matchfiles() revset but walkchangerevs() builds its matcher with
2107 # scmutil.match(). The difference is input pats are globbed on
2106 # scmutil.match(). The difference is input pats are globbed on
2108 # platforms without shell expansion (windows).
2107 # platforms without shell expansion (windows).
2109 wctx = repo[None]
2108 wctx = repo[None]
2110 match, pats = scmutil.matchandpats(wctx, pats, opts)
2109 match, pats = scmutil.matchandpats(wctx, pats, opts)
2111 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2110 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2112 opts.get('removed'))
2111 opts.get('removed'))
2113 if not slowpath:
2112 if not slowpath:
2114 for f in match.files():
2113 for f in match.files():
2115 if follow and f not in wctx:
2114 if follow and f not in wctx:
2116 # If the file exists, it may be a directory, so let it
2115 # If the file exists, it may be a directory, so let it
2117 # take the slow path.
2116 # take the slow path.
2118 if os.path.exists(repo.wjoin(f)):
2117 if os.path.exists(repo.wjoin(f)):
2119 slowpath = True
2118 slowpath = True
2120 continue
2119 continue
2121 else:
2120 else:
2122 raise error.Abort(_('cannot follow file not in parent '
2121 raise error.Abort(_('cannot follow file not in parent '
2123 'revision: "%s"') % f)
2122 'revision: "%s"') % f)
2124 filelog = repo.file(f)
2123 filelog = repo.file(f)
2125 if not filelog:
2124 if not filelog:
2126 # A zero count may be a directory or deleted file, so
2125 # A zero count may be a directory or deleted file, so
2127 # try to find matching entries on the slow path.
2126 # try to find matching entries on the slow path.
2128 if follow:
2127 if follow:
2129 raise error.Abort(
2128 raise error.Abort(
2130 _('cannot follow nonexistent file: "%s"') % f)
2129 _('cannot follow nonexistent file: "%s"') % f)
2131 slowpath = True
2130 slowpath = True
2132
2131
2133 # We decided to fall back to the slowpath because at least one
2132 # We decided to fall back to the slowpath because at least one
2134 # of the paths was not a file. Check to see if at least one of them
2133 # of the paths was not a file. Check to see if at least one of them
2135 # existed in history - in that case, we'll continue down the
2134 # existed in history - in that case, we'll continue down the
2136 # slowpath; otherwise, we can turn off the slowpath
2135 # slowpath; otherwise, we can turn off the slowpath
2137 if slowpath:
2136 if slowpath:
2138 for path in match.files():
2137 for path in match.files():
2139 if path == '.' or path in repo.store:
2138 if path == '.' or path in repo.store:
2140 break
2139 break
2141 else:
2140 else:
2142 slowpath = False
2141 slowpath = False
2143
2142
2144 fpats = ('_patsfollow', '_patsfollowfirst')
2143 fpats = ('_patsfollow', '_patsfollowfirst')
2145 fnopats = (('_ancestors', '_fancestors'),
2144 fnopats = (('_ancestors', '_fancestors'),
2146 ('_descendants', '_fdescendants'))
2145 ('_descendants', '_fdescendants'))
2147 if slowpath:
2146 if slowpath:
2148 # See walkchangerevs() slow path.
2147 # See walkchangerevs() slow path.
2149 #
2148 #
2150 # pats/include/exclude cannot be represented as separate
2149 # pats/include/exclude cannot be represented as separate
2151 # revset expressions as their filtering logic applies at file
2150 # revset expressions as their filtering logic applies at file
2152 # level. For instance "-I a -X b" matches a revision touching
2151 # level. For instance "-I a -X b" matches a revision touching
2153 # "a" and "b" while "file(a) and not file(b)" does
2152 # "a" and "b" while "file(a) and not file(b)" does
2154 # not. Besides, filesets are evaluated against the working
2153 # not. Besides, filesets are evaluated against the working
2155 # directory.
2154 # directory.
2156 matchargs = ['r:', 'd:relpath']
2155 matchargs = ['r:', 'd:relpath']
2157 for p in pats:
2156 for p in pats:
2158 matchargs.append('p:' + p)
2157 matchargs.append('p:' + p)
2159 for p in opts.get('include', []):
2158 for p in opts.get('include', []):
2160 matchargs.append('i:' + p)
2159 matchargs.append('i:' + p)
2161 for p in opts.get('exclude', []):
2160 for p in opts.get('exclude', []):
2162 matchargs.append('x:' + p)
2161 matchargs.append('x:' + p)
2163 matchargs = ','.join(('%r' % p) for p in matchargs)
2162 matchargs = ','.join(('%r' % p) for p in matchargs)
2164 opts['_matchfiles'] = matchargs
2163 opts['_matchfiles'] = matchargs
2165 if follow:
2164 if follow:
2166 opts[fnopats[0][followfirst]] = '.'
2165 opts[fnopats[0][followfirst]] = '.'
2167 else:
2166 else:
2168 if follow:
2167 if follow:
2169 if pats:
2168 if pats:
2170 # follow() revset interprets its file argument as a
2169 # follow() revset interprets its file argument as a
2171 # manifest entry, so use match.files(), not pats.
2170 # manifest entry, so use match.files(), not pats.
2172 opts[fpats[followfirst]] = list(match.files())
2171 opts[fpats[followfirst]] = list(match.files())
2173 else:
2172 else:
2174 op = fnopats[followdescendants][followfirst]
2173 op = fnopats[followdescendants][followfirst]
2175 opts[op] = 'rev(%d)' % startrev
2174 opts[op] = 'rev(%d)' % startrev
2176 else:
2175 else:
2177 opts['_patslog'] = list(pats)
2176 opts['_patslog'] = list(pats)
2178
2177
2179 filematcher = None
2178 filematcher = None
2180 if opts.get('patch') or opts.get('stat'):
2179 if opts.get('patch') or opts.get('stat'):
2181 # When following files, track renames via a special matcher.
2180 # When following files, track renames via a special matcher.
2182 # If we're forced to take the slowpath it means we're following
2181 # If we're forced to take the slowpath it means we're following
2183 # at least one pattern/directory, so don't bother with rename tracking.
2182 # at least one pattern/directory, so don't bother with rename tracking.
2184 if follow and not match.always() and not slowpath:
2183 if follow and not match.always() and not slowpath:
2185 # _makefollowlogfilematcher expects its files argument to be
2184 # _makefollowlogfilematcher expects its files argument to be
2186 # relative to the repo root, so use match.files(), not pats.
2185 # relative to the repo root, so use match.files(), not pats.
2187 filematcher = _makefollowlogfilematcher(repo, match.files(),
2186 filematcher = _makefollowlogfilematcher(repo, match.files(),
2188 followfirst)
2187 followfirst)
2189 else:
2188 else:
2190 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2189 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2191 if filematcher is None:
2190 if filematcher is None:
2192 filematcher = lambda rev: match
2191 filematcher = lambda rev: match
2193
2192
2194 expr = []
2193 expr = []
2195 for op, val in sorted(opts.iteritems()):
2194 for op, val in sorted(opts.iteritems()):
2196 if not val:
2195 if not val:
2197 continue
2196 continue
2198 if op not in opt2revset:
2197 if op not in opt2revset:
2199 continue
2198 continue
2200 revop, andor = opt2revset[op]
2199 revop, andor = opt2revset[op]
2201 if '%(val)' not in revop:
2200 if '%(val)' not in revop:
2202 expr.append(revop)
2201 expr.append(revop)
2203 else:
2202 else:
2204 if not isinstance(val, list):
2203 if not isinstance(val, list):
2205 e = revop % {'val': val}
2204 e = revop % {'val': val}
2206 else:
2205 else:
2207 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2206 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2208 expr.append(e)
2207 expr.append(e)
2209
2208
2210 if expr:
2209 if expr:
2211 expr = '(' + ' and '.join(expr) + ')'
2210 expr = '(' + ' and '.join(expr) + ')'
2212 else:
2211 else:
2213 expr = None
2212 expr = None
2214 return expr, filematcher
2213 return expr, filematcher
2215
2214
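# Illustrative sketch (editor's addition): roughly what _makelogrevset()
# returns for a "hg log -u alice -k bug --no-merges" style invocation,
# assuming a non-empty repository.  The option values are hypothetical.
def _makelogrevset_example(repo):
    opts = {'user': ['alice'], 'keyword': ['bug'], 'no_merges': True}
    revs = _logrevs(repo, opts)
    expr, filematcher = _makelogrevset(repo, [], opts, revs)
    # expr is now something like
    #   "((keyword('bug')) and not merge() and (user('alice')))"
    # -- options are combined with "and", list values within one option
    # with "or".  filematcher is None because neither --patch nor --stat
    # was requested.
    return expr, filematcher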
2216 def _logrevs(repo, opts):
2215 def _logrevs(repo, opts):
2217 # Default --rev value depends on --follow but --follow behavior
2216 # Default --rev value depends on --follow but --follow behavior
2218 # depends on revisions resolved from --rev...
2217 # depends on revisions resolved from --rev...
2219 follow = opts.get('follow') or opts.get('follow_first')
2218 follow = opts.get('follow') or opts.get('follow_first')
2220 if opts.get('rev'):
2219 if opts.get('rev'):
2221 revs = scmutil.revrange(repo, opts['rev'])
2220 revs = scmutil.revrange(repo, opts['rev'])
2222 elif follow and repo.dirstate.p1() == nullid:
2221 elif follow and repo.dirstate.p1() == nullid:
2223 revs = smartset.baseset()
2222 revs = smartset.baseset()
2224 elif follow:
2223 elif follow:
2225 revs = repo.revs('reverse(:.)')
2224 revs = repo.revs('reverse(:.)')
2226 else:
2225 else:
2227 revs = smartset.spanset(repo)
2226 revs = smartset.spanset(repo)
2228 revs.reverse()
2227 revs.reverse()
2229 return revs
2228 return revs
2230
2229
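# Editor's note (illustrative summary, not part of the original module):
# how _logrevs() above resolves the default revision set:
#
#   --rev REV given          -> scmutil.revrange(repo, opts['rev'])
#   --follow, no wdir parent -> empty baseset (nothing to follow)
#   --follow                 -> repo.revs('reverse(:.)')
#   otherwise                -> every revision, newest first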
2231 def getgraphlogrevs(repo, pats, opts):
2230 def getgraphlogrevs(repo, pats, opts):
2232 """Return (revs, expr, filematcher) where revs is an iterable of
2231 """Return (revs, expr, filematcher) where revs is an iterable of
2233 revision numbers, expr is a revset string built from log options
2232 revision numbers, expr is a revset string built from log options
2234 and file patterns or None, and used to filter 'revs'. If --stat or
2233 and file patterns or None, and used to filter 'revs'. If --stat or
2235 --patch are not passed, filematcher is None. Otherwise it is a
2234 --patch are not passed, filematcher is None. Otherwise it is a
2236 callable taking a revision number and returning a match object
2235 callable taking a revision number and returning a match object
2237 filtering the files to be detailed when displaying the revision.
2236 filtering the files to be detailed when displaying the revision.
2238 """
2237 """
2239 limit = loglimit(opts)
2238 limit = loglimit(opts)
2240 revs = _logrevs(repo, opts)
2239 revs = _logrevs(repo, opts)
2241 if not revs:
2240 if not revs:
2242 return smartset.baseset(), None, None
2241 return smartset.baseset(), None, None
2243 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2242 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2244 if opts.get('rev'):
2243 if opts.get('rev'):
2245 # User-specified revs might be unsorted, but don't sort before
2244 # User-specified revs might be unsorted, but don't sort before
2246 # _makelogrevset because it might depend on the order of revs
2245 # _makelogrevset because it might depend on the order of revs
2247 if not (revs.isdescending() or revs.istopo()):
2246 if not (revs.isdescending() or revs.istopo()):
2248 revs.sort(reverse=True)
2247 revs.sort(reverse=True)
2249 if expr:
2248 if expr:
2250 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2249 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2251 revs = matcher(repo, revs)
2250 revs = matcher(repo, revs)
2252 if limit is not None:
2251 if limit is not None:
2253 limitedrevs = []
2252 limitedrevs = []
2254 for idx, rev in enumerate(revs):
2253 for idx, rev in enumerate(revs):
2255 if idx >= limit:
2254 if idx >= limit:
2256 break
2255 break
2257 limitedrevs.append(rev)
2256 limitedrevs.append(rev)
2258 revs = smartset.baseset(limitedrevs)
2257 revs = smartset.baseset(limitedrevs)
2259
2258
2260 return revs, expr, filematcher
2259 return revs, expr, filematcher
2261
2260
2262 def getlogrevs(repo, pats, opts):
2261 def getlogrevs(repo, pats, opts):
2263 """Return (revs, expr, filematcher) where revs is an iterable of
2262 """Return (revs, expr, filematcher) where revs is an iterable of
2264 revision numbers, expr is a revset string built from log options
2263 revision numbers, expr is a revset string built from log options
2265 and file patterns or None, and used to filter 'revs'. If --stat or
2264 and file patterns or None, and used to filter 'revs'. If --stat or
2266 --patch are not passed, filematcher is None. Otherwise it is a
2265 --patch are not passed, filematcher is None. Otherwise it is a
2267 callable taking a revision number and returning a match object
2266 callable taking a revision number and returning a match object
2268 filtering the files to be detailed when displaying the revision.
2267 filtering the files to be detailed when displaying the revision.
2269 """
2268 """
2270 limit = loglimit(opts)
2269 limit = loglimit(opts)
2271 revs = _logrevs(repo, opts)
2270 revs = _logrevs(repo, opts)
2272 if not revs:
2271 if not revs:
2273 return smartset.baseset([]), None, None
2272 return smartset.baseset([]), None, None
2274 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2273 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2275 if expr:
2274 if expr:
2276 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2275 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2277 revs = matcher(repo, revs)
2276 revs = matcher(repo, revs)
2278 if limit is not None:
2277 if limit is not None:
2279 limitedrevs = []
2278 limitedrevs = []
2280 for idx, r in enumerate(revs):
2279 for idx, r in enumerate(revs):
2281 if limit <= idx:
2280 if limit <= idx:
2282 break
2281 break
2283 limitedrevs.append(r)
2282 limitedrevs.append(r)
2284 revs = smartset.baseset(limitedrevs)
2283 revs = smartset.baseset(limitedrevs)
2285
2284
2286 return revs, expr, filematcher
2285 return revs, expr, filematcher
2287
2286
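# Illustrative sketch (editor's addition): how a log-like command is
# expected to consume getlogrevs()/getgraphlogrevs().  filematcher is None
# here because neither --patch nor --stat is requested; the option values
# are hypothetical.
def _getlogrevs_example(ui, repo):
    opts = {'user': ['alice'], 'limit': '5'}
    revs, expr, filematcher = getlogrevs(repo, [], opts)
    displayer = show_changeset(ui, repo, opts, buffered=False)
    for rev in revs:
        displayer.show(repo[rev])
    displayer.close()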
2288 def _graphnodeformatter(ui, displayer):
2287 def _graphnodeformatter(ui, displayer):
2289 spec = ui.config('ui', 'graphnodetemplate')
2288 spec = ui.config('ui', 'graphnodetemplate')
2290 if not spec:
2289 if not spec:
2291 return templatekw.showgraphnode # fast path for "{graphnode}"
2290 return templatekw.showgraphnode # fast path for "{graphnode}"
2292
2291
2293 spec = templater.unquotestring(spec)
2292 spec = templater.unquotestring(spec)
2294 templ = formatter.gettemplater(ui, 'graphnode', spec)
2293 templ = formatter.gettemplater(ui, 'graphnode', spec)
2295 cache = {}
2294 cache = {}
2296 if isinstance(displayer, changeset_templater):
2295 if isinstance(displayer, changeset_templater):
2297 cache = displayer.cache # reuse cache of slow templates
2296 cache = displayer.cache # reuse cache of slow templates
2298 props = templatekw.keywords.copy()
2297 props = templatekw.keywords.copy()
2299 props['templ'] = templ
2298 props['templ'] = templ
2300 props['cache'] = cache
2299 props['cache'] = cache
2301 def formatnode(repo, ctx):
2300 def formatnode(repo, ctx):
2302 props['ctx'] = ctx
2301 props['ctx'] = ctx
2303 props['repo'] = repo
2302 props['repo'] = repo
2304 props['ui'] = repo.ui
2303 props['ui'] = repo.ui
2305 props['revcache'] = {}
2304 props['revcache'] = {}
2306 return templater.stringify(templ('graphnode', **props))
2305 return templater.stringify(templ('graphnode', **props))
2307 return formatnode
2306 return formatnode
2308
2307
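# Illustrative sketch (editor's addition): _graphnodeformatter() only kicks
# in when ui.graphnodetemplate is set; otherwise the fast {graphnode}
# keyword is used.  A hypothetical way to exercise it, equivalent to
# setting "graphnodetemplate = {if(tags, "@", "o")}" in the [ui] section
# of an hgrc:
def _graphnodeformatter_example(ui, repo, displayer):
    ui.setconfig('ui', 'graphnodetemplate', '{if(tags, "@", "o")}')
    formatnode = _graphnodeformatter(ui, displayer)
    return [formatnode(repo, repo[r]) for r in repo.revs('all()')]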
2309 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2308 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2310 filematcher=None):
2309 filematcher=None):
2311 formatnode = _graphnodeformatter(ui, displayer)
2310 formatnode = _graphnodeformatter(ui, displayer)
2312 state = graphmod.asciistate()
2311 state = graphmod.asciistate()
2313 styles = state['styles']
2312 styles = state['styles']
2314
2313
2315 # only set graph styling if HGPLAIN is not set.
2314 # only set graph styling if HGPLAIN is not set.
2316 if ui.plain('graph'):
2315 if ui.plain('graph'):
2317 # set all edge styles to |, the default pre-3.8 behaviour
2316 # set all edge styles to |, the default pre-3.8 behaviour
2318 styles.update(dict.fromkeys(styles, '|'))
2317 styles.update(dict.fromkeys(styles, '|'))
2319 else:
2318 else:
2320 edgetypes = {
2319 edgetypes = {
2321 'parent': graphmod.PARENT,
2320 'parent': graphmod.PARENT,
2322 'grandparent': graphmod.GRANDPARENT,
2321 'grandparent': graphmod.GRANDPARENT,
2323 'missing': graphmod.MISSINGPARENT
2322 'missing': graphmod.MISSINGPARENT
2324 }
2323 }
2325 for name, key in edgetypes.items():
2324 for name, key in edgetypes.items():
2326 # experimental config: experimental.graphstyle.*
2325 # experimental config: experimental.graphstyle.*
2327 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2326 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2328 styles[key])
2327 styles[key])
2329 if not styles[key]:
2328 if not styles[key]:
2330 styles[key] = None
2329 styles[key] = None
2331
2330
2332 # experimental config: experimental.graphshorten
2331 # experimental config: experimental.graphshorten
2333 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2332 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2334
2333
2335 for rev, type, ctx, parents in dag:
2334 for rev, type, ctx, parents in dag:
2336 char = formatnode(repo, ctx)
2335 char = formatnode(repo, ctx)
2337 copies = None
2336 copies = None
2338 if getrenamed and ctx.rev():
2337 if getrenamed and ctx.rev():
2339 copies = []
2338 copies = []
2340 for fn in ctx.files():
2339 for fn in ctx.files():
2341 rename = getrenamed(fn, ctx.rev())
2340 rename = getrenamed(fn, ctx.rev())
2342 if rename:
2341 if rename:
2343 copies.append((fn, rename[0]))
2342 copies.append((fn, rename[0]))
2344 revmatchfn = None
2343 revmatchfn = None
2345 if filematcher is not None:
2344 if filematcher is not None:
2346 revmatchfn = filematcher(ctx.rev())
2345 revmatchfn = filematcher(ctx.rev())
2347 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2346 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2348 lines = displayer.hunk.pop(rev).split('\n')
2347 lines = displayer.hunk.pop(rev).split('\n')
2349 if not lines[-1]:
2348 if not lines[-1]:
2350 del lines[-1]
2349 del lines[-1]
2351 displayer.flush(ctx)
2350 displayer.flush(ctx)
2352 edges = edgefn(type, char, lines, state, rev, parents)
2351 edges = edgefn(type, char, lines, state, rev, parents)
2353 for type, char, lines, coldata in edges:
2352 for type, char, lines, coldata in edges:
2354 graphmod.ascii(ui, state, type, char, lines, coldata)
2353 graphmod.ascii(ui, state, type, char, lines, coldata)
2355 displayer.close()
2354 displayer.close()
2356
2355
2357 def graphlog(ui, repo, pats, opts):
2356 def graphlog(ui, repo, pats, opts):
2358 # Parameters are identical to log command ones
2357 # Parameters are identical to log command ones
2359 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2358 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2360 revdag = graphmod.dagwalker(repo, revs)
2359 revdag = graphmod.dagwalker(repo, revs)
2361
2360
2362 getrenamed = None
2361 getrenamed = None
2363 if opts.get('copies'):
2362 if opts.get('copies'):
2364 endrev = None
2363 endrev = None
2365 if opts.get('rev'):
2364 if opts.get('rev'):
2366 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2365 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2367 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2366 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2368
2367
2369 ui.pager('log')
2368 ui.pager('log')
2370 displayer = show_changeset(ui, repo, opts, buffered=True)
2369 displayer = show_changeset(ui, repo, opts, buffered=True)
2371 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2370 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2372 filematcher)
2371 filematcher)
2373
2372
2374 def checkunsupportedgraphflags(pats, opts):
2373 def checkunsupportedgraphflags(pats, opts):
2375 for op in ["newest_first"]:
2374 for op in ["newest_first"]:
2376 if op in opts and opts[op]:
2375 if op in opts and opts[op]:
2377 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2376 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2378 % op.replace("_", "-"))
2377 % op.replace("_", "-"))
2379
2378
2380 def graphrevs(repo, nodes, opts):
2379 def graphrevs(repo, nodes, opts):
2381 limit = loglimit(opts)
2380 limit = loglimit(opts)
2382 nodes.reverse()
2381 nodes.reverse()
2383 if limit is not None:
2382 if limit is not None:
2384 nodes = nodes[:limit]
2383 nodes = nodes[:limit]
2385 return graphmod.nodes(repo, nodes)
2384 return graphmod.nodes(repo, nodes)
2386
2385
2387 def add(ui, repo, match, prefix, explicitonly, **opts):
2386 def add(ui, repo, match, prefix, explicitonly, **opts):
2388 join = lambda f: os.path.join(prefix, f)
2387 join = lambda f: os.path.join(prefix, f)
2389 bad = []
2388 bad = []
2390
2389
2391 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2390 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2392 names = []
2391 names = []
2393 wctx = repo[None]
2392 wctx = repo[None]
2394 cca = None
2393 cca = None
2395 abort, warn = scmutil.checkportabilityalert(ui)
2394 abort, warn = scmutil.checkportabilityalert(ui)
2396 if abort or warn:
2395 if abort or warn:
2397 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2396 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2398
2397
2399 badmatch = matchmod.badmatch(match, badfn)
2398 badmatch = matchmod.badmatch(match, badfn)
2400 dirstate = repo.dirstate
2399 dirstate = repo.dirstate
2401 # We don't want to just call wctx.walk here, since it would return a lot of
2400 # We don't want to just call wctx.walk here, since it would return a lot of
2402 # clean files, which we aren't interested in, and that takes time.
2401 # clean files, which we aren't interested in, and that takes time.
2403 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2402 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2404 True, False, full=False)):
2403 True, False, full=False)):
2405 exact = match.exact(f)
2404 exact = match.exact(f)
2406 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2405 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2407 if cca:
2406 if cca:
2408 cca(f)
2407 cca(f)
2409 names.append(f)
2408 names.append(f)
2410 if ui.verbose or not exact:
2409 if ui.verbose or not exact:
2411 ui.status(_('adding %s\n') % match.rel(f))
2410 ui.status(_('adding %s\n') % match.rel(f))
2412
2411
2413 for subpath in sorted(wctx.substate):
2412 for subpath in sorted(wctx.substate):
2414 sub = wctx.sub(subpath)
2413 sub = wctx.sub(subpath)
2415 try:
2414 try:
2416 submatch = matchmod.subdirmatcher(subpath, match)
2415 submatch = matchmod.subdirmatcher(subpath, match)
2417 if opts.get(r'subrepos'):
2416 if opts.get(r'subrepos'):
2418 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2417 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2419 else:
2418 else:
2420 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2419 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2421 except error.LookupError:
2420 except error.LookupError:
2422 ui.status(_("skipping missing subrepository: %s\n")
2421 ui.status(_("skipping missing subrepository: %s\n")
2423 % join(subpath))
2422 % join(subpath))
2424
2423
2425 if not opts.get(r'dry_run'):
2424 if not opts.get(r'dry_run'):
2426 rejected = wctx.add(names, prefix)
2425 rejected = wctx.add(names, prefix)
2427 bad.extend(f for f in rejected if f in match.files())
2426 bad.extend(f for f in rejected if f in match.files())
2428 return bad
2427 return bad
2429
2428
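# Illustrative sketch (editor's addition): a minimal caller of add(), in
# the shape "hg add PATTERN" uses it (prefix='' at the top level, and
# explicitonly=False so unknown files under matched directories are
# picked up as well).  The pattern is hypothetical.
def _add_example(ui, repo):
    m = scmutil.match(repo[None], ['docs'], {})
    bad = add(ui, repo, m, prefix='', explicitonly=False,
              dry_run=False, subrepos=False)
    return bad  # file names that could not be added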
2430 def addwebdirpath(repo, serverpath, webconf):
2429 def addwebdirpath(repo, serverpath, webconf):
2431 webconf[serverpath] = repo.root
2430 webconf[serverpath] = repo.root
2432 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2431 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2433
2432
2434 for r in repo.revs('filelog("path:.hgsub")'):
2433 for r in repo.revs('filelog("path:.hgsub")'):
2435 ctx = repo[r]
2434 ctx = repo[r]
2436 for subpath in ctx.substate:
2435 for subpath in ctx.substate:
2437 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2436 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2438
2437
2439 def forget(ui, repo, match, prefix, explicitonly):
2438 def forget(ui, repo, match, prefix, explicitonly):
2440 join = lambda f: os.path.join(prefix, f)
2439 join = lambda f: os.path.join(prefix, f)
2441 bad = []
2440 bad = []
2442 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2441 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2443 wctx = repo[None]
2442 wctx = repo[None]
2444 forgot = []
2443 forgot = []
2445
2444
2446 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2445 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2447 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2446 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2448 if explicitonly:
2447 if explicitonly:
2449 forget = [f for f in forget if match.exact(f)]
2448 forget = [f for f in forget if match.exact(f)]
2450
2449
2451 for subpath in sorted(wctx.substate):
2450 for subpath in sorted(wctx.substate):
2452 sub = wctx.sub(subpath)
2451 sub = wctx.sub(subpath)
2453 try:
2452 try:
2454 submatch = matchmod.subdirmatcher(subpath, match)
2453 submatch = matchmod.subdirmatcher(subpath, match)
2455 subbad, subforgot = sub.forget(submatch, prefix)
2454 subbad, subforgot = sub.forget(submatch, prefix)
2456 bad.extend([subpath + '/' + f for f in subbad])
2455 bad.extend([subpath + '/' + f for f in subbad])
2457 forgot.extend([subpath + '/' + f for f in subforgot])
2456 forgot.extend([subpath + '/' + f for f in subforgot])
2458 except error.LookupError:
2457 except error.LookupError:
2459 ui.status(_("skipping missing subrepository: %s\n")
2458 ui.status(_("skipping missing subrepository: %s\n")
2460 % join(subpath))
2459 % join(subpath))
2461
2460
2462 if not explicitonly:
2461 if not explicitonly:
2463 for f in match.files():
2462 for f in match.files():
2464 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2463 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2465 if f not in forgot:
2464 if f not in forgot:
2466 if repo.wvfs.exists(f):
2465 if repo.wvfs.exists(f):
2467 # Don't complain if the exact case match wasn't given.
2466 # Don't complain if the exact case match wasn't given.
2468 # But don't do this until after checking 'forgot', so
2467 # But don't do this until after checking 'forgot', so
2469 # that subrepo files aren't normalized, and this op is
2468 # that subrepo files aren't normalized, and this op is
2470 # purely from data cached by the status walk above.
2469 # purely from data cached by the status walk above.
2471 if repo.dirstate.normalize(f) in repo.dirstate:
2470 if repo.dirstate.normalize(f) in repo.dirstate:
2472 continue
2471 continue
2473 ui.warn(_('not removing %s: '
2472 ui.warn(_('not removing %s: '
2474 'file is already untracked\n')
2473 'file is already untracked\n')
2475 % match.rel(f))
2474 % match.rel(f))
2476 bad.append(f)
2475 bad.append(f)
2477
2476
2478 for f in forget:
2477 for f in forget:
2479 if ui.verbose or not match.exact(f):
2478 if ui.verbose or not match.exact(f):
2480 ui.status(_('removing %s\n') % match.rel(f))
2479 ui.status(_('removing %s\n') % match.rel(f))
2481
2480
2482 rejected = wctx.forget(forget, prefix)
2481 rejected = wctx.forget(forget, prefix)
2483 bad.extend(f for f in rejected if f in match.files())
2482 bad.extend(f for f in rejected if f in match.files())
2484 forgot.extend(f for f in forget if f not in rejected)
2483 forgot.extend(f for f in forget if f not in rejected)
2485 return bad, forgot
2484 return bad, forgot
2486
2485
2487 def files(ui, ctx, m, fm, fmt, subrepos):
2486 def files(ui, ctx, m, fm, fmt, subrepos):
2488 rev = ctx.rev()
2487 rev = ctx.rev()
2489 ret = 1
2488 ret = 1
2490 ds = ctx.repo().dirstate
2489 ds = ctx.repo().dirstate
2491
2490
2492 for f in ctx.matches(m):
2491 for f in ctx.matches(m):
2493 if rev is None and ds[f] == 'r':
2492 if rev is None and ds[f] == 'r':
2494 continue
2493 continue
2495 fm.startitem()
2494 fm.startitem()
2496 if ui.verbose:
2495 if ui.verbose:
2497 fc = ctx[f]
2496 fc = ctx[f]
2498 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2497 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2499 fm.data(abspath=f)
2498 fm.data(abspath=f)
2500 fm.write('path', fmt, m.rel(f))
2499 fm.write('path', fmt, m.rel(f))
2501 ret = 0
2500 ret = 0
2502
2501
2503 for subpath in sorted(ctx.substate):
2502 for subpath in sorted(ctx.substate):
2504 submatch = matchmod.subdirmatcher(subpath, m)
2503 submatch = matchmod.subdirmatcher(subpath, m)
2505 if (subrepos or m.exact(subpath) or any(submatch.files())):
2504 if (subrepos or m.exact(subpath) or any(submatch.files())):
2506 sub = ctx.sub(subpath)
2505 sub = ctx.sub(subpath)
2507 try:
2506 try:
2508 recurse = m.exact(subpath) or subrepos
2507 recurse = m.exact(subpath) or subrepos
2509 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2508 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2510 ret = 0
2509 ret = 0
2511 except error.LookupError:
2510 except error.LookupError:
2512 ui.status(_("skipping missing subrepository: %s\n")
2511 ui.status(_("skipping missing subrepository: %s\n")
2513 % m.abs(subpath))
2512 % m.abs(subpath))
2514
2513
2515 return ret
2514 return ret
2516
2515
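# Illustrative sketch (editor's addition): how an "hg files" style caller
# is expected to drive files() -- open a formatter, pass a matcher and an
# output format, then close the formatter.  Names are hypothetical.
def _files_example(ui, repo):
    ctx = repo['.']
    m = scmutil.match(ctx, [], {})
    fm = ui.formatter('files', {})
    ret = files(ui, ctx, m, fm, fmt='%s\n', subrepos=False)
    fm.end()
    return ret  # 0 if at least one file matched, 1 otherwise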
2517 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2516 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2518 join = lambda f: os.path.join(prefix, f)
2517 join = lambda f: os.path.join(prefix, f)
2519 ret = 0
2518 ret = 0
2520 s = repo.status(match=m, clean=True)
2519 s = repo.status(match=m, clean=True)
2521 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2520 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2522
2521
2523 wctx = repo[None]
2522 wctx = repo[None]
2524
2523
2525 if warnings is None:
2524 if warnings is None:
2526 warnings = []
2525 warnings = []
2527 warn = True
2526 warn = True
2528 else:
2527 else:
2529 warn = False
2528 warn = False
2530
2529
2531 subs = sorted(wctx.substate)
2530 subs = sorted(wctx.substate)
2532 total = len(subs)
2531 total = len(subs)
2533 count = 0
2532 count = 0
2534 for subpath in subs:
2533 for subpath in subs:
2535 count += 1
2534 count += 1
2536 submatch = matchmod.subdirmatcher(subpath, m)
2535 submatch = matchmod.subdirmatcher(subpath, m)
2537 if subrepos or m.exact(subpath) or any(submatch.files()):
2536 if subrepos or m.exact(subpath) or any(submatch.files()):
2538 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2537 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2539 sub = wctx.sub(subpath)
2538 sub = wctx.sub(subpath)
2540 try:
2539 try:
2541 if sub.removefiles(submatch, prefix, after, force, subrepos,
2540 if sub.removefiles(submatch, prefix, after, force, subrepos,
2542 warnings):
2541 warnings):
2543 ret = 1
2542 ret = 1
2544 except error.LookupError:
2543 except error.LookupError:
2545 warnings.append(_("skipping missing subrepository: %s\n")
2544 warnings.append(_("skipping missing subrepository: %s\n")
2546 % join(subpath))
2545 % join(subpath))
2547 ui.progress(_('searching'), None)
2546 ui.progress(_('searching'), None)
2548
2547
2549 # warn about failure to delete explicit files/dirs
2548 # warn about failure to delete explicit files/dirs
2550 deleteddirs = util.dirs(deleted)
2549 deleteddirs = util.dirs(deleted)
2551 files = m.files()
2550 files = m.files()
2552 total = len(files)
2551 total = len(files)
2553 count = 0
2552 count = 0
2554 for f in files:
2553 for f in files:
2555 def insubrepo():
2554 def insubrepo():
2556 for subpath in wctx.substate:
2555 for subpath in wctx.substate:
2557 if f.startswith(subpath + '/'):
2556 if f.startswith(subpath + '/'):
2558 return True
2557 return True
2559 return False
2558 return False
2560
2559
2561 count += 1
2560 count += 1
2562 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2561 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2563 isdir = f in deleteddirs or wctx.hasdir(f)
2562 isdir = f in deleteddirs or wctx.hasdir(f)
2564 if (f in repo.dirstate or isdir or f == '.'
2563 if (f in repo.dirstate or isdir or f == '.'
2565 or insubrepo() or f in subs):
2564 or insubrepo() or f in subs):
2566 continue
2565 continue
2567
2566
2568 if repo.wvfs.exists(f):
2567 if repo.wvfs.exists(f):
2569 if repo.wvfs.isdir(f):
2568 if repo.wvfs.isdir(f):
2570 warnings.append(_('not removing %s: no tracked files\n')
2569 warnings.append(_('not removing %s: no tracked files\n')
2571 % m.rel(f))
2570 % m.rel(f))
2572 else:
2571 else:
2573 warnings.append(_('not removing %s: file is untracked\n')
2572 warnings.append(_('not removing %s: file is untracked\n')
2574 % m.rel(f))
2573 % m.rel(f))
2575 # missing files will generate a warning elsewhere
2574 # missing files will generate a warning elsewhere
2576 ret = 1
2575 ret = 1
2577 ui.progress(_('deleting'), None)
2576 ui.progress(_('deleting'), None)
2578
2577
2579 if force:
2578 if force:
2580 list = modified + deleted + clean + added
2579 list = modified + deleted + clean + added
2581 elif after:
2580 elif after:
2582 list = deleted
2581 list = deleted
2583 remaining = modified + added + clean
2582 remaining = modified + added + clean
2584 total = len(remaining)
2583 total = len(remaining)
2585 count = 0
2584 count = 0
2586 for f in remaining:
2585 for f in remaining:
2587 count += 1
2586 count += 1
2588 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2587 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2589 warnings.append(_('not removing %s: file still exists\n')
2588 warnings.append(_('not removing %s: file still exists\n')
2590 % m.rel(f))
2589 % m.rel(f))
2591 ret = 1
2590 ret = 1
2592 ui.progress(_('skipping'), None)
2591 ui.progress(_('skipping'), None)
2593 else:
2592 else:
2594 list = deleted + clean
2593 list = deleted + clean
2595 total = len(modified) + len(added)
2594 total = len(modified) + len(added)
2596 count = 0
2595 count = 0
2597 for f in modified:
2596 for f in modified:
2598 count += 1
2597 count += 1
2599 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2598 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2600 warnings.append(_('not removing %s: file is modified (use -f'
2599 warnings.append(_('not removing %s: file is modified (use -f'
2601 ' to force removal)\n') % m.rel(f))
2600 ' to force removal)\n') % m.rel(f))
2602 ret = 1
2601 ret = 1
2603 for f in added:
2602 for f in added:
2604 count += 1
2603 count += 1
2605 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2604 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2606 warnings.append(_("not removing %s: file has been marked for add"
2605 warnings.append(_("not removing %s: file has been marked for add"
2607 " (use 'hg forget' to undo add)\n") % m.rel(f))
2606 " (use 'hg forget' to undo add)\n") % m.rel(f))
2608 ret = 1
2607 ret = 1
2609 ui.progress(_('skipping'), None)
2608 ui.progress(_('skipping'), None)
2610
2609
2611 list = sorted(list)
2610 list = sorted(list)
2612 total = len(list)
2611 total = len(list)
2613 count = 0
2612 count = 0
2614 for f in list:
2613 for f in list:
2615 count += 1
2614 count += 1
2616 if ui.verbose or not m.exact(f):
2615 if ui.verbose or not m.exact(f):
2617 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2616 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2618 ui.status(_('removing %s\n') % m.rel(f))
2617 ui.status(_('removing %s\n') % m.rel(f))
2619 ui.progress(_('deleting'), None)
2618 ui.progress(_('deleting'), None)
2620
2619
2621 with repo.wlock():
2620 with repo.wlock():
2622 if not after:
2621 if not after:
2623 for f in list:
2622 for f in list:
2624 if f in added:
2623 if f in added:
2625 continue # we never unlink added files on remove
2624 continue # we never unlink added files on remove
2626 repo.wvfs.unlinkpath(f, ignoremissing=True)
2625 repo.wvfs.unlinkpath(f, ignoremissing=True)
2627 repo[None].forget(list)
2626 repo[None].forget(list)
2628
2627
2629 if warn:
2628 if warn:
2630 for warning in warnings:
2629 for warning in warnings:
2631 ui.warn(warning)
2630 ui.warn(warning)
2632
2631
2633 return ret
2632 return ret
2634
2633
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    err = 1

    def write(path):
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
        with formatter.maybereopen(basefm, filename, opts) as fm:
            data = ctx[path].data()
            if opts.get('decode'):
                data = repo.wwritedata(path, data)
            fm.startitem()
            fm.write('data', '%s', data)
            fm.data(abspath=path, path=matcher.rel(path))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                write(file)
                return 0
        except KeyError:
            pass

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path), **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err

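# Illustrative sketch, not part of the original module: roughly how a caller
# such as commands.cat is expected to drive cat() -- resolve the revision,
# build a matcher, open a formatter, and pass any --output template through.
# Argument handling is simplified and the wrapper name is hypothetical.
def _cat_example(ui, repo, rev, pats, opts):
    ctx = scmutil.revsingle(repo, rev)
    m = scmutil.match(ctx, pats, opts)
    with ui.formatter('cat', opts) as basefm:
        return cat(ui, repo, ctx, m, basefm, opts.get('output', ''), '',
                   **opts)
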
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        if scmutil.addremove(repo, matcher, "", opts) != 0:
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)

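# Illustrative sketch, not part of the original module: commit() delegates the
# actual commit to a 'commitfunc' callback taking (ui, repo, message, match,
# opts). A minimal callback, assuming repo.commit() semantics, could look like
# this (the helper name is hypothetical).
def _commitfunc_example(ui, repo, message, match, opts):
    return repo.commit(message, opts.get('user'), opts.get('date'), match,
                       editor=getcommiteditor(**opts))
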
def samefile(f, ctx1, ctx2):
    if f in ctx1.manifest():
        a = ctx1.filectx(f)
        if f in ctx2.manifest():
            b = ctx2.filectx(f)
            return (not a.cmp(b)
                    and a.flags() == b.flags())
        else:
            return False
    else:
        return f not in ctx2.manifest()

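# Illustrative sketch, not part of the original module: samefile() is used to
# prune paths whose content and flags are identical in two changesets, e.g.
# when amend() trims its file list. The helper name is hypothetical.
def _samefile_example(repo):
    ctx1 = repo['.']
    ctx2 = ctx1.p1()
    # keep only the files that actually differ between '.' and its parent
    return [f for f in ctx1.files() if not samefile(f, ctx1, ctx2)]
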
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                if old.p2:
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())
                files = [f for f in files if not samefile(f, ctx, base)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

            user = opts.get('user') or old.user()
            date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This is not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
            # commit the whole amend process
            if createmarkers:
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs, operation='amend')
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, wlock)
    return newid

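# Illustrative sketch, not part of the original module: roughly how a caller
# like commands.commit wires amend() up for 'hg commit --amend'. The wrapper
# and its commitfunc are simplified and hypothetical.
def _amend_example(ui, repo, pats, opts):
    old = repo['.']
    extra = {}
    def commitfunc(ui, repo, message, match, opts):
        return repo.commit(message,
                           opts.get('user') or old.user(),
                           opts.get('date') or old.date(),
                           match,
                           extra=extra)
    return amend(ui, repo, commitfunc, old, extra, pats, opts)
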
def commiteditor(repo, ctx, subs, editform=''):
    if ctx.description():
        return ctx.description()
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)

def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            tmpl = templater.unquotestring(tmpl)
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text

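# Illustrative sketch, not part of the original module: how the fallback loop
# above resolves 'committemplate' configuration keys for a given editform,
# most specific first. The helper name is hypothetical.
def _committemplate_candidates_example(editform='commit.amend'):
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    candidates = []
    while forms:
        candidates.append('.'.join(forms))
        forms.pop()
    # ['changeset.commit.amend', 'changeset.commit', 'changeset']
    return candidates
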
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)

    for k, v in repo.ui.configitems('committemplate'):
        if k != 'changeset':
            t.t.cache[k] = v

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()

def hgprefix(msg):
    return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])

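# Illustrative sketch, not part of the original module: hgprefix() drops empty
# lines and prefixes the rest with "HG: ". The helper name is hypothetical.
def _hgprefix_example():
    assert (hgprefix("user: alice\n\nbranch default")
            == "HG: user: alice\nHG: branch default")
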
def buildcommittext(repo, ctx, subs, extramsg):
    edittext = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        edittext.append(ctx.description())
    edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(hgprefix(_("Enter commit message."
                               " Lines beginning with 'HG:' are removed.")))
    edittext.append(hgprefix(extramsg))
    edittext.append("HG: --")
    edittext.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        edittext.append(hgprefix(_("branch merge")))
    if ctx.branch():
        edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
    edittext.extend([hgprefix(_("added %s") % f) for f in added])
    edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
    edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
    if not added and not modified and not removed:
        edittext.append(hgprefix(_("no files changed")))
    edittext.append("")

    return "\n".join(edittext)

def commitstatus(repo, node, branch, bheads=None, opts=None):
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))

def postcommitstatus(repo, pats, opts):
    return repo.status(match=scmutil.match(repo[None], pats, opts))

def revert(ui, repo, ctx, parents, *pats, **opts):
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find the status of all files in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinguish between dirstate removes and others
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between files to forget and the others
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If such a file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, <message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                   }

        # "constants" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set, to avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets whose results will change files on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since target, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets do not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touched on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))

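# Illustrative sketch, not part of the original module: roughly how
# commands.revert invokes this helper -- resolve the target revision and pass
# the working directory parents along. The wrapper name is hypothetical.
def _revert_example(ui, repo, rev, pats, opts):
    ctx = scmutil.revsingle(repo, rev)
    parent, p2 = repo.dirstate.parents()
    return revert(ui, repo, ctx, (parent, p2), *pats, **opts)
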
def _revertprefetch(repo, ctx, *files):
    """Let extensions changing the storage layer prefetch content"""
    pass

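# Illustrative sketch, not part of the original module: a storage extension
# would typically wrap _revertprefetch() (e.g. via extensions.wrapfunction)
# to warm its cache before revert starts writing files. The wrapper below is
# hypothetical.
def _revertprefetch_example(orig, repo, ctx, *files):
    for flist in files:
        for f in flist:
            ctx[f].data() # force the file content to be fetched/cached
    return orig(repo, ctx, *files)
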
def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """function that actually performs all the actions computed for revert

    This is an independent function to let extensions plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    excluded_files = []
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        try:
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'revert'
            reversehunks = repo.ui.configbool('experimental',
                                              'revertalternateinteractivemode',
                                              True)
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)

class command(registrar.command):
    def _doregister(self, func, name, *args, **kwargs):
        func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
        return super(command, self)._doregister(func, name, *args, **kwargs)

# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#    (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#    - (sourceurl, sourcebranch, sourcepeer, incoming)
#    - (desturl,   destbranch,   destpeer,   outgoing)
summaryremotehooks = util.hooks()

3499 # A list of state files kept by multistep operations like graft.
3498 # A list of state files kept by multistep operations like graft.
3500 # Since graft cannot be aborted, it is considered 'clearable' by update.
3499 # Since graft cannot be aborted, it is considered 'clearable' by update.
3501 # note: bisect is intentionally excluded
3500 # note: bisect is intentionally excluded
3502 # (state file, clearable, allowcommit, error, hint)
3501 # (state file, clearable, allowcommit, error, hint)
3503 unfinishedstates = [
3502 unfinishedstates = [
3504 ('graftstate', True, False, _('graft in progress'),
3503 ('graftstate', True, False, _('graft in progress'),
3505 _("use 'hg graft --continue' or 'hg update' to abort")),
3504 _("use 'hg graft --continue' or 'hg update' to abort")),
3506 ('updatestate', True, False, _('last update was interrupted'),
3505 ('updatestate', True, False, _('last update was interrupted'),
3507 _("use 'hg update' to get a consistent checkout"))
3506 _("use 'hg update' to get a consistent checkout"))
3508 ]
3507 ]
3509
3508
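Extensions that add their own multistep commands typically append an entry to this list at load time. A hedged sketch modeled on what rebase/shelve-style extensions do (the state file name and messages here are invented):

    cmdutil.unfinishedstates.append(
        ('myextstate', False, False, _('myext operation in progress'),
         _("use 'hg myext --continue' or 'hg myext --abort'")))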
3510 def checkunfinished(repo, commit=False):
3509 def checkunfinished(repo, commit=False):
3511 '''Look for an unfinished multistep operation, like graft, and abort
3510 '''Look for an unfinished multistep operation, like graft, and abort
3512 if found. It's probably good to check this right before
3511 if found. It's probably good to check this right before
3513 bailifchanged().
3512 bailifchanged().
3514 '''
3513 '''
3515 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3514 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3516 if commit and allowcommit:
3515 if commit and allowcommit:
3517 continue
3516 continue
3518 if repo.vfs.exists(f):
3517 if repo.vfs.exists(f):
3519 raise error.Abort(msg, hint=hint)
3518 raise error.Abort(msg, hint=hint)
3520
3519
3521 def clearunfinished(repo):
3520 def clearunfinished(repo):
3522 '''Check for unfinished operations (as above), and clear the ones
3521 '''Check for unfinished operations (as above), and clear the ones
3523 that are clearable.
3522 that are clearable.
3524 '''
3523 '''
3525 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3524 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3526 if not clearable and repo.vfs.exists(f):
3525 if not clearable and repo.vfs.exists(f):
3527 raise error.Abort(msg, hint=hint)
3526 raise error.Abort(msg, hint=hint)
3528 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3527 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3529 if clearable and repo.vfs.exists(f):
3528 if clearable and repo.vfs.exists(f):
3530 util.unlink(repo.vfs.join(f))
3529 util.unlink(repo.vfs.join(f))
3531
3530
3532 afterresolvedstates = [
3531 afterresolvedstates = [
3533 ('graftstate',
3532 ('graftstate',
3534 _('hg graft --continue')),
3533 _('hg graft --continue')),
3535 ]
3534 ]
3536
3535
3537 def howtocontinue(repo):
3536 def howtocontinue(repo):
3538 '''Check for an unfinished operation and return the command to finish
3537 '''Check for an unfinished operation and return the command to finish
3539 it.
3538 it.
3540
3539
3541 afterresolvedstates tuples define a .hg/{file} and the corresponding
3540 afterresolvedstates tuples define a .hg/{file} and the corresponding
3542 command needed to finish it.
3541 command needed to finish it.
3543
3542
3544 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3543 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3545 a boolean.
3544 a boolean.
3546 '''
3545 '''
3547 contmsg = _("continue: %s")
3546 contmsg = _("continue: %s")
3548 for f, msg in afterresolvedstates:
3547 for f, msg in afterresolvedstates:
3549 if repo.vfs.exists(f):
3548 if repo.vfs.exists(f):
3550 return contmsg % msg, True
3549 return contmsg % msg, True
3551 workingctx = repo[None]
3550 workingctx = repo[None]
3552 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3551 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3553 for s in workingctx.substate)
3552 for s in workingctx.substate)
3554 if dirty:
3553 if dirty:
3555 return contmsg % _("hg commit"), False
3554 return contmsg % _("hg commit"), False
3556 return None, None
3555 return None, None
3557
3556
3558 def checkafterresolved(repo):
3557 def checkafterresolved(repo):
3559 '''Inform the user about the next action after completing hg resolve
3558 '''Inform the user about the next action after completing hg resolve
3560
3559
3561 If there's a matching afterresolvedstates, howtocontinue will yield
3560 If there's a matching afterresolvedstates, howtocontinue will yield
3562 repo.ui.warn as the reporter.
3561 repo.ui.warn as the reporter.
3563
3562
3564 Otherwise, it will yield repo.ui.note.
3563 Otherwise, it will yield repo.ui.note.
3565 '''
3564 '''
3566 msg, warning = howtocontinue(repo)
3565 msg, warning = howtocontinue(repo)
3567 if msg is not None:
3566 if msg is not None:
3568 if warning:
3567 if warning:
3569 repo.ui.warn("%s\n" % msg)
3568 repo.ui.warn("%s\n" % msg)
3570 else:
3569 else:
3571 repo.ui.note("%s\n" % msg)
3570 repo.ui.note("%s\n" % msg)
3572
3571
3573 def wrongtooltocontinue(repo, task):
3572 def wrongtooltocontinue(repo, task):
3574 '''Raise an abort suggesting how to properly continue if there is an
3573 '''Raise an abort suggesting how to properly continue if there is an
3575 active task.
3574 active task.
3576
3575
3577 Uses howtocontinue() to find the active task.
3576 Uses howtocontinue() to find the active task.
3578
3577
3579 If there's no task, or the only suggestion would be 'hg commit' (the
3578 If there's no task, or the only suggestion would be 'hg commit' (the
3580 repo.ui.note case), it does not offer a hint.
3579 repo.ui.note case), it does not offer a hint.
3581 '''
3580 '''
3582 after = howtocontinue(repo)
3581 after = howtocontinue(repo)
3583 hint = None
3582 hint = None
3584 if after[1]:
3583 if after[1]:
3585 hint = after[0]
3584 hint = after[0]
3586 raise error.Abort(_('no %s in progress') % task, hint=hint)
3585 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,983 +1,994 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16
16
17 from .i18n import _
17 from .i18n import _
18 from .node import wdirrev
18 from .node import (
19 wdirid,
20 wdirrev,
21 )
22
19 from . import (
23 from . import (
20 encoding,
24 encoding,
21 error,
25 error,
22 match as matchmod,
26 match as matchmod,
23 pathutil,
27 pathutil,
24 phases,
28 phases,
25 pycompat,
29 pycompat,
26 revsetlang,
30 revsetlang,
27 similar,
31 similar,
28 util,
32 util,
29 )
33 )
30
34
31 if pycompat.osname == 'nt':
35 if pycompat.osname == 'nt':
32 from . import scmwindows as scmplatform
36 from . import scmwindows as scmplatform
33 else:
37 else:
34 from . import scmposix as scmplatform
38 from . import scmposix as scmplatform
35
39
36 termsize = scmplatform.termsize
40 termsize = scmplatform.termsize
37
41
38 class status(tuple):
42 class status(tuple):
39 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
43 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
40 and 'ignored' properties are only relevant to the working copy.
44 and 'ignored' properties are only relevant to the working copy.
41 '''
45 '''
42
46
43 __slots__ = ()
47 __slots__ = ()
44
48
45 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
49 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
46 clean):
50 clean):
47 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
51 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
48 ignored, clean))
52 ignored, clean))
49
53
50 @property
54 @property
51 def modified(self):
55 def modified(self):
52 '''files that have been modified'''
56 '''files that have been modified'''
53 return self[0]
57 return self[0]
54
58
55 @property
59 @property
56 def added(self):
60 def added(self):
57 '''files that have been added'''
61 '''files that have been added'''
58 return self[1]
62 return self[1]
59
63
60 @property
64 @property
61 def removed(self):
65 def removed(self):
62 '''files that have been removed'''
66 '''files that have been removed'''
63 return self[2]
67 return self[2]
64
68
65 @property
69 @property
66 def deleted(self):
70 def deleted(self):
67 '''files that are in the dirstate, but have been deleted from the
71 '''files that are in the dirstate, but have been deleted from the
68 working copy (aka "missing")
72 working copy (aka "missing")
69 '''
73 '''
70 return self[3]
74 return self[3]
71
75
72 @property
76 @property
73 def unknown(self):
77 def unknown(self):
74 '''files not in the dirstate that are not ignored'''
78 '''files not in the dirstate that are not ignored'''
75 return self[4]
79 return self[4]
76
80
77 @property
81 @property
78 def ignored(self):
82 def ignored(self):
79 '''files not in the dirstate that are ignored (by _dirignore())'''
83 '''files not in the dirstate that are ignored (by _dirignore())'''
80 return self[5]
84 return self[5]
81
85
82 @property
86 @property
83 def clean(self):
87 def clean(self):
84 '''files that have not been modified'''
88 '''files that have not been modified'''
85 return self[6]
89 return self[6]
86
90
87 def __repr__(self, *args, **kwargs):
91 def __repr__(self, *args, **kwargs):
88 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
92 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
89 'unknown=%r, ignored=%r, clean=%r>') % self)
93 'unknown=%r, ignored=%r, clean=%r>') % self)
90
94
91 def itersubrepos(ctx1, ctx2):
95 def itersubrepos(ctx1, ctx2):
92 """find subrepos in ctx1 or ctx2"""
96 """find subrepos in ctx1 or ctx2"""
93 # Create a (subpath, ctx) mapping where we prefer subpaths from
97 # Create a (subpath, ctx) mapping where we prefer subpaths from
94 # ctx1. The subpaths from ctx2 are important when the .hgsub file
98 # ctx1. The subpaths from ctx2 are important when the .hgsub file
95 # has been modified (in ctx2) but not yet committed (in ctx1).
99 # has been modified (in ctx2) but not yet committed (in ctx1).
96 subpaths = dict.fromkeys(ctx2.substate, ctx2)
100 subpaths = dict.fromkeys(ctx2.substate, ctx2)
97 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
101 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
98
102
99 missing = set()
103 missing = set()
100
104
101 for subpath in ctx2.substate:
105 for subpath in ctx2.substate:
102 if subpath not in ctx1.substate:
106 if subpath not in ctx1.substate:
103 del subpaths[subpath]
107 del subpaths[subpath]
104 missing.add(subpath)
108 missing.add(subpath)
105
109
106 for subpath, ctx in sorted(subpaths.iteritems()):
110 for subpath, ctx in sorted(subpaths.iteritems()):
107 yield subpath, ctx.sub(subpath)
111 yield subpath, ctx.sub(subpath)
108
112
109 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
113 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
110 # status and diff will have an accurate result when it does
114 # status and diff will have an accurate result when it does
111 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
115 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
112 # against itself.
116 # against itself.
113 for subpath in missing:
117 for subpath in missing:
114 yield subpath, ctx2.nullsub(subpath, ctx1)
118 yield subpath, ctx2.nullsub(subpath, ctx1)
115
119
116 def nochangesfound(ui, repo, excluded=None):
120 def nochangesfound(ui, repo, excluded=None):
117 '''Report no changes for push/pull, excluded is None or a list of
121 '''Report no changes for push/pull, excluded is None or a list of
118 nodes excluded from the push/pull.
122 nodes excluded from the push/pull.
119 '''
123 '''
120 secretlist = []
124 secretlist = []
121 if excluded:
125 if excluded:
122 for n in excluded:
126 for n in excluded:
123 ctx = repo[n]
127 ctx = repo[n]
124 if ctx.phase() >= phases.secret and not ctx.extinct():
128 if ctx.phase() >= phases.secret and not ctx.extinct():
125 secretlist.append(n)
129 secretlist.append(n)
126
130
127 if secretlist:
131 if secretlist:
128 ui.status(_("no changes found (ignored %d secret changesets)\n")
132 ui.status(_("no changes found (ignored %d secret changesets)\n")
129 % len(secretlist))
133 % len(secretlist))
130 else:
134 else:
131 ui.status(_("no changes found\n"))
135 ui.status(_("no changes found\n"))
132
136
133 def callcatch(ui, func):
137 def callcatch(ui, func):
134 """call func() with global exception handling
138 """call func() with global exception handling
135
139
136 return func() if no exception happens. otherwise do some error handling
140 return func() if no exception happens. otherwise do some error handling
137 and return an exit code accordingly. does not handle all exceptions.
141 and return an exit code accordingly. does not handle all exceptions.
138 """
142 """
139 try:
143 try:
140 try:
144 try:
141 return func()
145 return func()
142 except: # re-raises
146 except: # re-raises
143 ui.traceback()
147 ui.traceback()
144 raise
148 raise
145 # Global exception handling, alphabetically
149 # Global exception handling, alphabetically
146 # Mercurial-specific first, followed by built-in and library exceptions
150 # Mercurial-specific first, followed by built-in and library exceptions
147 except error.LockHeld as inst:
151 except error.LockHeld as inst:
148 if inst.errno == errno.ETIMEDOUT:
152 if inst.errno == errno.ETIMEDOUT:
149 reason = _('timed out waiting for lock held by %r') % inst.locker
153 reason = _('timed out waiting for lock held by %r') % inst.locker
150 else:
154 else:
151 reason = _('lock held by %r') % inst.locker
155 reason = _('lock held by %r') % inst.locker
152 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
156 ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
153 if not inst.locker:
157 if not inst.locker:
154 ui.warn(_("(lock might be very busy)\n"))
158 ui.warn(_("(lock might be very busy)\n"))
155 except error.LockUnavailable as inst:
159 except error.LockUnavailable as inst:
156 ui.warn(_("abort: could not lock %s: %s\n") %
160 ui.warn(_("abort: could not lock %s: %s\n") %
157 (inst.desc or inst.filename, inst.strerror))
161 (inst.desc or inst.filename, inst.strerror))
158 except error.OutOfBandError as inst:
162 except error.OutOfBandError as inst:
159 if inst.args:
163 if inst.args:
160 msg = _("abort: remote error:\n")
164 msg = _("abort: remote error:\n")
161 else:
165 else:
162 msg = _("abort: remote error\n")
166 msg = _("abort: remote error\n")
163 ui.warn(msg)
167 ui.warn(msg)
164 if inst.args:
168 if inst.args:
165 ui.warn(''.join(inst.args))
169 ui.warn(''.join(inst.args))
166 if inst.hint:
170 if inst.hint:
167 ui.warn('(%s)\n' % inst.hint)
171 ui.warn('(%s)\n' % inst.hint)
168 except error.RepoError as inst:
172 except error.RepoError as inst:
169 ui.warn(_("abort: %s!\n") % inst)
173 ui.warn(_("abort: %s!\n") % inst)
170 if inst.hint:
174 if inst.hint:
171 ui.warn(_("(%s)\n") % inst.hint)
175 ui.warn(_("(%s)\n") % inst.hint)
172 except error.ResponseError as inst:
176 except error.ResponseError as inst:
173 ui.warn(_("abort: %s") % inst.args[0])
177 ui.warn(_("abort: %s") % inst.args[0])
174 if not isinstance(inst.args[1], basestring):
178 if not isinstance(inst.args[1], basestring):
175 ui.warn(" %r\n" % (inst.args[1],))
179 ui.warn(" %r\n" % (inst.args[1],))
176 elif not inst.args[1]:
180 elif not inst.args[1]:
177 ui.warn(_(" empty string\n"))
181 ui.warn(_(" empty string\n"))
178 else:
182 else:
179 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
183 ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
180 except error.CensoredNodeError as inst:
184 except error.CensoredNodeError as inst:
181 ui.warn(_("abort: file censored %s!\n") % inst)
185 ui.warn(_("abort: file censored %s!\n") % inst)
182 except error.RevlogError as inst:
186 except error.RevlogError as inst:
183 ui.warn(_("abort: %s!\n") % inst)
187 ui.warn(_("abort: %s!\n") % inst)
184 except error.InterventionRequired as inst:
188 except error.InterventionRequired as inst:
185 ui.warn("%s\n" % inst)
189 ui.warn("%s\n" % inst)
186 if inst.hint:
190 if inst.hint:
187 ui.warn(_("(%s)\n") % inst.hint)
191 ui.warn(_("(%s)\n") % inst.hint)
188 return 1
192 return 1
189 except error.Abort as inst:
193 except error.Abort as inst:
190 ui.warn(_("abort: %s\n") % inst)
194 ui.warn(_("abort: %s\n") % inst)
191 if inst.hint:
195 if inst.hint:
192 ui.warn(_("(%s)\n") % inst.hint)
196 ui.warn(_("(%s)\n") % inst.hint)
193 except ImportError as inst:
197 except ImportError as inst:
194 ui.warn(_("abort: %s!\n") % inst)
198 ui.warn(_("abort: %s!\n") % inst)
195 m = str(inst).split()[-1]
199 m = str(inst).split()[-1]
196 if m in "mpatch bdiff".split():
200 if m in "mpatch bdiff".split():
197 ui.warn(_("(did you forget to compile extensions?)\n"))
201 ui.warn(_("(did you forget to compile extensions?)\n"))
198 elif m in "zlib".split():
202 elif m in "zlib".split():
199 ui.warn(_("(is your Python install correct?)\n"))
203 ui.warn(_("(is your Python install correct?)\n"))
200 except IOError as inst:
204 except IOError as inst:
201 if util.safehasattr(inst, "code"):
205 if util.safehasattr(inst, "code"):
202 ui.warn(_("abort: %s\n") % inst)
206 ui.warn(_("abort: %s\n") % inst)
203 elif util.safehasattr(inst, "reason"):
207 elif util.safehasattr(inst, "reason"):
204 try: # usually it is in the form (errno, strerror)
208 try: # usually it is in the form (errno, strerror)
205 reason = inst.reason.args[1]
209 reason = inst.reason.args[1]
206 except (AttributeError, IndexError):
210 except (AttributeError, IndexError):
207 # it might be anything, for example a string
211 # it might be anything, for example a string
208 reason = inst.reason
212 reason = inst.reason
209 if isinstance(reason, unicode):
213 if isinstance(reason, unicode):
210 # SSLError of Python 2.7.9 contains a unicode
214 # SSLError of Python 2.7.9 contains a unicode
211 reason = encoding.unitolocal(reason)
215 reason = encoding.unitolocal(reason)
212 ui.warn(_("abort: error: %s\n") % reason)
216 ui.warn(_("abort: error: %s\n") % reason)
213 elif (util.safehasattr(inst, "args")
217 elif (util.safehasattr(inst, "args")
214 and inst.args and inst.args[0] == errno.EPIPE):
218 and inst.args and inst.args[0] == errno.EPIPE):
215 pass
219 pass
216 elif getattr(inst, "strerror", None):
220 elif getattr(inst, "strerror", None):
217 if getattr(inst, "filename", None):
221 if getattr(inst, "filename", None):
218 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
222 ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
219 else:
223 else:
220 ui.warn(_("abort: %s\n") % inst.strerror)
224 ui.warn(_("abort: %s\n") % inst.strerror)
221 else:
225 else:
222 raise
226 raise
223 except OSError as inst:
227 except OSError as inst:
224 if getattr(inst, "filename", None) is not None:
228 if getattr(inst, "filename", None) is not None:
225 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
229 ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
226 else:
230 else:
227 ui.warn(_("abort: %s\n") % inst.strerror)
231 ui.warn(_("abort: %s\n") % inst.strerror)
228 except MemoryError:
232 except MemoryError:
229 ui.warn(_("abort: out of memory\n"))
233 ui.warn(_("abort: out of memory\n"))
230 except SystemExit as inst:
234 except SystemExit as inst:
231 # Commands shouldn't sys.exit directly, but give a return code.
235 # Commands shouldn't sys.exit directly, but give a return code.
232 # Just in case catch this and and pass exit code to caller.
236 # Just in case catch this and and pass exit code to caller.
233 return inst.code
237 return inst.code
234 except socket.error as inst:
238 except socket.error as inst:
235 ui.warn(_("abort: %s\n") % inst.args[-1])
239 ui.warn(_("abort: %s\n") % inst.args[-1])
236
240
237 return -1
241 return -1
238
242
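A minimal usage sketch of callcatch(), assuming 'ui' is already set up; the wrapped function is a stand-in for whatever the caller (e.g. dispatch) wants to run:

    def _run():
        # the real caller passes in the command to execute
        return 0

    ret = callcatch(ui, _run)   # func()'s result, or an error exit code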
239 def checknewlabel(repo, lbl, kind):
243 def checknewlabel(repo, lbl, kind):
240 # Do not use the "kind" parameter in ui output.
244 # Do not use the "kind" parameter in ui output.
241 # It makes strings difficult to translate.
245 # It makes strings difficult to translate.
242 if lbl in ['tip', '.', 'null']:
246 if lbl in ['tip', '.', 'null']:
243 raise error.Abort(_("the name '%s' is reserved") % lbl)
247 raise error.Abort(_("the name '%s' is reserved") % lbl)
244 for c in (':', '\0', '\n', '\r'):
248 for c in (':', '\0', '\n', '\r'):
245 if c in lbl:
249 if c in lbl:
246 raise error.Abort(_("%r cannot be used in a name") % c)
250 raise error.Abort(_("%r cannot be used in a name") % c)
247 try:
251 try:
248 int(lbl)
252 int(lbl)
249 raise error.Abort(_("cannot use an integer as a name"))
253 raise error.Abort(_("cannot use an integer as a name"))
250 except ValueError:
254 except ValueError:
251 pass
255 pass
252
256
253 def checkfilename(f):
257 def checkfilename(f):
254 '''Check that the filename f is an acceptable filename for a tracked file'''
258 '''Check that the filename f is an acceptable filename for a tracked file'''
255 if '\r' in f or '\n' in f:
259 if '\r' in f or '\n' in f:
256 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
260 raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
257
261
258 def checkportable(ui, f):
262 def checkportable(ui, f):
259 '''Check if filename f is portable and warn or abort depending on config'''
263 '''Check if filename f is portable and warn or abort depending on config'''
260 checkfilename(f)
264 checkfilename(f)
261 abort, warn = checkportabilityalert(ui)
265 abort, warn = checkportabilityalert(ui)
262 if abort or warn:
266 if abort or warn:
263 msg = util.checkwinfilename(f)
267 msg = util.checkwinfilename(f)
264 if msg:
268 if msg:
265 msg = "%s: %r" % (msg, f)
269 msg = "%s: %r" % (msg, f)
266 if abort:
270 if abort:
267 raise error.Abort(msg)
271 raise error.Abort(msg)
268 ui.warn(_("warning: %s\n") % msg)
272 ui.warn(_("warning: %s\n") % msg)
269
273
270 def checkportabilityalert(ui):
274 def checkportabilityalert(ui):
271 '''check if the user's config requests nothing, a warning, or abort for
275 '''check if the user's config requests nothing, a warning, or abort for
272 non-portable filenames'''
276 non-portable filenames'''
273 val = ui.config('ui', 'portablefilenames', 'warn')
277 val = ui.config('ui', 'portablefilenames', 'warn')
274 lval = val.lower()
278 lval = val.lower()
275 bval = util.parsebool(val)
279 bval = util.parsebool(val)
276 abort = pycompat.osname == 'nt' or lval == 'abort'
280 abort = pycompat.osname == 'nt' or lval == 'abort'
277 warn = bval or lval == 'warn'
281 warn = bval or lval == 'warn'
278 if bval is None and not (warn or abort or lval == 'ignore'):
282 if bval is None and not (warn or abort or lval == 'ignore'):
279 raise error.ConfigError(
283 raise error.ConfigError(
280 _("ui.portablefilenames value is invalid ('%s')") % val)
284 _("ui.portablefilenames value is invalid ('%s')") % val)
281 return abort, warn
285 return abort, warn
282
286
283 class casecollisionauditor(object):
287 class casecollisionauditor(object):
284 def __init__(self, ui, abort, dirstate):
288 def __init__(self, ui, abort, dirstate):
285 self._ui = ui
289 self._ui = ui
286 self._abort = abort
290 self._abort = abort
287 allfiles = '\0'.join(dirstate._map)
291 allfiles = '\0'.join(dirstate._map)
288 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
292 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
289 self._dirstate = dirstate
293 self._dirstate = dirstate
290 # The purpose of _newfiles is so that we don't complain about
294 # The purpose of _newfiles is so that we don't complain about
291 # case collisions if someone were to call this object with the
295 # case collisions if someone were to call this object with the
292 # same filename twice.
296 # same filename twice.
293 self._newfiles = set()
297 self._newfiles = set()
294
298
295 def __call__(self, f):
299 def __call__(self, f):
296 if f in self._newfiles:
300 if f in self._newfiles:
297 return
301 return
298 fl = encoding.lower(f)
302 fl = encoding.lower(f)
299 if fl in self._loweredfiles and f not in self._dirstate:
303 if fl in self._loweredfiles and f not in self._dirstate:
300 msg = _('possible case-folding collision for %s') % f
304 msg = _('possible case-folding collision for %s') % f
301 if self._abort:
305 if self._abort:
302 raise error.Abort(msg)
306 raise error.Abort(msg)
303 self._ui.warn(_("warning: %s\n") % msg)
307 self._ui.warn(_("warning: %s\n") % msg)
304 self._loweredfiles.add(fl)
308 self._loweredfiles.add(fl)
305 self._newfiles.add(f)
309 self._newfiles.add(f)
306
310
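A hedged sketch of how callers drive this auditor; the filenames are invented, and the second one is meant to collide after case-folding:

    audit = casecollisionauditor(repo.ui, abort=False, dirstate=repo.dirstate)
    for f in ('README', 'readme'):
        audit(f)   # warns (or aborts when abort=True) on a collision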
307 def filteredhash(repo, maxrev):
311 def filteredhash(repo, maxrev):
308 """build hash of filtered revisions in the current repoview.
312 """build hash of filtered revisions in the current repoview.
309
313
310 Multiple caches perform up-to-date validation by checking that the
314 Multiple caches perform up-to-date validation by checking that the
311 tiprev and tipnode stored in the cache file match the current repository.
315 tiprev and tipnode stored in the cache file match the current repository.
312 However, this is not sufficient for validating repoviews because the set
316 However, this is not sufficient for validating repoviews because the set
313 of revisions in the view may change without the repository tiprev and
317 of revisions in the view may change without the repository tiprev and
314 tipnode changing.
318 tipnode changing.
315
319
316 This function hashes all the revs filtered from the view and returns
320 This function hashes all the revs filtered from the view and returns
317 that SHA-1 digest.
321 that SHA-1 digest.
318 """
322 """
319 cl = repo.changelog
323 cl = repo.changelog
320 if not cl.filteredrevs:
324 if not cl.filteredrevs:
321 return None
325 return None
322 key = None
326 key = None
323 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
327 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
324 if revs:
328 if revs:
325 s = hashlib.sha1()
329 s = hashlib.sha1()
326 for rev in revs:
330 for rev in revs:
327 s.update('%d;' % rev)
331 s.update('%d;' % rev)
328 key = s.digest()
332 key = s.digest()
329 return key
333 return key
330
334
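A sketch of how a cache might fold this digest into its validation key; the tuple layout is illustrative, not the exact on-disk format of any particular cache:

    maxrev = len(repo.changelog) - 1          # current tip revision
    filterkey = filteredhash(repo, maxrev)    # None when nothing is filtered
    cachekey = (maxrev, repo.changelog.node(maxrev), filterkey)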
331 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
335 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
332 '''yield every hg repository under path, always recursively.
336 '''yield every hg repository under path, always recursively.
333 The recurse flag will only control recursion into repo working dirs'''
337 The recurse flag will only control recursion into repo working dirs'''
334 def errhandler(err):
338 def errhandler(err):
335 if err.filename == path:
339 if err.filename == path:
336 raise err
340 raise err
337 samestat = getattr(os.path, 'samestat', None)
341 samestat = getattr(os.path, 'samestat', None)
338 if followsym and samestat is not None:
342 if followsym and samestat is not None:
339 def adddir(dirlst, dirname):
343 def adddir(dirlst, dirname):
340 match = False
344 match = False
341 dirstat = os.stat(dirname)
345 dirstat = os.stat(dirname)
342 for lstdirstat in dirlst:
346 for lstdirstat in dirlst:
343 if samestat(dirstat, lstdirstat):
347 if samestat(dirstat, lstdirstat):
344 match = True
348 match = True
345 break
349 break
346 if not match:
350 if not match:
347 dirlst.append(dirstat)
351 dirlst.append(dirstat)
348 return not match
352 return not match
349 else:
353 else:
350 followsym = False
354 followsym = False
351
355
352 if (seen_dirs is None) and followsym:
356 if (seen_dirs is None) and followsym:
353 seen_dirs = []
357 seen_dirs = []
354 adddir(seen_dirs, path)
358 adddir(seen_dirs, path)
355 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
359 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
356 dirs.sort()
360 dirs.sort()
357 if '.hg' in dirs:
361 if '.hg' in dirs:
358 yield root # found a repository
362 yield root # found a repository
359 qroot = os.path.join(root, '.hg', 'patches')
363 qroot = os.path.join(root, '.hg', 'patches')
360 if os.path.isdir(os.path.join(qroot, '.hg')):
364 if os.path.isdir(os.path.join(qroot, '.hg')):
361 yield qroot # we have a patch queue repo here
365 yield qroot # we have a patch queue repo here
362 if recurse:
366 if recurse:
363 # avoid recursing inside the .hg directory
367 # avoid recursing inside the .hg directory
364 dirs.remove('.hg')
368 dirs.remove('.hg')
365 else:
369 else:
366 dirs[:] = [] # don't descend further
370 dirs[:] = [] # don't descend further
367 elif followsym:
371 elif followsym:
368 newdirs = []
372 newdirs = []
369 for d in dirs:
373 for d in dirs:
370 fname = os.path.join(root, d)
374 fname = os.path.join(root, d)
371 if adddir(seen_dirs, fname):
375 if adddir(seen_dirs, fname):
372 if os.path.islink(fname):
376 if os.path.islink(fname):
373 for hgname in walkrepos(fname, True, seen_dirs):
377 for hgname in walkrepos(fname, True, seen_dirs):
374 yield hgname
378 yield hgname
375 else:
379 else:
376 newdirs.append(d)
380 newdirs.append(d)
377 dirs[:] = newdirs
381 dirs[:] = newdirs
378
382
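A small usage sketch; the path is hypothetical:

    for root in walkrepos('/srv/hg', followsym=True, recurse=True):
        ui.status('%s\n' % root)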
383 def binnode(ctx):
384 """Return binary node id for a given basectx"""
385 node = ctx.node()
386 if node is None:
387 return wdirid
388 return node
389
379 def intrev(ctx):
390 def intrev(ctx):
380 """Return integer for a given basectx that can be used in comparison or
391 """Return integer for a given basectx that can be used in comparison or
381 arithmetic operation"""
392 arithmetic operation"""
382 rev = ctx.rev()
393 rev = ctx.rev()
383 if rev is None:
394 if rev is None:
384 return wdirrev
395 return wdirrev
385 return rev
396 return rev
386
397
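binnode() is the node-level twin of intrev(): both map the working directory, whose node and revision are None, onto the wdirid/wdirrev placeholders from mercurial.node. A short illustration, assuming 'repo' is any local repository:

    wctx = repo[None]                  # working-directory context
    assert wctx.node() is None and wctx.rev() is None
    assert binnode(wctx) == wdirid     # placeholder binary node id
    assert intrev(wctx) == wdirrev     # placeholder integer revision
    assert binnode(repo['tip']) == repo['tip'].node()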
387 def revsingle(repo, revspec, default='.'):
398 def revsingle(repo, revspec, default='.'):
388 if not revspec and revspec != 0:
399 if not revspec and revspec != 0:
389 return repo[default]
400 return repo[default]
390
401
391 l = revrange(repo, [revspec])
402 l = revrange(repo, [revspec])
392 if not l:
403 if not l:
393 raise error.Abort(_('empty revision set'))
404 raise error.Abort(_('empty revision set'))
394 return repo[l.last()]
405 return repo[l.last()]
395
406
396 def _pairspec(revspec):
407 def _pairspec(revspec):
397 tree = revsetlang.parse(revspec)
408 tree = revsetlang.parse(revspec)
398 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
409 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
399
410
400 def revpair(repo, revs):
411 def revpair(repo, revs):
401 if not revs:
412 if not revs:
402 return repo.dirstate.p1(), None
413 return repo.dirstate.p1(), None
403
414
404 l = revrange(repo, revs)
415 l = revrange(repo, revs)
405
416
406 if not l:
417 if not l:
407 first = second = None
418 first = second = None
408 elif l.isascending():
419 elif l.isascending():
409 first = l.min()
420 first = l.min()
410 second = l.max()
421 second = l.max()
411 elif l.isdescending():
422 elif l.isdescending():
412 first = l.max()
423 first = l.max()
413 second = l.min()
424 second = l.min()
414 else:
425 else:
415 first = l.first()
426 first = l.first()
416 second = l.last()
427 second = l.last()
417
428
418 if first is None:
429 if first is None:
419 raise error.Abort(_('empty revision range'))
430 raise error.Abort(_('empty revision range'))
420 if (first == second and len(revs) >= 2
431 if (first == second and len(revs) >= 2
421 and not all(revrange(repo, [r]) for r in revs)):
432 and not all(revrange(repo, [r]) for r in revs)):
422 raise error.Abort(_('empty revision on one side of range'))
433 raise error.Abort(_('empty revision on one side of range'))
423
434
424 # if top-level is range expression, the result must always be a pair
435 # if top-level is range expression, the result must always be a pair
425 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
436 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
426 return repo.lookup(first), None
437 return repo.lookup(first), None
427
438
428 return repo.lookup(first), repo.lookup(second)
439 return repo.lookup(first), repo.lookup(second)
429
440
430 def revrange(repo, specs):
441 def revrange(repo, specs):
431 """Execute 1 to many revsets and return the union.
442 """Execute 1 to many revsets and return the union.
432
443
433 This is the preferred mechanism for executing revsets using user-specified
444 This is the preferred mechanism for executing revsets using user-specified
434 config options, such as revset aliases.
445 config options, such as revset aliases.
435
446
436 The revsets specified by ``specs`` will be executed via a chained ``OR``
447 The revsets specified by ``specs`` will be executed via a chained ``OR``
437 expression. If ``specs`` is empty, an empty result is returned.
448 expression. If ``specs`` is empty, an empty result is returned.
438
449
439 ``specs`` can contain integers, in which case they are assumed to be
450 ``specs`` can contain integers, in which case they are assumed to be
440 revision numbers.
451 revision numbers.
441
452
442 It is assumed the revsets are already formatted. If you have arguments
453 It is assumed the revsets are already formatted. If you have arguments
443 that need to be expanded in the revset, call ``revsetlang.formatspec()``
454 that need to be expanded in the revset, call ``revsetlang.formatspec()``
444 and pass the result as an element of ``specs``.
455 and pass the result as an element of ``specs``.
445
456
446 Specifying a single revset is allowed.
457 Specifying a single revset is allowed.
447
458
448 Returns a ``revset.abstractsmartset`` which is a list-like interface over
459 Returns a ``revset.abstractsmartset`` which is a list-like interface over
449 integer revisions.
460 integer revisions.
450 """
461 """
451 allspecs = []
462 allspecs = []
452 for spec in specs:
463 for spec in specs:
453 if isinstance(spec, int):
464 if isinstance(spec, int):
454 spec = revsetlang.formatspec('rev(%d)', spec)
465 spec = revsetlang.formatspec('rev(%d)', spec)
455 allspecs.append(spec)
466 allspecs.append(spec)
456 return repo.anyrevs(allspecs, user=True)
467 return repo.anyrevs(allspecs, user=True)
457
468
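A hedged sketch of the documented pattern: expand user arguments with revsetlang.formatspec() first, then hand the formatted specs to revrange(); the branch name is made up:

    spec = revsetlang.formatspec('branch(%s) and not public()', 'default')
    revs = revrange(repo, [spec, 'tip'])    # evaluated as a chained OR
    for rev in revs:
        ui.note('%d\n' % rev)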
458 def meaningfulparents(repo, ctx):
469 def meaningfulparents(repo, ctx):
459 """Return list of meaningful (or all if debug) parentrevs for rev.
470 """Return list of meaningful (or all if debug) parentrevs for rev.
460
471
461 For merges (two non-nullrev revisions) both parents are meaningful.
472 For merges (two non-nullrev revisions) both parents are meaningful.
462 Otherwise the first parent revision is considered meaningful if it
473 Otherwise the first parent revision is considered meaningful if it
463 is not the preceding revision.
474 is not the preceding revision.
464 """
475 """
465 parents = ctx.parents()
476 parents = ctx.parents()
466 if len(parents) > 1:
477 if len(parents) > 1:
467 return parents
478 return parents
468 if repo.ui.debugflag:
479 if repo.ui.debugflag:
469 return [parents[0], repo['null']]
480 return [parents[0], repo['null']]
470 if parents[0].rev() >= intrev(ctx) - 1:
481 if parents[0].rev() >= intrev(ctx) - 1:
471 return []
482 return []
472 return parents
483 return parents
473
484
474 def expandpats(pats):
485 def expandpats(pats):
475 '''Expand bare globs when running on windows.
486 '''Expand bare globs when running on windows.
476 On posix we assume it has already been done by sh.'''
487 On posix we assume it has already been done by sh.'''
477 if not util.expandglobs:
488 if not util.expandglobs:
478 return list(pats)
489 return list(pats)
479 ret = []
490 ret = []
480 for kindpat in pats:
491 for kindpat in pats:
481 kind, pat = matchmod._patsplit(kindpat, None)
492 kind, pat = matchmod._patsplit(kindpat, None)
482 if kind is None:
493 if kind is None:
483 try:
494 try:
484 globbed = glob.glob(pat)
495 globbed = glob.glob(pat)
485 except re.error:
496 except re.error:
486 globbed = [pat]
497 globbed = [pat]
487 if globbed:
498 if globbed:
488 ret.extend(globbed)
499 ret.extend(globbed)
489 continue
500 continue
490 ret.append(kindpat)
501 ret.append(kindpat)
491 return ret
502 return ret
492
503
493 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
504 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
494 badfn=None):
505 badfn=None):
495 '''Return a matcher and the patterns that were used.
506 '''Return a matcher and the patterns that were used.
496 The matcher will warn about bad matches, unless an alternate badfn callback
507 The matcher will warn about bad matches, unless an alternate badfn callback
497 is provided.'''
508 is provided.'''
498 if pats == ("",):
509 if pats == ("",):
499 pats = []
510 pats = []
500 if opts is None:
511 if opts is None:
501 opts = {}
512 opts = {}
502 if not globbed and default == 'relpath':
513 if not globbed and default == 'relpath':
503 pats = expandpats(pats or [])
514 pats = expandpats(pats or [])
504
515
505 def bad(f, msg):
516 def bad(f, msg):
506 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
517 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
507
518
508 if badfn is None:
519 if badfn is None:
509 badfn = bad
520 badfn = bad
510
521
511 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
522 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
512 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
523 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
513
524
514 if m.always():
525 if m.always():
515 pats = []
526 pats = []
516 return m, pats
527 return m, pats
517
528
518 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
529 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
519 badfn=None):
530 badfn=None):
520 '''Return a matcher that will warn about bad matches.'''
531 '''Return a matcher that will warn about bad matches.'''
521 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
532 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
522
533
523 def matchall(repo):
534 def matchall(repo):
524 '''Return a matcher that will efficiently match everything.'''
535 '''Return a matcher that will efficiently match everything.'''
525 return matchmod.always(repo.root, repo.getcwd())
536 return matchmod.always(repo.root, repo.getcwd())
526
537
527 def matchfiles(repo, files, badfn=None):
538 def matchfiles(repo, files, badfn=None):
528 '''Return a matcher that will efficiently match exactly these files.'''
539 '''Return a matcher that will efficiently match exactly these files.'''
529 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
540 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
530
541
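A short sketch of the three matcher helpers above; the patterns and filenames are illustrative:

    m = match(repo[None], pats=['glob:*.py'])   # warns about bad patterns
    if m('setup.py'):
        pass                                    # matchers are callable on paths
    everything = matchall(repo)
    exact = matchfiles(repo, ['README', '.hgignore'])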
531 def origpath(ui, repo, filepath):
542 def origpath(ui, repo, filepath):
532 '''customize where .orig files are created
543 '''customize where .orig files are created
533
544
534 Fetch user defined path from config file: [ui] origbackuppath = <path>
545 Fetch user defined path from config file: [ui] origbackuppath = <path>
535 Fall back to default (filepath) if not specified
546 Fall back to default (filepath) if not specified
536 '''
547 '''
537 origbackuppath = ui.config('ui', 'origbackuppath', None)
548 origbackuppath = ui.config('ui', 'origbackuppath', None)
538 if origbackuppath is None:
549 if origbackuppath is None:
539 return filepath + ".orig"
550 return filepath + ".orig"
540
551
541 filepathfromroot = os.path.relpath(filepath, start=repo.root)
552 filepathfromroot = os.path.relpath(filepath, start=repo.root)
542 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
553 fullorigpath = repo.wjoin(origbackuppath, filepathfromroot)
543
554
544 origbackupdir = repo.vfs.dirname(fullorigpath)
555 origbackupdir = repo.vfs.dirname(fullorigpath)
545 if not repo.vfs.exists(origbackupdir):
556 if not repo.vfs.exists(origbackupdir):
546 ui.note(_('creating directory: %s\n') % origbackupdir)
557 ui.note(_('creating directory: %s\n') % origbackupdir)
547 util.makedirs(origbackupdir)
558 util.makedirs(origbackupdir)
548
559
549 return fullorigpath + ".orig"
560 return fullorigpath + ".orig"
550
561
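For example (the backup directory name is just an illustration), assuming the user's configuration contains:

    [ui]
    origbackuppath = .hg/origbackups

then origpath(ui, repo, 'dir/f.txt') returns '<repo>/.hg/origbackups/dir/f.txt.orig' (creating the directory if needed) rather than the default 'dir/f.txt.orig'.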
551 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
562 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
552 if opts is None:
563 if opts is None:
553 opts = {}
564 opts = {}
554 m = matcher
565 m = matcher
555 if dry_run is None:
566 if dry_run is None:
556 dry_run = opts.get('dry_run')
567 dry_run = opts.get('dry_run')
557 if similarity is None:
568 if similarity is None:
558 similarity = float(opts.get('similarity') or 0)
569 similarity = float(opts.get('similarity') or 0)
559
570
560 ret = 0
571 ret = 0
561 join = lambda f: os.path.join(prefix, f)
572 join = lambda f: os.path.join(prefix, f)
562
573
563 wctx = repo[None]
574 wctx = repo[None]
564 for subpath in sorted(wctx.substate):
575 for subpath in sorted(wctx.substate):
565 submatch = matchmod.subdirmatcher(subpath, m)
576 submatch = matchmod.subdirmatcher(subpath, m)
566 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
577 if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
567 sub = wctx.sub(subpath)
578 sub = wctx.sub(subpath)
568 try:
579 try:
569 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
580 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
570 ret = 1
581 ret = 1
571 except error.LookupError:
582 except error.LookupError:
572 repo.ui.status(_("skipping missing subrepository: %s\n")
583 repo.ui.status(_("skipping missing subrepository: %s\n")
573 % join(subpath))
584 % join(subpath))
574
585
575 rejected = []
586 rejected = []
576 def badfn(f, msg):
587 def badfn(f, msg):
577 if f in m.files():
588 if f in m.files():
578 m.bad(f, msg)
589 m.bad(f, msg)
579 rejected.append(f)
590 rejected.append(f)
580
591
581 badmatch = matchmod.badmatch(m, badfn)
592 badmatch = matchmod.badmatch(m, badfn)
582 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
593 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
583 badmatch)
594 badmatch)
584
595
585 unknownset = set(unknown + forgotten)
596 unknownset = set(unknown + forgotten)
586 toprint = unknownset.copy()
597 toprint = unknownset.copy()
587 toprint.update(deleted)
598 toprint.update(deleted)
588 for abs in sorted(toprint):
599 for abs in sorted(toprint):
589 if repo.ui.verbose or not m.exact(abs):
600 if repo.ui.verbose or not m.exact(abs):
590 if abs in unknownset:
601 if abs in unknownset:
591 status = _('adding %s\n') % m.uipath(abs)
602 status = _('adding %s\n') % m.uipath(abs)
592 else:
603 else:
593 status = _('removing %s\n') % m.uipath(abs)
604 status = _('removing %s\n') % m.uipath(abs)
594 repo.ui.status(status)
605 repo.ui.status(status)
595
606
596 renames = _findrenames(repo, m, added + unknown, removed + deleted,
607 renames = _findrenames(repo, m, added + unknown, removed + deleted,
597 similarity)
608 similarity)
598
609
599 if not dry_run:
610 if not dry_run:
600 _markchanges(repo, unknown + forgotten, deleted, renames)
611 _markchanges(repo, unknown + forgotten, deleted, renames)
601
612
602 for f in rejected:
613 for f in rejected:
603 if f in m.files():
614 if f in m.files():
604 return 1
615 return 1
605 return ret
616 return ret
606
617
607 def marktouched(repo, files, similarity=0.0):
618 def marktouched(repo, files, similarity=0.0):
608 '''Assert that files have somehow been operated upon. Files are relative to
619 '''Assert that files have somehow been operated upon. Files are relative to
609 the repo root.'''
620 the repo root.'''
610 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
621 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
611 rejected = []
622 rejected = []
612
623
613 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
624 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
614
625
615 if repo.ui.verbose:
626 if repo.ui.verbose:
616 unknownset = set(unknown + forgotten)
627 unknownset = set(unknown + forgotten)
617 toprint = unknownset.copy()
628 toprint = unknownset.copy()
618 toprint.update(deleted)
629 toprint.update(deleted)
619 for abs in sorted(toprint):
630 for abs in sorted(toprint):
620 if abs in unknownset:
631 if abs in unknownset:
621 status = _('adding %s\n') % abs
632 status = _('adding %s\n') % abs
622 else:
633 else:
623 status = _('removing %s\n') % abs
634 status = _('removing %s\n') % abs
624 repo.ui.status(status)
635 repo.ui.status(status)
625
636
626 renames = _findrenames(repo, m, added + unknown, removed + deleted,
637 renames = _findrenames(repo, m, added + unknown, removed + deleted,
627 similarity)
638 similarity)
628
639
629 _markchanges(repo, unknown + forgotten, deleted, renames)
640 _markchanges(repo, unknown + forgotten, deleted, renames)
630
641
631 for f in rejected:
642 for f in rejected:
632 if f in m.files():
643 if f in m.files():
633 return 1
644 return 1
634 return 0
645 return 0
635
646
636 def _interestingfiles(repo, matcher):
647 def _interestingfiles(repo, matcher):
637 '''Walk dirstate with matcher, looking for files that addremove would care
648 '''Walk dirstate with matcher, looking for files that addremove would care
638 about.
649 about.
639
650
640 This is different from dirstate.status because it doesn't care about
651 This is different from dirstate.status because it doesn't care about
641 whether files are modified or clean.'''
652 whether files are modified or clean.'''
642 added, unknown, deleted, removed, forgotten = [], [], [], [], []
653 added, unknown, deleted, removed, forgotten = [], [], [], [], []
643 audit_path = pathutil.pathauditor(repo.root)
654 audit_path = pathutil.pathauditor(repo.root)
644
655
645 ctx = repo[None]
656 ctx = repo[None]
646 dirstate = repo.dirstate
657 dirstate = repo.dirstate
647 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
658 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
648 full=False)
659 full=False)
649 for abs, st in walkresults.iteritems():
660 for abs, st in walkresults.iteritems():
650 dstate = dirstate[abs]
661 dstate = dirstate[abs]
651 if dstate == '?' and audit_path.check(abs):
662 if dstate == '?' and audit_path.check(abs):
652 unknown.append(abs)
663 unknown.append(abs)
653 elif dstate != 'r' and not st:
664 elif dstate != 'r' and not st:
654 deleted.append(abs)
665 deleted.append(abs)
655 elif dstate == 'r' and st:
666 elif dstate == 'r' and st:
656 forgotten.append(abs)
667 forgotten.append(abs)
657 # for finding renames
668 # for finding renames
658 elif dstate == 'r' and not st:
669 elif dstate == 'r' and not st:
659 removed.append(abs)
670 removed.append(abs)
660 elif dstate == 'a':
671 elif dstate == 'a':
661 added.append(abs)
672 added.append(abs)
662
673
663 return added, unknown, deleted, removed, forgotten
674 return added, unknown, deleted, removed, forgotten
664
675
665 def _findrenames(repo, matcher, added, removed, similarity):
676 def _findrenames(repo, matcher, added, removed, similarity):
666 '''Find renames from removed files to added ones.'''
677 '''Find renames from removed files to added ones.'''
667 renames = {}
678 renames = {}
668 if similarity > 0:
679 if similarity > 0:
669 for old, new, score in similar.findrenames(repo, added, removed,
680 for old, new, score in similar.findrenames(repo, added, removed,
670 similarity):
681 similarity):
671 if (repo.ui.verbose or not matcher.exact(old)
682 if (repo.ui.verbose or not matcher.exact(old)
672 or not matcher.exact(new)):
683 or not matcher.exact(new)):
673 repo.ui.status(_('recording removal of %s as rename to %s '
684 repo.ui.status(_('recording removal of %s as rename to %s '
674 '(%d%% similar)\n') %
685 '(%d%% similar)\n') %
675 (matcher.rel(old), matcher.rel(new),
686 (matcher.rel(old), matcher.rel(new),
676 score * 100))
687 score * 100))
677 renames[new] = old
688 renames[new] = old
678 return renames
689 return renames
679
690
680 def _markchanges(repo, unknown, deleted, renames):
691 def _markchanges(repo, unknown, deleted, renames):
681 '''Marks the files in unknown as added, the files in deleted as removed,
692 '''Marks the files in unknown as added, the files in deleted as removed,
682 and the files in renames as copied.'''
693 and the files in renames as copied.'''
683 wctx = repo[None]
694 wctx = repo[None]
684 with repo.wlock():
695 with repo.wlock():
685 wctx.forget(deleted)
696 wctx.forget(deleted)
686 wctx.add(unknown)
697 wctx.add(unknown)
687 for new, old in renames.iteritems():
698 for new, old in renames.iteritems():
688 wctx.copy(old, new)
699 wctx.copy(old, new)
689
700
690 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
701 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
691 """Update the dirstate to reflect the intent of copying src to dst. For
702 """Update the dirstate to reflect the intent of copying src to dst. For
692 different reasons it might not end with dst being marked as copied from src.
703 different reasons it might not end with dst being marked as copied from src.
693 """
704 """
694 origsrc = repo.dirstate.copied(src) or src
705 origsrc = repo.dirstate.copied(src) or src
695 if dst == origsrc: # copying back a copy?
706 if dst == origsrc: # copying back a copy?
696 if repo.dirstate[dst] not in 'mn' and not dryrun:
707 if repo.dirstate[dst] not in 'mn' and not dryrun:
697 repo.dirstate.normallookup(dst)
708 repo.dirstate.normallookup(dst)
698 else:
709 else:
699 if repo.dirstate[origsrc] == 'a' and origsrc == src:
710 if repo.dirstate[origsrc] == 'a' and origsrc == src:
700 if not ui.quiet:
711 if not ui.quiet:
701 ui.warn(_("%s has not been committed yet, so no copy "
712 ui.warn(_("%s has not been committed yet, so no copy "
702 "data will be stored for %s.\n")
713 "data will be stored for %s.\n")
703 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
714 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
704 if repo.dirstate[dst] in '?r' and not dryrun:
715 if repo.dirstate[dst] in '?r' and not dryrun:
705 wctx.add([dst])
716 wctx.add([dst])
706 elif not dryrun:
717 elif not dryrun:
707 wctx.copy(origsrc, dst)
718 wctx.copy(origsrc, dst)
708
719
709 def readrequires(opener, supported):
720 def readrequires(opener, supported):
710 '''Reads and parses .hg/requires and checks if all entries found
721 '''Reads and parses .hg/requires and checks if all entries found
711 are in the list of supported features.'''
722 are in the list of supported features.'''
712 requirements = set(opener.read("requires").splitlines())
723 requirements = set(opener.read("requires").splitlines())
713 missings = []
724 missings = []
714 for r in requirements:
725 for r in requirements:
715 if r not in supported:
726 if r not in supported:
716 if not r or not r[0].isalnum():
727 if not r or not r[0].isalnum():
717 raise error.RequirementError(_(".hg/requires file is corrupt"))
728 raise error.RequirementError(_(".hg/requires file is corrupt"))
718 missings.append(r)
729 missings.append(r)
719 missings.sort()
730 missings.sort()
720 if missings:
731 if missings:
721 raise error.RequirementError(
732 raise error.RequirementError(
722 _("repository requires features unknown to this Mercurial: %s")
733 _("repository requires features unknown to this Mercurial: %s")
723 % " ".join(missings),
734 % " ".join(missings),
724 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
735 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
725 " for more information"))
736 " for more information"))
726 return requirements
737 return requirements
727
738
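A typical .hg/requires file lists one feature name per line (for example revlogv1, store, fncache, dotencode, generaldelta). A hedged sketch of the call, assuming 'repo' is a local repository object:

    supported = set(['revlogv1', 'store', 'fncache', 'dotencode',
                     'generaldelta'])
    requirements = readrequires(repo.vfs, supported)
    # raises error.RequirementError if the file names anything else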
728 def writerequires(opener, requirements):
739 def writerequires(opener, requirements):
729 with opener('requires', 'w') as fp:
740 with opener('requires', 'w') as fp:
730 for r in sorted(requirements):
741 for r in sorted(requirements):
731 fp.write("%s\n" % r)
742 fp.write("%s\n" % r)
732
743
733 class filecachesubentry(object):
744 class filecachesubentry(object):
734 def __init__(self, path, stat):
745 def __init__(self, path, stat):
735 self.path = path
746 self.path = path
736 self.cachestat = None
747 self.cachestat = None
737 self._cacheable = None
748 self._cacheable = None
738
749
739 if stat:
750 if stat:
740 self.cachestat = filecachesubentry.stat(self.path)
751 self.cachestat = filecachesubentry.stat(self.path)
741
752
742 if self.cachestat:
753 if self.cachestat:
743 self._cacheable = self.cachestat.cacheable()
754 self._cacheable = self.cachestat.cacheable()
744 else:
755 else:
745 # None means we don't know yet
756 # None means we don't know yet
746 self._cacheable = None
757 self._cacheable = None
747
758
748 def refresh(self):
759 def refresh(self):
749 if self.cacheable():
760 if self.cacheable():
750 self.cachestat = filecachesubentry.stat(self.path)
761 self.cachestat = filecachesubentry.stat(self.path)
751
762
752 def cacheable(self):
763 def cacheable(self):
753 if self._cacheable is not None:
764 if self._cacheable is not None:
754 return self._cacheable
765 return self._cacheable
755
766
756 # we don't know yet, assume it is for now
767 # we don't know yet, assume it is for now
757 return True
768 return True
758
769
759 def changed(self):
770 def changed(self):
760 # no point in going further if we can't cache it
771 # no point in going further if we can't cache it
761 if not self.cacheable():
772 if not self.cacheable():
762 return True
773 return True
763
774
764 newstat = filecachesubentry.stat(self.path)
775 newstat = filecachesubentry.stat(self.path)
765
776
766 # we may not know if it's cacheable yet, check again now
777 # we may not know if it's cacheable yet, check again now
767 if newstat and self._cacheable is None:
778 if newstat and self._cacheable is None:
768 self._cacheable = newstat.cacheable()
779 self._cacheable = newstat.cacheable()
769
780
770 # check again
781 # check again
771 if not self._cacheable:
782 if not self._cacheable:
772 return True
783 return True
773
784
774 if self.cachestat != newstat:
785 if self.cachestat != newstat:
775 self.cachestat = newstat
786 self.cachestat = newstat
776 return True
787 return True
777 else:
788 else:
778 return False
789 return False
779
790
780 @staticmethod
791 @staticmethod
781 def stat(path):
792 def stat(path):
782 try:
793 try:
783 return util.cachestat(path)
794 return util.cachestat(path)
784 except OSError as e:
795 except OSError as e:
785 if e.errno != errno.ENOENT:
796 if e.errno != errno.ENOENT:
786 raise
797 raise
787
798
788 class filecacheentry(object):
799 class filecacheentry(object):
789 def __init__(self, paths, stat=True):
800 def __init__(self, paths, stat=True):
790 self._entries = []
801 self._entries = []
791 for path in paths:
802 for path in paths:
792 self._entries.append(filecachesubentry(path, stat))
803 self._entries.append(filecachesubentry(path, stat))
793
804
794 def changed(self):
805 def changed(self):
795 '''true if any entry has changed'''
806 '''true if any entry has changed'''
796 for entry in self._entries:
807 for entry in self._entries:
797 if entry.changed():
808 if entry.changed():
798 return True
809 return True
799 return False
810 return False
800
811
801 def refresh(self):
812 def refresh(self):
802 for entry in self._entries:
813 for entry in self._entries:
803 entry.refresh()
814 entry.refresh()
804
815
805 class filecache(object):
816 class filecache(object):
806 '''A property like decorator that tracks files under .hg/ for updates.
817 '''A property like decorator that tracks files under .hg/ for updates.
807
818
808 Records stat info when called in _filecache.
819 Records stat info when called in _filecache.
809
820
810 On subsequent calls, compares old stat info with new info, and recreates the
821 On subsequent calls, compares old stat info with new info, and recreates the
811 object when any of the files changes, updating the new stat info in
822 object when any of the files changes, updating the new stat info in
812 _filecache.
823 _filecache.
813
824
814 Mercurial uses either atomic renames or appends for files under .hg,
825 Mercurial uses either atomic renames or appends for files under .hg,
815 so to ensure the cache is reliable we need the filesystem to be able
826 so to ensure the cache is reliable we need the filesystem to be able
816 to tell us if a file has been replaced. If it can't, we fall back to
827 to tell us if a file has been replaced. If it can't, we fall back to
817 recreating the object on every call (essentially the same behavior as
828 recreating the object on every call (essentially the same behavior as
818 propertycache).
829 propertycache).
819
830
820 '''
831 '''
821 def __init__(self, *paths):
832 def __init__(self, *paths):
822 self.paths = paths
833 self.paths = paths
823
834
824 def join(self, obj, fname):
835 def join(self, obj, fname):
825 """Used to compute the runtime path of a cached file.
836 """Used to compute the runtime path of a cached file.
826
837
827 Users should subclass filecache and provide their own version of this
838 Users should subclass filecache and provide their own version of this
828 function to call the appropriate join function on 'obj' (an instance
839 function to call the appropriate join function on 'obj' (an instance
829 of the class that its member function was decorated).
840 of the class that its member function was decorated).
830 """
841 """
831 raise NotImplementedError
842 raise NotImplementedError
832
843
833 def __call__(self, func):
844 def __call__(self, func):
834 self.func = func
845 self.func = func
835 self.name = func.__name__.encode('ascii')
846 self.name = func.__name__.encode('ascii')
836 return self
847 return self
837
848
838 def __get__(self, obj, type=None):
849 def __get__(self, obj, type=None):
839 # if accessed on the class, return the descriptor itself.
850 # if accessed on the class, return the descriptor itself.
840 if obj is None:
851 if obj is None:
841 return self
852 return self
842 # do we need to check if the file changed?
853 # do we need to check if the file changed?
843 if self.name in obj.__dict__:
854 if self.name in obj.__dict__:
844 assert self.name in obj._filecache, self.name
855 assert self.name in obj._filecache, self.name
845 return obj.__dict__[self.name]
856 return obj.__dict__[self.name]
846
857
847 entry = obj._filecache.get(self.name)
858 entry = obj._filecache.get(self.name)
848
859
849 if entry:
860 if entry:
850 if entry.changed():
861 if entry.changed():
851 entry.obj = self.func(obj)
862 entry.obj = self.func(obj)
852 else:
863 else:
853 paths = [self.join(obj, path) for path in self.paths]
864 paths = [self.join(obj, path) for path in self.paths]
854
865
855 # We stat -before- creating the object so our cache doesn't lie if
866 # We stat -before- creating the object so our cache doesn't lie if
856 # a writer modified between the time we read and stat
867 # a writer modified between the time we read and stat
857 entry = filecacheentry(paths, True)
868 entry = filecacheentry(paths, True)
858 entry.obj = self.func(obj)
869 entry.obj = self.func(obj)
859
870
860 obj._filecache[self.name] = entry
871 obj._filecache[self.name] = entry
861
872
862 obj.__dict__[self.name] = entry.obj
873 obj.__dict__[self.name] = entry.obj
863 return entry.obj
874 return entry.obj
864
875
865 def __set__(self, obj, value):
876 def __set__(self, obj, value):
866 if self.name not in obj._filecache:
877 if self.name not in obj._filecache:
867 # we add an entry for the missing value because X in __dict__
878 # we add an entry for the missing value because X in __dict__
868 # implies X in _filecache
879 # implies X in _filecache
869 paths = [self.join(obj, path) for path in self.paths]
880 paths = [self.join(obj, path) for path in self.paths]
870 ce = filecacheentry(paths, False)
881 ce = filecacheentry(paths, False)
871 obj._filecache[self.name] = ce
882 obj._filecache[self.name] = ce
872 else:
883 else:
873 ce = obj._filecache[self.name]
884 ce = obj._filecache[self.name]
874
885
875 ce.obj = value # update cached copy
886 ce.obj = value # update cached copy
876 obj.__dict__[self.name] = value # update copy returned by obj.x
887 obj.__dict__[self.name] = value # update copy returned by obj.x
877
888
878 def __delete__(self, obj):
889 def __delete__(self, obj):
879 try:
890 try:
880 del obj.__dict__[self.name]
891 del obj.__dict__[self.name]
881 except KeyError:
892 except KeyError:
882 raise AttributeError(self.name)
893 raise AttributeError(self.name)
883
894
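
For orientation, here is a minimal sketch of how the decorator above is typically wired up. It is illustrative only and not part of this changeset; the subclass name repofilecache, the examplerepo class, and the vfs calls are assumptions standing in for the real callers.

class repofilecache(filecache):
    """illustrative subclass: track files relative to the object's vfs"""
    def join(self, obj, fname):
        # resolve the tracked filename against the owning object's vfs
        return obj.vfs.join(fname)

class examplerepo(object):
    def __init__(self, vfs):
        self.vfs = vfs
        self._filecache = {}  # filecache stores its filecacheentry objects here

    @repofilecache('bookmarks')
    def bookmarks(self):
        # re-run only when the stat info of .hg/bookmarks changes on disk
        return self.vfs.tryread('bookmarks').splitlines()

Reading examplerepo(...).bookmarks the first time stats the file and caches the result; later reads return the cached object until the file is replaced or appended to.
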
def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
    if lock is None:
        raise error.LockInheritanceContractViolation(
            'lock can only be inherited while held')
    if environ is None:
        environ = {}
    with lock.inherit() as locker:
        environ[envvar] = locker
        return repo.ui.system(cmd, environ=environ, *args, **kwargs)

def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
                    **kwargs)

def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    return (ui.configbool('format', 'generaldelta', False)
            or ui.configbool('format', 'usegeneraldelta', True))

def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    return ui.configbool('format', 'generaldelta', False)

class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of the file
        should be treated as a key-value pair or returned fully under the
        __firstline key."""
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumeric and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to the file before
        everything else, as is, not in key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
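
A small round-trip sketch of the format handled by simplekeyvaluefile, assuming the vfs module is available as vfsmod as in this file's imports; the function name and the sample keys are illustrative only.

def _simplekeyvaluedemo(tmpdir):
    """editor's sketch: write and re-read a simple key-value file"""
    vfs = vfsmod.vfs(tmpdir)
    f = simplekeyvaluefile(vfs, 'state')
    f.write({'version': '1', 'user': 'alice'}, firstline='demo-v1')
    # the file now holds 'demo-v1' on the first line, followed by
    # 'user=alice' and 'version=1' lines in dict iteration order
    return f.read(firstlinenonkeyval=True)
    # -> {'__firstline': 'demo-v1', 'user': 'alice', 'version': '1'}
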
@@ -1,671 +1,676 b''
# templatekw.py - common changeset template keywords
#
# Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

from .i18n import _
-from .node import hex, nullid
+from .node import (
+    hex,
+    nullid,
+    short,
+)
+
from . import (
    encoding,
    error,
    hbisect,
    patch,
    registrar,
    scmutil,
    util,
)

class _hybrid(object):
    """Wrapper for list or dict to support legacy template

    This class allows us to handle both:
    - "{files}" (legacy command-line-specific list hack) and
    - "{files % '{file}\n'}" (hgweb-style with inlining and function support)
    and to access raw values:
    - "{ifcontains(file, files, ...)}", "{ifcontains(key, extras, ...)}"
    - "{get(extras, key)}"
    - "{files|json}"
    """

    def __init__(self, gen, values, makemap, joinfmt):
        if gen is not None:
            self.gen = gen
        self._values = values
        self._makemap = makemap
        self.joinfmt = joinfmt
    @util.propertycache
    def gen(self):
        return self._defaultgen()
    def _defaultgen(self):
        """Generator to stringify this as {join(self, ' ')}"""
        for i, d in enumerate(self.itermaps()):
            if i > 0:
                yield ' '
            yield self.joinfmt(d)
    def itermaps(self):
        makemap = self._makemap
        for x in self._values:
            yield makemap(x)
    def __contains__(self, x):
        return x in self._values
    def __len__(self):
        return len(self._values)
    def __iter__(self):
        return iter(self._values)
    def __getattr__(self, name):
        if name not in ('get', 'items', 'iteritems', 'iterkeys', 'itervalues',
                        'keys', 'values'):
            raise AttributeError(name)
        return getattr(self._values, name)

def hybriddict(data, key='key', value='value', fmt='%s=%s', gen=None):
    """Wrap data to support both dict-like and string-like operations"""
    return _hybrid(gen, data, lambda k: {key: k, value: data[k]},
                   lambda d: fmt % (d[key], d[value]))

def hybridlist(data, name, fmt='%s', gen=None):
    """Wrap data to support both list-like and string-like operations"""
    return _hybrid(gen, data, lambda x: {name: x}, lambda d: fmt % d[name])

def unwraphybrid(thing):
    """Return an object which can be stringified possibly by using a legacy
    template"""
    if not util.safehasattr(thing, 'gen'):
        return thing
    return thing.gen
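
A minimal sketch of what these wrappers buy you, using only the functions defined above (the demo function itself is not part of this changeset):

def _hybriddemo():
    # 'files' behaves like a real list for membership and len, and exposes
    # per-item mappings, while unwraphybrid() yields the legacy joined form
    files = hybridlist(['a.txt', 'b.txt'], name='file')
    assert 'a.txt' in files and len(files) == 2
    assert list(files.itermaps()) == [{'file': 'a.txt'}, {'file': 'b.txt'}]
    return ''.join(unwraphybrid(files))  # -> 'a.txt b.txt'
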
def showdict(name, data, mapping, plural=None, key='key', value='value',
             fmt='%s=%s', separator=' '):
    c = [{key: k, value: v} for k, v in data.iteritems()]
    f = _showlist(name, c, mapping, plural, separator)
    return hybriddict(data, key=key, value=value, fmt=fmt, gen=f)

def showlist(name, values, mapping, plural=None, element=None, separator=' '):
    if not element:
        element = name
    f = _showlist(name, values, mapping, plural, separator)
    return hybridlist(values, name=element, gen=f)

def _showlist(name, values, mapping, plural=None, separator=' '):
    '''expand set of values.
    name is name of key in template map.
    values is list of strings or dicts.
    plural is plural of name, if not simply name + 's'.
    separator is used to join values as a string

    expansion works like this, given name 'foo'.

    if values is empty, expand 'no_foos'.

    if 'foo' not in template map, return values as a string,
    joined by 'separator'.

    expand 'start_foos'.

    for each value, expand 'foo'. if 'last_foo' in template
    map, expand it instead of 'foo' for last key.

    expand 'end_foos'.
    '''
    templ = mapping['templ']
    if not plural:
        plural = name + 's'
    if not values:
        noname = 'no_' + plural
        if noname in templ:
            yield templ(noname, **mapping)
        return
    if name not in templ:
        if isinstance(values[0], str):
            yield separator.join(values)
        else:
            for v in values:
                yield dict(v, **mapping)
        return
    startname = 'start_' + plural
    if startname in templ:
        yield templ(startname, **mapping)
    vmapping = mapping.copy()
    def one(v, tag=name):
        try:
            vmapping.update(v)
        except (AttributeError, ValueError):
            try:
                for a, b in v:
                    vmapping[a] = b
            except ValueError:
                vmapping[name] = v
        return templ(tag, **vmapping)
    lastname = 'last_' + name
    if lastname in templ:
        last = values.pop()
    else:
        last = None
    for v in values:
        yield one(v)
    if last is not None:
        yield one(last, tag=lastname)
    endname = 'end_' + plural
    if endname in templ:
        yield templ(endname, **mapping)
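
The hook names described in the docstring ('no_foos', 'start_foos', 'foo', 'last_foo', 'end_foos') all come from the template map. A toy sketch of the protocol, with a stand-in 'templ' object that only needs __contains__ and a call form; everything here is illustrative and not part of the changeset:

def _showlistdemo():
    hooks = {
        'start_files': 'files: ',
        'file': '{file} ',
        'last_file': '{file}',
        'end_files': '\n',
    }
    class faketempl(object):
        def __contains__(self, name):
            return name in hooks
        def __call__(self, name, **mapping):
            # crude expansion of the single placeholder used by this demo
            return hooks[name].replace('{file}', mapping.get('file', ''))
    mapping = {'templ': faketempl()}
    return ''.join(_showlist('file', ['a', 'b', 'c'], mapping))
    # -> 'files: a b c\n'
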
def _formatrevnode(ctx):
    """Format changeset as '{rev}:{node|formatnode}', which is the default
    template provided by cmdutil.changeset_templater"""
    repo = ctx.repo()
    if repo.ui.debugflag:
-        hexnode = ctx.hex()
+        hexfunc = hex
    else:
-        hexnode = ctx.hex()[:12]
+        hexfunc = short
-    return '%d:%s' % (scmutil.intrev(ctx), hexnode)
+    return '%d:%s' % (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
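
The new call is the point of this changeset: hex/short operate on raw binary node ids, so the working directory context (whose node() is None) needs a placeholder. A rough sketch of the paired helpers, inferred from the changeset description rather than copied from the scmutil hunk (which is not shown on this page); wdirrev and wdirid are the existing working-directory placeholders:

# editor's sketch only; the real definitions live in scmutil.py
from mercurial.node import wdirid, wdirrev

def intrev_sketch(ctx):
    rev = ctx.rev()
    if rev is None:          # working directory context
        return wdirrev
    return rev

def binnode_sketch(ctx):
    node = ctx.node()
    if node is None:         # working directory context
        return wdirid
    return node

With helpers like these, '{rev}:{node|formatnode}' renders the working directory as '2147483647:ffffffffffff' in non-debug mode.
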

def getfiles(repo, ctx, revcache):
    if 'files' not in revcache:
        revcache['files'] = repo.status(ctx.p1(), ctx)[:3]
    return revcache['files']

def getlatesttags(repo, ctx, cache, pattern=None):
    '''return date, distance and name for the latest tag of rev'''

    cachename = 'latesttags'
    if pattern is not None:
        cachename += '-' + pattern
        match = util.stringmatcher(pattern)[2]
    else:
        match = util.always

    if cachename not in cache:
        # Cache mapping from rev to a tuple with tag date, tag
        # distance and tag name
        cache[cachename] = {-1: (0, 0, ['null'])}
    latesttags = cache[cachename]

    rev = ctx.rev()
    todo = [rev]
    while todo:
        rev = todo.pop()
        if rev in latesttags:
            continue
        ctx = repo[rev]
        tags = [t for t in ctx.tags()
                if (repo.tagtype(t) and repo.tagtype(t) != 'local'
                    and match(t))]
        if tags:
            latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)]
            continue
        try:
            # The tuples are laid out so the right one can be found by
            # comparison.
            pdate, pdist, ptag = max(
                latesttags[p.rev()] for p in ctx.parents())
        except KeyError:
            # Cache miss - recurse
            todo.append(rev)
            todo.extend(p.rev() for p in ctx.parents())
            continue
        latesttags[rev] = pdate, pdist + 1, ptag
    return latesttags[rev]
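
A tiny illustration of the comparison trick used above: candidate tuples are (tag date, distance, tag names), so max() picks the most recently tagged ancestor, with ties falling through to the later tuple elements. The sample values are made up.

_candidates = [
    (1490000000, 3, ['v4.1']),   # older tag, further away
    (1500000000, 1, ['v4.2']),   # newer tag, closer
]
assert max(_candidates) == (1500000000, 1, ['v4.2'])
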
def getrenamedfn(repo, endrev=None):
    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fall back to
        # filectx logic.
        try:
            return repo[rev][fn].renamed()
        except error.LookupError:
            return None

    return getrenamed

# default templates internally used for rendering of lists
defaulttempl = {
    'parent': '{rev}:{node|formatnode} ',
    'manifest': '{rev}:{node|formatnode}',
    'file_copy': '{name} ({source})',
    'envvar': '{key}={value}',
    'extra': '{key}={value|stringescape}'
}
# filecopy is preserved for compatibility reasons
defaulttempl['filecopy'] = defaulttempl['file_copy']

# keywords are callables like:
# fn(repo, ctx, templ, cache, revcache, **args)
# with:
# repo - current repository instance
# ctx - the changectx being displayed
# templ - the templater instance
# cache - a cache dictionary for the whole templater run
# revcache - a cache dictionary for the current revision
keywords = {}

templatekeyword = registrar.templatekeyword(keywords)

@templatekeyword('author')
def showauthor(repo, ctx, templ, **args):
    """String. The unmodified author of the changeset."""
    return ctx.user()

@templatekeyword('bisect')
def showbisect(repo, ctx, templ, **args):
    """String. The changeset bisection status."""
    return hbisect.label(repo, ctx.node())

@templatekeyword('branch')
def showbranch(**args):
    """String. The name of the branch on which the changeset was
    committed.
    """
    return args['ctx'].branch()

@templatekeyword('branches')
def showbranches(**args):
    """List of strings. The name of the branch on which the
    changeset was committed. Will be empty if the branch name was
    default. (DEPRECATED)
    """
    branch = args['ctx'].branch()
    if branch != 'default':
        return showlist('branch', [branch], args, plural='branches')
    return showlist('branch', [], args, plural='branches')

@templatekeyword('bookmarks')
def showbookmarks(**args):
    """List of strings. Any bookmarks associated with the
    changeset. Also sets 'active', the name of the active bookmark.
    """
    repo = args['ctx']._repo
    bookmarks = args['ctx'].bookmarks()
    active = repo._activebookmark
    makemap = lambda v: {'bookmark': v, 'active': active, 'current': active}
    f = _showlist('bookmark', bookmarks, args)
    return _hybrid(f, bookmarks, makemap, lambda x: x['bookmark'])

@templatekeyword('children')
def showchildren(**args):
    """List of strings. The children of the changeset."""
    ctx = args['ctx']
    childrevs = ['%d:%s' % (cctx, cctx) for cctx in ctx.children()]
    return showlist('children', childrevs, args, element='child')

# Deprecated, but kept alive for help generation purposes.
@templatekeyword('currentbookmark')
def showcurrentbookmark(**args):
    """String. The active bookmark, if it is
    associated with the changeset (DEPRECATED)"""
    return showactivebookmark(**args)

@templatekeyword('activebookmark')
def showactivebookmark(**args):
    """String. The active bookmark, if it is
    associated with the changeset"""
    active = args['repo']._activebookmark
    if active and active in args['ctx'].bookmarks():
        return active
    return ''

@templatekeyword('date')
def showdate(repo, ctx, templ, **args):
    """Date information. The date when the changeset was committed."""
    return ctx.date()

@templatekeyword('desc')
def showdescription(repo, ctx, templ, **args):
    """String. The text of the changeset description."""
    s = ctx.description()
    if isinstance(s, encoding.localstr):
        # try hard to preserve utf-8 bytes
        return encoding.tolocal(encoding.fromlocal(s).strip())
    else:
        return s.strip()

@templatekeyword('diffstat')
def showdiffstat(repo, ctx, templ, **args):
    """String. Statistics of changes with the following format:
    "modified files: +added/-removed lines"
    """
    stats = patch.diffstatdata(util.iterlines(ctx.diff(noprefix=False)))
    maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats)
    return '%s: +%s/-%s' % (len(stats), adds, removes)

@templatekeyword('envvars')
def showenvvars(repo, **args):
    """A dictionary of environment variables. (EXPERIMENTAL)"""
    env = repo.ui.exportableenviron()
    env = util.sortdict((k, env[k]) for k in sorted(env))
    return showdict('envvar', env, args, plural='envvars')

@templatekeyword('extras')
def showextras(**args):
    """List of dicts with key, value entries of the 'extras'
    field of this changeset."""
    extras = args['ctx'].extra()
    extras = util.sortdict((k, extras[k]) for k in sorted(extras))
    makemap = lambda k: {'key': k, 'value': extras[k]}
    c = [makemap(k) for k in extras]
    f = _showlist('extra', c, args, plural='extras')
    return _hybrid(f, extras, makemap,
                   lambda x: '%s=%s' % (x['key'], util.escapestr(x['value'])))

@templatekeyword('file_adds')
def showfileadds(**args):
    """List of strings. Files added by this changeset."""
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    return showlist('file_add', getfiles(repo, ctx, revcache)[1], args,
                    element='file')

@templatekeyword('file_copies')
def showfilecopies(**args):
    """List of strings. Files copied in this changeset with
    their sources.
    """
    cache, ctx = args['cache'], args['ctx']
    copies = args['revcache'].get('copies')
    if copies is None:
        if 'getrenamed' not in cache:
            cache['getrenamed'] = getrenamedfn(args['repo'])
        copies = []
        getrenamed = cache['getrenamed']
        for fn in ctx.files():
            rename = getrenamed(fn, ctx.rev())
            if rename:
                copies.append((fn, rename[0]))

    copies = util.sortdict(copies)
    return showdict('file_copy', copies, args, plural='file_copies',
                    key='name', value='source', fmt='%s (%s)')

# showfilecopiesswitch() displays file copies only if copy records are
# provided before calling the templater, usually with a --copies
# command line switch.
@templatekeyword('file_copies_switch')
def showfilecopiesswitch(**args):
    """List of strings. Like "file_copies" but displayed
    only if the --copied switch is set.
    """
    copies = args['revcache'].get('copies') or []
    copies = util.sortdict(copies)
    return showdict('file_copy', copies, args, plural='file_copies',
                    key='name', value='source', fmt='%s (%s)')

@templatekeyword('file_dels')
def showfiledels(**args):
    """List of strings. Files removed by this changeset."""
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    return showlist('file_del', getfiles(repo, ctx, revcache)[2], args,
                    element='file')

@templatekeyword('file_mods')
def showfilemods(**args):
    """List of strings. Files modified by this changeset."""
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    return showlist('file_mod', getfiles(repo, ctx, revcache)[0], args,
                    element='file')

@templatekeyword('files')
def showfiles(**args):
    """List of strings. All files modified, added, or removed by this
    changeset.
    """
    return showlist('file', args['ctx'].files(), args)

@templatekeyword('graphnode')
def showgraphnode(repo, ctx, **args):
    """String. The character representing the changeset node in
    an ASCII revision graph"""
    wpnodes = repo.dirstate.parents()
    if wpnodes[1] == nullid:
        wpnodes = wpnodes[:1]
    if ctx.node() in wpnodes:
        return '@'
    elif ctx.obsolete():
        return 'x'
    elif ctx.closesbranch():
        return '_'
    else:
        return 'o'

@templatekeyword('index')
def showindex(**args):
    """Integer. The current iteration of the loop. (0 indexed)"""
    # just hosts documentation; should be overridden by template mapping
    raise error.Abort(_("can't use index in this context"))

@templatekeyword('latesttag')
def showlatesttag(**args):
    """List of strings. The global tags on the most recent globally
    tagged ancestor of this changeset. If no such tags exist, the list
    consists of the single string "null".
    """
    return showlatesttags(None, **args)

def showlatesttags(pattern, **args):
    """helper method for the latesttag keyword and function"""
    repo, ctx = args['repo'], args['ctx']
    cache = args['cache']
    latesttags = getlatesttags(repo, ctx, cache, pattern)

    # latesttag[0] is an implementation detail for sorting csets on different
    # branches in a stable manner; it is the date the tagged cset was created,
    # not the date the tag was created. Therefore it isn't made visible here.
    makemap = lambda v: {
        'changes': _showchangessincetag,
        'distance': latesttags[1],
        'latesttag': v, # BC with {latesttag % '{latesttag}'}
        'tag': v
    }

    tags = latesttags[2]
    f = _showlist('latesttag', tags, args, separator=':')
    return _hybrid(f, tags, makemap, lambda x: x['latesttag'])

@templatekeyword('latesttagdistance')
def showlatesttagdistance(repo, ctx, templ, cache, **args):
    """Integer. Longest path to the latest tag."""
    return getlatesttags(repo, ctx, cache)[1]

@templatekeyword('changessincelatesttag')
def showchangessincelatesttag(repo, ctx, templ, cache, **args):
    """Integer. All ancestors not in the latest tag."""
    latesttag = getlatesttags(repo, ctx, cache)[2][0]

    return _showchangessincetag(repo, ctx, tag=latesttag, **args)

def _showchangessincetag(repo, ctx, **args):
    offset = 0
    revs = [ctx.rev()]
    tag = args['tag']

    # The only() revset doesn't currently support wdir()
    if ctx.rev() is None:
        offset = 1
        revs = [p.rev() for p in ctx.parents()]

    return len(repo.revs('only(%ld, %s)', revs, tag)) + offset

@templatekeyword('manifest')
def showmanifest(**args):
    repo, ctx, templ = args['repo'], args['ctx'], args['templ']
    mnode = ctx.manifestnode()
    if mnode is None:
        # just avoid crash, we might want to use the 'ff...' hash in future
        return
    args = args.copy()
    args.update({'rev': repo.manifestlog._revlog.rev(mnode),
                 'node': hex(mnode)})
    return templ('manifest', **args)

def shownames(namespace, **args):
    """helper method to generate a template keyword for a namespace"""
    ctx = args['ctx']
    repo = ctx.repo()
    ns = repo.names[namespace]
    names = ns.names(repo, ctx.node())
    return showlist(ns.templatename, names, args, plural=namespace)

@templatekeyword('namespaces')
def shownamespaces(**args):
    """Dict of lists. Names attached to this changeset per
    namespace."""
    ctx = args['ctx']
    repo = ctx.repo()
    namespaces = util.sortdict((k, showlist('name', ns.names(repo, ctx.node()),
                                            args))
                               for k, ns in repo.names.iteritems())
    f = _showlist('namespace', list(namespaces), args)
    return _hybrid(f, namespaces,
                   lambda k: {'namespace': k, 'names': namespaces[k]},
                   lambda x: x['namespace'])

@templatekeyword('node')
def shownode(repo, ctx, templ, **args):
    """String. The changeset identification hash, as a 40 hexadecimal
    digit string.
    """
    return ctx.hex()

@templatekeyword('obsolete')
def showobsolete(repo, ctx, templ, **args):
    """String. Whether the changeset is obsolete.
    """
    if ctx.obsolete():
        return 'obsolete'
    return ''

@templatekeyword('p1rev')
def showp1rev(repo, ctx, templ, **args):
    """Integer. The repository-local revision number of the changeset's
    first parent, or -1 if the changeset has no parents."""
    return ctx.p1().rev()

@templatekeyword('p2rev')
def showp2rev(repo, ctx, templ, **args):
    """Integer. The repository-local revision number of the changeset's
    second parent, or -1 if the changeset has no second parent."""
    return ctx.p2().rev()

@templatekeyword('p1node')
def showp1node(repo, ctx, templ, **args):
    """String. The identification hash of the changeset's first parent,
    as a 40 digit hexadecimal string. If the changeset has no parents, all
    digits are 0."""
    return ctx.p1().hex()

@templatekeyword('p2node')
def showp2node(repo, ctx, templ, **args):
    """String. The identification hash of the changeset's second
    parent, as a 40 digit hexadecimal string. If the changeset has no second
    parent, all digits are 0."""
    return ctx.p2().hex()

@templatekeyword('parents')
def showparents(**args):
    """List of strings. The parents of the changeset in "rev:node"
    format. If the changeset has only one "natural" parent (the predecessor
    revision) nothing is shown."""
    repo = args['repo']
    ctx = args['ctx']
    pctxs = scmutil.meaningfulparents(repo, ctx)
    prevs = [str(p.rev()) for p in pctxs] # ifcontains() needs a list of str
    parents = [[('rev', p.rev()),
                ('node', p.hex()),
                ('phase', p.phasestr())]
               for p in pctxs]
    f = _showlist('parent', parents, args)
    return _hybrid(f, prevs, lambda x: {'ctx': repo[int(x)], 'revcache': {}},
                   lambda d: _formatrevnode(d['ctx']))

@templatekeyword('phase')
def showphase(repo, ctx, templ, **args):
    """String. The changeset phase name."""
    return ctx.phasestr()

@templatekeyword('phaseidx')
def showphaseidx(repo, ctx, templ, **args):
    """Integer. The changeset phase index."""
    return ctx.phase()

@templatekeyword('rev')
def showrev(repo, ctx, templ, **args):
    """Integer. The repository-local changeset revision number."""
    return scmutil.intrev(ctx)

def showrevslist(name, revs, **args):
    """helper to generate a list of revisions in which a mapped template will
    be evaluated"""
    repo = args['ctx'].repo()
    revs = [str(r) for r in revs] # ifcontains() needs a list of str
    f = _showlist(name, revs, args)
    return _hybrid(f, revs,
                   lambda x: {name: x, 'ctx': repo[int(x)], 'revcache': {}},
                   lambda d: d[name])

@templatekeyword('subrepos')
def showsubrepos(**args):
    """List of strings. Updated subrepositories in the changeset."""
    ctx = args['ctx']
    substate = ctx.substate
    if not substate:
        return showlist('subrepo', [], args)
    psubstate = ctx.parents()[0].substate or {}
    subrepos = []
    for sub in substate:
        if sub not in psubstate or substate[sub] != psubstate[sub]:
            subrepos.append(sub) # modified or newly added in ctx
    for sub in psubstate:
        if sub not in substate:
            subrepos.append(sub) # removed in ctx
    return showlist('subrepo', sorted(subrepos), args)

# don't remove "showtags" definition, even though namespaces will put
# a helper function for "tags" keyword into "keywords" map automatically,
# because online help text is built without namespaces initialization
@templatekeyword('tags')
def showtags(**args):
    """List of strings. Any tags associated with the changeset."""
    return shownames('tags', **args)

def loadkeyword(ui, extname, registrarobj):
    """Load template keyword from specified registrarobj
    """
    for name, func in registrarobj._table.iteritems():
        keywords[name] = func

@templatekeyword('termwidth')
def termwidth(repo, ctx, templ, **args):
    """Integer. The width of the current terminal."""
    return repo.ui.termwidth()

@templatekeyword('troubles')
def showtroubles(**args):
    """List of strings. Evolution troubles affecting the changeset.

    (EXPERIMENTAL)
    """
    return showlist('trouble', args['ctx'].troubles(), args)

# tell hggettext to extract docstrings from these functions:
i18nfunctions = keywords.values()
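
The registrar pattern used throughout this file is also how third-party code adds keywords. A hedged sketch of extension-style registration; the extension name, keyword name, and function body are made up for illustration:

# editor's sketch: register a custom keyword from outside templatekw.py
from mercurial import registrar as _registrar

extkeywords = {}
exttemplatekeyword = _registrar.templatekeyword(extkeywords)

@exttemplatekeyword('shortestdesc')
def showshortestdesc(repo, ctx, templ, **args):
    """String. First line of the changeset description (illustrative)."""
    desc = ctx.description()
    return desc.splitlines()[0] if desc else ''

The extension loader would then hand this table to loadkeyword() above, making {shortestdesc} usable in templates, under the usual extension-loading machinery.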