##// END OF EJS Templates
docs: add args/returns docs for some cmdutil, context, and registrar functions...
rlevasseur@google.com -
r35106:b22a0d9e default
parent child Browse files
Show More
@@ -1,3969 +1,3977
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import itertools
11 import itertools
12 import os
12 import os
13 import re
13 import re
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23
23
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 changelog,
26 changelog,
27 copies,
27 copies,
28 crecord as crecordmod,
28 crecord as crecordmod,
29 dagop,
29 dagop,
30 dirstateguard,
30 dirstateguard,
31 encoding,
31 encoding,
32 error,
32 error,
33 formatter,
33 formatter,
34 graphmod,
34 graphmod,
35 match as matchmod,
35 match as matchmod,
36 mdiff,
36 mdiff,
37 obsolete,
37 obsolete,
38 patch,
38 patch,
39 pathutil,
39 pathutil,
40 pycompat,
40 pycompat,
41 registrar,
41 registrar,
42 revlog,
42 revlog,
43 revset,
43 revset,
44 scmutil,
44 scmutil,
45 smartset,
45 smartset,
46 templatekw,
46 templatekw,
47 templater,
47 templater,
48 util,
48 util,
49 vfs as vfsmod,
49 vfs as vfsmod,
50 )
50 )
stringio = util.stringio

# templates of common command options
#
# each option tuple is (shortname, longname, default, help[, value-label]);
# commands concatenate these lists into their own option tables.

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('Z', 'ignore-space-at-eol', None,
     _('ignore changes in whitespace at EOL')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
165
165
def ishunk(x):
    """Report whether x is a patch hunk (curses or plain record flavor)."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
169
169
def newandmodified(chunks, originalchunks):
    """Return the set of filenames of hunks that create a new file and
    were not present in originalchunks (i.e. added while recording)."""
    return set(c.header.filename()
               for c in chunks
               if ishunk(c) and c.header.isnewfile()
               and c not in originalchunks)
177
177
def parsealiases(cmd):
    """Split a command spec like "^commit|ci" into [name, alias, ...].

    Any leading "^" markers are dropped before splitting on "|".
    """
    spec = cmd
    while spec[:1] == "^":
        spec = spec[1:]
    return spec.split("|")
180
180
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original ui.write so the caller can restore it afterwards.
    """
    oldwrite = ui.write

    def wrapped(*args, **kwargs):
        label = kwargs.pop('label', '')
        for chunk, l in patch.difflabel(lambda: args):
            oldwrite(chunk, label=label + l)

    setattr(ui, 'write', wrapped)
    return oldwrite
193
193
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter originalhunks, via the curses UI when usecurses
    is set (optionally driven by testfile), else via plain-text prompts."""
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        recordfn = crecordmod.testdecorator(testfile,
                                            crecordmod.testchunkselector)
    else:
        recordfn = crecordmod.chunkselector

    return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
206
206
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to filter originalhunks; return (chunks, opts).

    *operation* is used to build ui messages indicating to the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    testfile = ui.config('experimental', 'crecordtest')
    usecurses = crecordmod.checkcurses(ui)
    # colorized write wrapper must be undone even if filtering raises
    oldwrite = setupwrapcolorwrite(ui)
    try:
        return filterchunks(ui, originalhunks, usecurses, testfile,
                            operation)
    finally:
        ui.write = oldwrite
223
223
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes, then delegate the commit to commitfunc.

    Args:
      ui, repo: the ui and repository objects.
      commitfunc: callable invoked as commitfunc(ui, repo, *files, **opts)
        once the working directory contains only the selected changes.
      cmdsuggest: command name suggested in the abort message when the ui
        is non-interactive (may be falsy, in which case no suggestion is
        printed).
      backupall: when true, back up every changed file, not only the files
        touched by the selected hunks.
      filterfn: callable (ui, originalchunks) -> (chunks, opts) used to let
        the user pick hunks (e.g. recordfilter).
      *pats, **opts: patterns and options forwarded to the commit machinery.

    Raises error.Abort when the ui is not interactive; ui.username() raises
    if no username is configured.
    """
    from . import merge as mergemod
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # diff in git mode with function context, no dates, so the parsed
        # hunks carry rename/copy/new-file information
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # headers have files(); bare hunks do not
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # the directory may survive a previous aborted run
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # build the patch containing only the selected hunks
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        action="diff",
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup: leftover backups are not fatal
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # take the wlock so the working directory cannot change under us
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
406
406
class dirnode(object):
    """
    Represent a directory in user working copy with information required for
    the purpose of tersing its status.

    path is the path to the directory

    statuses is a set of statuses of all files in this directory (this includes
    all the files in all the subdirectories too)

    files is a list of files which are direct children of this directory

    subdirs is a dictionary of sub-directory name as the key and its own
    dirnode object as the value
    """

    def __init__(self, dirpath):
        # repo-relative path of this directory ('' for the repo root)
        self.path = dirpath
        # single-letter status codes seen on any file at or below this dir
        self.statuses = set([])
        # (filename, statuschar) pairs for direct children only
        self.files = []
        # subdir name -> dirnode, for each known sub-directory
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Add a file in this directory as a direct child."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """
        Add a file to this directory or to its direct parent directory.

        If the file is not direct child of this directory, we traverse to the
        directory of which this file is a direct child of and add the file
        there.
        """

        # the filename contains a path separator, it means it's not the direct
        # child of this directory
        if '/' in filename:
            subdir, filep = filename.split('/', 1)

            # does the dirnode object for subdir exist
            if subdir not in self.subdirs:
                subdirpath = os.path.join(self.path, subdir)
                self.subdirs[subdir] = dirnode(subdirpath)

            # try adding the file in subdir
            self.subdirs[subdir].addfile(filep, status)

        else:
            self._addfileindir(filename, status)

        if status not in self.statuses:
            self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for f, st in self.files:
            yield st, os.path.join(self.path, f)

    def tersewalk(self, terseargs):
        """
        Yield (status, path) obtained by processing the status of this
        dirnode.

        terseargs is the string of arguments passed by the user with `--terse`
        flag.

        Following are the cases which can happen:

        1) All the files in the directory (including all the files in its
        subdirectories) share the same status and the user has asked us to terse
        that status. -> yield (status, dirpath)

        2) Otherwise, we do following:

                a) Yield (status, filepath) for all the files which are in this
                directory (only the ones in this directory, not the subdirs)

                b) Recurse the function on all the subdirectories of this
                directory
        """

        if len(self.statuses) == 1:
            # NOTE: set.pop() removes the sole status from self.statuses,
            # so this walk is effectively single-use per tree
            onlyst = self.statuses.pop()

            # Making sure we terse only when the status abbreviation is
            # passed as terse argument
            if onlyst in terseargs:
                yield onlyst, self.path + pycompat.ossep
                return

        # add the files to status list
        for st, fpath in self.iterfilepaths():
            yield st, fpath

        # recurse on the subdirs
        for dirobj in self.subdirs.values():
            for st, fpath in dirobj.tersewalk(terseargs):
                yield st, fpath
506
506
def tersedir(statuslist, terseargs):
    """
    Terse the status if all the files in a directory share the same status.

    statuslist is scmutil.status() object which contains a list of files for
    each status.
    terseargs is string which is passed by the user as the argument to `--terse`
    flag.

    The function makes a tree of objects of dirnode class, and at each node it
    stores the information required to know whether we can terse a certain
    directory or not.

    Returns a list of lists of (possibly tersed) paths, one list per status,
    in the order 'm', 'a', 'r', 'd', 'u', 'i', 'c'.

    Raises error.Abort when terseargs contains an unrecognized status
    abbreviation.
    """
    # the order matters here as that is used to produce final list
    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # checking the argument validity
    for s in pycompat.bytestr(terseargs):
        if s not in allst:
            raise error.Abort(_("'%s' not recognized") % s)

    # creating a dirnode object for the root of the repo
    rootobj = dirnode('')
    pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
               'ignored', 'removed')

    tersedict = {}
    for attrname in pstatus:
        # first letter of the attribute name is its status abbreviation
        statuschar = attrname[0:1]
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)
        tersedict[statuschar] = []

    # we won't be tersing the root dir, so add files in it
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # process each sub-directory and build tersedict
    for subdir in rootobj.subdirs.values():
        for st, f in subdir.tersewalk(terseargs):
            tersedict[st].append(f)

    tersedlist = []
    for st in allst:
        tersedict[st].sort()
        tersedlist.append(tersedict[st])

    return tersedlist
555
555
def _commentlines(raw):
    '''Surround each line of 'raw' with a comment char; rejoin with
    newlines and a trailing newline.'''
    lines = raw.splitlines()
    commentedlines = ['# %s' % line for line in lines]
    return '\n'.join(commentedlines) + '\n'
561
561
def _conflictsmsg(repo):
    """Return a commented message describing merge-conflict state.

    Returns None when no merge is in progress; otherwise a commented
    block listing unresolved files (with resolve instructions) or a
    note that all conflicts are resolved.
    """
    # avoid merge cycle
    from . import merge as mergemod
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    m = scmutil.match(repo[None])
    unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
    if unresolvedlist:
        mergeliststr = '\n'.join(
            [' %s' % util.pathto(repo.root, pycompat.getcwd(), path)
             for path in unresolvedlist])
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
    else:
        msg = _('No unresolved merge conflicts.')

    return _commentlines(msg)
584
584
def _helpmessage(continuecmd, abortcmd):
    """Return a commented continue/abort hint built from the two commands."""
    msg = _('To continue: %s\n'
            'To abort: %s') % (continuecmd, abortcmd)
    return _commentlines(msg)
589
589
def _rebasemsg():
    """Return the help hint shown for an unfinished rebase."""
    return _helpmessage('hg rebase --continue', 'hg rebase --abort')
592
592
def _histeditmsg():
    """Return the help hint shown for an unfinished histedit."""
    return _helpmessage('hg histedit --continue', 'hg histedit --abort')
595
595
def _unshelvemsg():
    """Return the help hint shown for an unfinished unshelve."""
    return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')
598
598
def _updatecleanmsg(dest=None):
    """Return an 'hg update --clean' command string (to 'dest' or '.')
    with a data-loss warning appended."""
    warning = _('warning: this will discard uncommitted changes')
    return 'hg update --clean %s (%s)' % (dest or '.', warning)
602
602
def _graftmsg():
    """Return the help hint shown for an unfinished graft."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg graft --continue', _updatecleanmsg())
606
606
def _mergemsg():
    """Return the help hint shown for an uncommitted merge."""
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg commit', _updatecleanmsg())
610
610
def _bisectmsg():
    """Return the help hint shown during a bisect."""
    msg = _('To mark the changeset good: hg bisect --good\n'
            'To mark the changeset bad: hg bisect --bad\n'
            'To abort: hg bisect --reset\n')
    return _commentlines(msg)
616
616
def fileexistspredicate(filename):
    """Return a predicate repo -> bool that is true when 'filename'
    exists in the repo's .hg vfs."""
    return lambda repo: repo.vfs.exists(filename)
619
619
def _mergepredicate(repo):
    """Return True when the working directory has two parents
    (i.e. an uncommitted merge is in progress)."""
    return len(repo[None].parents()) > 1
622
622
# Recognized "unfinished operation" states, probed in order.
# Each entry is (state, predicate to detect states, helpful message function).
STATES = (
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # They need to be last because some of the other unfinished states may
    # also be in a merge or update state (eg. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)
636
636
def _getrepostate(repo):
    """Return (state, predicate, msgfn) for the first detected unfinished
    state in 'repo', or None when no known state is active.

    States listed in the 'commands.status.skipstates' config are ignored.
    """
    # experimental config: commands.status.skipstates
    skip = set(repo.ui.configlist('commands', 'status.skipstates'))
    for state, statedetectionpredicate, msgfn in STATES:
        if state in skip:
            continue
        if statedetectionpredicate(repo):
            return (state, statedetectionpredicate, msgfn)
645
645
def morestatus(repo, fm):
    """Write extended status information to formatter 'fm'.

    When the repository is in an unfinished state (rebase, merge, ...),
    emits a state message, any merge-conflict listing, and the state's
    help hint, all labeled 'status.morestatus'. Writes nothing otherwise.
    """
    statetuple = _getrepostate(repo)
    label = 'status.morestatus'
    if statetuple:
        fm.startitem()
        state, statedetectionpredicate, helpfulmsg = statetuple
        statemsg = _('The repository is in an unfinished *%s* state.') % state
        fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
        conmsg = _conflictsmsg(repo)
        if conmsg:
            fm.write('conflictsmsg', '%s\n', conmsg, label=label)
        if helpfulmsg:
            helpmsg = helpfulmsg()
            fm.write('helpmsg', '%s\n', helpmsg, label=label)
660
660
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command
    matches.

    With strict=False, 'cmd' may also match as an unambiguous prefix of
    an alias. Also returns the flat list of all known command aliases.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for e in keys:
        aliases = parsealiases(e)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            # accept a prefix match against any alias
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    # fall back to debug commands only when nothing else matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
698
698
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string.

    Raises error.AmbiguousCommand when several commands match and
    error.UnknownCommand when none do.
    """
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = sorted(choice)
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
714
714
def findrepo(p):
    """Walk up from directory 'p' looking for a '.hg' directory.

    Returns the repository root, or None when the filesystem root is
    reached without finding one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        oldp, p = p, os.path.dirname(p)
        if p == oldp:
            # dirname() reached a fixed point: filesystem root
            return None

    return p
722
722
def bailifchanged(repo, merge=True, hint=None):
    """Enforce the precondition that the working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.

    Raises error.Abort on an uncommitted merge, on any modified/added/
    removed/deleted file, or on a dirty subrepository.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise error.Abort(_('uncommitted changes'), hint=hint)
    ctx = repo[None]
    # let each subrepo apply the same check
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)
740
740
def logmessage(ui, opts):
    """Get the log message according to the -m and -l options.

    Returns the message string (read from ui.fin when the logfile is
    '-'), or None when neither option is set. Raises error.Abort when
    both options are given or the logfile cannot be read.
    """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if not message and logfile:
        try:
            if isstdiofilename(logfile):
                message = ui.fin.read()
            else:
                # normalize line endings while reading from the file
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, encoding.strtolocal(inst.strerror)))
    return message
759
759
def mergeeditform(ctxorbool, baseformname):
    """Return appropriate editform name (referencing a committemplate).

    'ctxorbool' is either a ctx to be committed, or a bool indicating
    whether merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        if ctxorbool:
            return baseformname + ".merge"
    elif 1 < len(ctxorbool.parents()):
        # a ctx with two parents is a merge commit
        return baseformname + ".merge"

    return baseformname + ".normal"
776
776
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """Get appropriate commit message editor according to '--edit' option.

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor
807
807
def loglimit(opts):
    """Get the log limit according to option -l/--limit.

    Returns the limit as a positive int, or None when no limit was
    given. Raises error.Abort for non-integer or non-positive values.
    """
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise error.Abort(_('limit must be a positive integer'))
        if limit <= 0:
            raise error.Abort(_('limit must be positive'))
    else:
        limit = None
    return limit
821
821
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand the output filename pattern 'pat'.

    Supported format specs (each enabled only when the corresponding
    argument is provided):
      %%  literal "%"
      %b  basename of the repository root
      %H  changeset hash (40 hex digits)     [needs node]
      %R  changeset revision number          [needs node]
      %h  short changeset hash (12 digits)   [needs node]
      %m  first line of 'desc', sanitized    [needs node]
      %r  zero-padded revision number        [needs node]
      %N  total number of patches            [needs total]
      %n  zero-padded sequence number        [needs seqno]
      %s  basename of 'pathname'             [needs pathname]
      %d  dirname of 'pathname' or "."       [needs pathname]
      %p  root-relative 'pathname'           [needs pathname]

    Raises error.Abort when 'pat' uses a spec that is not enabled.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', str(desc))
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                # consume the spec character and expand it
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
867
867
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout."""
    return not pat or pat == '-'
871
871
class _unclosablefile(object):
    """Proxy around a file object whose close() is a no-op.

    Used to hand out ui.fout/ui.fin where callers (or 'with' blocks)
    would otherwise close the underlying stream. All other attribute
    access is delegated to the wrapped file object.
    """

    def __init__(self, fp):
        self._fp = fp

    def close(self):
        # deliberately do not close the wrapped stream
        pass

    def __iter__(self):
        return iter(self._fp)

    def __getattr__(self, attr):
        # delegate everything else (write, read, flush, ...) to the
        # wrapped file object
        return getattr(self._fp, attr)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # no-op: the stream must survive the 'with' block
        pass
890
890
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open and return a file object for the expanded pattern 'pat'.

    When 'pat' denotes stdio ('' or '-'), returns an unclosable wrapper
    around ui.fout (writable modes) or ui.fin. Otherwise the name is
    expanded with makefilename() and opened with 'mode', possibly
    overridden per-file by 'modemap'; a 'wb' modemap entry is flipped to
    'ab' so later opens of the same file append rather than truncate.
    """
    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        return _unclosablefile(fp)
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            modemap[fn] = 'ab'
    return open(fn, mode)
909
909
def openrevlog(repo, cmd, file_, opts):
    """Opens the changelog, manifest, a filelog or a given revlog.

    The target is chosen by opts['changelog'], opts['manifest'],
    opts['dir'] or 'file_'. Raises error.Abort for conflicting or
    invalid selections and error.CommandError when nothing is selected.
    """
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    # validate option combinations before touching the repo
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # no repository: open the revlog index file from the cwd directly
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
954
954
955 def copy(ui, repo, pats, opts, rename=False):
955 def copy(ui, repo, pats, opts, rename=False):
956 # called with the repo lock held
956 # called with the repo lock held
957 #
957 #
958 # hgsep => pathname that uses "/" to separate directories
958 # hgsep => pathname that uses "/" to separate directories
959 # ossep => pathname that uses os.sep to separate directories
959 # ossep => pathname that uses os.sep to separate directories
960 cwd = repo.getcwd()
960 cwd = repo.getcwd()
961 targets = {}
961 targets = {}
962 after = opts.get("after")
962 after = opts.get("after")
963 dryrun = opts.get("dry_run")
963 dryrun = opts.get("dry_run")
964 wctx = repo[None]
964 wctx = repo[None]
965
965
966 def walkpat(pat):
966 def walkpat(pat):
967 srcs = []
967 srcs = []
968 if after:
968 if after:
969 badstates = '?'
969 badstates = '?'
970 else:
970 else:
971 badstates = '?r'
971 badstates = '?r'
972 m = scmutil.match(wctx, [pat], opts, globbed=True)
972 m = scmutil.match(wctx, [pat], opts, globbed=True)
973 for abs in wctx.walk(m):
973 for abs in wctx.walk(m):
974 state = repo.dirstate[abs]
974 state = repo.dirstate[abs]
975 rel = m.rel(abs)
975 rel = m.rel(abs)
976 exact = m.exact(abs)
976 exact = m.exact(abs)
977 if state in badstates:
977 if state in badstates:
978 if exact and state == '?':
978 if exact and state == '?':
979 ui.warn(_('%s: not copying - file is not managed\n') % rel)
979 ui.warn(_('%s: not copying - file is not managed\n') % rel)
980 if exact and state == 'r':
980 if exact and state == 'r':
981 ui.warn(_('%s: not copying - file has been marked for'
981 ui.warn(_('%s: not copying - file has been marked for'
982 ' remove\n') % rel)
982 ' remove\n') % rel)
983 continue
983 continue
984 # abs: hgsep
984 # abs: hgsep
985 # rel: ossep
985 # rel: ossep
986 srcs.append((abs, rel, exact))
986 srcs.append((abs, rel, exact))
987 return srcs
987 return srcs
988
988
989 # abssrc: hgsep
989 # abssrc: hgsep
990 # relsrc: ossep
990 # relsrc: ossep
991 # otarget: ossep
991 # otarget: ossep
992 def copyfile(abssrc, relsrc, otarget, exact):
992 def copyfile(abssrc, relsrc, otarget, exact):
993 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
993 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
994 if '/' in abstarget:
994 if '/' in abstarget:
995 # We cannot normalize abstarget itself, this would prevent
995 # We cannot normalize abstarget itself, this would prevent
996 # case only renames, like a => A.
996 # case only renames, like a => A.
997 abspath, absname = abstarget.rsplit('/', 1)
997 abspath, absname = abstarget.rsplit('/', 1)
998 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
998 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
999 reltarget = repo.pathto(abstarget, cwd)
999 reltarget = repo.pathto(abstarget, cwd)
1000 target = repo.wjoin(abstarget)
1000 target = repo.wjoin(abstarget)
1001 src = repo.wjoin(abssrc)
1001 src = repo.wjoin(abssrc)
1002 state = repo.dirstate[abstarget]
1002 state = repo.dirstate[abstarget]
1003
1003
1004 scmutil.checkportable(ui, abstarget)
1004 scmutil.checkportable(ui, abstarget)
1005
1005
1006 # check for collisions
1006 # check for collisions
1007 prevsrc = targets.get(abstarget)
1007 prevsrc = targets.get(abstarget)
1008 if prevsrc is not None:
1008 if prevsrc is not None:
1009 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1009 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1010 (reltarget, repo.pathto(abssrc, cwd),
1010 (reltarget, repo.pathto(abssrc, cwd),
1011 repo.pathto(prevsrc, cwd)))
1011 repo.pathto(prevsrc, cwd)))
1012 return
1012 return
1013
1013
1014 # check for overwrites
1014 # check for overwrites
1015 exists = os.path.lexists(target)
1015 exists = os.path.lexists(target)
1016 samefile = False
1016 samefile = False
1017 if exists and abssrc != abstarget:
1017 if exists and abssrc != abstarget:
1018 if (repo.dirstate.normalize(abssrc) ==
1018 if (repo.dirstate.normalize(abssrc) ==
1019 repo.dirstate.normalize(abstarget)):
1019 repo.dirstate.normalize(abstarget)):
1020 if not rename:
1020 if not rename:
1021 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1021 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1022 return
1022 return
1023 exists = False
1023 exists = False
1024 samefile = True
1024 samefile = True
1025
1025
1026 if not after and exists or after and state in 'mn':
1026 if not after and exists or after and state in 'mn':
1027 if not opts['force']:
1027 if not opts['force']:
1028 if state in 'mn':
1028 if state in 'mn':
1029 msg = _('%s: not overwriting - file already committed\n')
1029 msg = _('%s: not overwriting - file already committed\n')
1030 if after:
1030 if after:
1031 flags = '--after --force'
1031 flags = '--after --force'
1032 else:
1032 else:
1033 flags = '--force'
1033 flags = '--force'
1034 if rename:
1034 if rename:
1035 hint = _('(hg rename %s to replace the file by '
1035 hint = _('(hg rename %s to replace the file by '
1036 'recording a rename)\n') % flags
1036 'recording a rename)\n') % flags
1037 else:
1037 else:
1038 hint = _('(hg copy %s to replace the file by '
1038 hint = _('(hg copy %s to replace the file by '
1039 'recording a copy)\n') % flags
1039 'recording a copy)\n') % flags
1040 else:
1040 else:
1041 msg = _('%s: not overwriting - file exists\n')
1041 msg = _('%s: not overwriting - file exists\n')
1042 if rename:
1042 if rename:
1043 hint = _('(hg rename --after to record the rename)\n')
1043 hint = _('(hg rename --after to record the rename)\n')
1044 else:
1044 else:
1045 hint = _('(hg copy --after to record the copy)\n')
1045 hint = _('(hg copy --after to record the copy)\n')
1046 ui.warn(msg % reltarget)
1046 ui.warn(msg % reltarget)
1047 ui.warn(hint)
1047 ui.warn(hint)
1048 return
1048 return
1049
1049
1050 if after:
1050 if after:
1051 if not exists:
1051 if not exists:
1052 if rename:
1052 if rename:
1053 ui.warn(_('%s: not recording move - %s does not exist\n') %
1053 ui.warn(_('%s: not recording move - %s does not exist\n') %
1054 (relsrc, reltarget))
1054 (relsrc, reltarget))
1055 else:
1055 else:
1056 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1056 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1057 (relsrc, reltarget))
1057 (relsrc, reltarget))
1058 return
1058 return
1059 elif not dryrun:
1059 elif not dryrun:
1060 try:
1060 try:
1061 if exists:
1061 if exists:
1062 os.unlink(target)
1062 os.unlink(target)
1063 targetdir = os.path.dirname(target) or '.'
1063 targetdir = os.path.dirname(target) or '.'
1064 if not os.path.isdir(targetdir):
1064 if not os.path.isdir(targetdir):
1065 os.makedirs(targetdir)
1065 os.makedirs(targetdir)
1066 if samefile:
1066 if samefile:
1067 tmp = target + "~hgrename"
1067 tmp = target + "~hgrename"
1068 os.rename(src, tmp)
1068 os.rename(src, tmp)
1069 os.rename(tmp, target)
1069 os.rename(tmp, target)
1070 else:
1070 else:
1071 util.copyfile(src, target)
1071 util.copyfile(src, target)
1072 srcexists = True
1072 srcexists = True
1073 except IOError as inst:
1073 except IOError as inst:
1074 if inst.errno == errno.ENOENT:
1074 if inst.errno == errno.ENOENT:
1075 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1075 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1076 srcexists = False
1076 srcexists = False
1077 else:
1077 else:
1078 ui.warn(_('%s: cannot copy - %s\n') %
1078 ui.warn(_('%s: cannot copy - %s\n') %
1079 (relsrc, encoding.strtolocal(inst.strerror)))
1079 (relsrc, encoding.strtolocal(inst.strerror)))
1080 return True # report a failure
1080 return True # report a failure
1081
1081
1082 if ui.verbose or not exact:
1082 if ui.verbose or not exact:
1083 if rename:
1083 if rename:
1084 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1084 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1085 else:
1085 else:
1086 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1086 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1087
1087
1088 targets[abstarget] = abssrc
1088 targets[abstarget] = abssrc
1089
1089
1090 # fix up dirstate
1090 # fix up dirstate
1091 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1091 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1092 dryrun=dryrun, cwd=cwd)
1092 dryrun=dryrun, cwd=cwd)
1093 if rename and not dryrun:
1093 if rename and not dryrun:
1094 if not after and srcexists and not samefile:
1094 if not after and srcexists and not samefile:
1095 repo.wvfs.unlinkpath(abssrc)
1095 repo.wvfs.unlinkpath(abssrc)
1096 wctx.forget([abssrc])
1096 wctx.forget([abssrc])
1097
1097
    # pat: ossep
    # dest: ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
1102 def targetpathfn(pat, dest, srcs):
1102 def targetpathfn(pat, dest, srcs):
1103 if os.path.isdir(pat):
1103 if os.path.isdir(pat):
1104 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1104 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1105 abspfx = util.localpath(abspfx)
1105 abspfx = util.localpath(abspfx)
1106 if destdirexists:
1106 if destdirexists:
1107 striplen = len(os.path.split(abspfx)[0])
1107 striplen = len(os.path.split(abspfx)[0])
1108 else:
1108 else:
1109 striplen = len(abspfx)
1109 striplen = len(abspfx)
1110 if striplen:
1110 if striplen:
1111 striplen += len(pycompat.ossep)
1111 striplen += len(pycompat.ossep)
1112 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1112 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1113 elif destdirexists:
1113 elif destdirexists:
1114 res = lambda p: os.path.join(dest,
1114 res = lambda p: os.path.join(dest,
1115 os.path.basename(util.localpath(p)))
1115 os.path.basename(util.localpath(p)))
1116 else:
1116 else:
1117 res = lambda p: dest
1117 res = lambda p: dest
1118 return res
1118 return res
1119
1119
    # pat: ossep
    # dest: ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
1124 def targetpathafterfn(pat, dest, srcs):
1124 def targetpathafterfn(pat, dest, srcs):
1125 if matchmod.patkind(pat):
1125 if matchmod.patkind(pat):
1126 # a mercurial pattern
1126 # a mercurial pattern
1127 res = lambda p: os.path.join(dest,
1127 res = lambda p: os.path.join(dest,
1128 os.path.basename(util.localpath(p)))
1128 os.path.basename(util.localpath(p)))
1129 else:
1129 else:
1130 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1130 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1131 if len(abspfx) < len(srcs[0][0]):
1131 if len(abspfx) < len(srcs[0][0]):
1132 # A directory. Either the target path contains the last
1132 # A directory. Either the target path contains the last
1133 # component of the source path or it does not.
1133 # component of the source path or it does not.
1134 def evalpath(striplen):
1134 def evalpath(striplen):
1135 score = 0
1135 score = 0
1136 for s in srcs:
1136 for s in srcs:
1137 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1137 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1138 if os.path.lexists(t):
1138 if os.path.lexists(t):
1139 score += 1
1139 score += 1
1140 return score
1140 return score
1141
1141
1142 abspfx = util.localpath(abspfx)
1142 abspfx = util.localpath(abspfx)
1143 striplen = len(abspfx)
1143 striplen = len(abspfx)
1144 if striplen:
1144 if striplen:
1145 striplen += len(pycompat.ossep)
1145 striplen += len(pycompat.ossep)
1146 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1146 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1147 score = evalpath(striplen)
1147 score = evalpath(striplen)
1148 striplen1 = len(os.path.split(abspfx)[0])
1148 striplen1 = len(os.path.split(abspfx)[0])
1149 if striplen1:
1149 if striplen1:
1150 striplen1 += len(pycompat.ossep)
1150 striplen1 += len(pycompat.ossep)
1151 if evalpath(striplen1) > score:
1151 if evalpath(striplen1) > score:
1152 striplen = striplen1
1152 striplen = striplen1
1153 res = lambda p: os.path.join(dest,
1153 res = lambda p: os.path.join(dest,
1154 util.localpath(p)[striplen:])
1154 util.localpath(p)[striplen:])
1155 else:
1155 else:
1156 # a file
1156 # a file
1157 if destdirexists:
1157 if destdirexists:
1158 res = lambda p: os.path.join(dest,
1158 res = lambda p: os.path.join(dest,
1159 os.path.basename(util.localpath(p)))
1159 os.path.basename(util.localpath(p)))
1160 else:
1160 else:
1161 res = lambda p: dest
1161 res = lambda p: dest
1162 return res
1162 return res
1163
1163
1164 pats = scmutil.expandpats(pats)
1164 pats = scmutil.expandpats(pats)
1165 if not pats:
1165 if not pats:
1166 raise error.Abort(_('no source or destination specified'))
1166 raise error.Abort(_('no source or destination specified'))
1167 if len(pats) == 1:
1167 if len(pats) == 1:
1168 raise error.Abort(_('no destination specified'))
1168 raise error.Abort(_('no destination specified'))
1169 dest = pats.pop()
1169 dest = pats.pop()
1170 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1170 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1171 if not destdirexists:
1171 if not destdirexists:
1172 if len(pats) > 1 or matchmod.patkind(pats[0]):
1172 if len(pats) > 1 or matchmod.patkind(pats[0]):
1173 raise error.Abort(_('with multiple sources, destination must be an '
1173 raise error.Abort(_('with multiple sources, destination must be an '
1174 'existing directory'))
1174 'existing directory'))
1175 if util.endswithsep(dest):
1175 if util.endswithsep(dest):
1176 raise error.Abort(_('destination %s is not a directory') % dest)
1176 raise error.Abort(_('destination %s is not a directory') % dest)
1177
1177
1178 tfn = targetpathfn
1178 tfn = targetpathfn
1179 if after:
1179 if after:
1180 tfn = targetpathafterfn
1180 tfn = targetpathafterfn
1181 copylist = []
1181 copylist = []
1182 for pat in pats:
1182 for pat in pats:
1183 srcs = walkpat(pat)
1183 srcs = walkpat(pat)
1184 if not srcs:
1184 if not srcs:
1185 continue
1185 continue
1186 copylist.append((tfn(pat, dest, srcs), srcs))
1186 copylist.append((tfn(pat, dest, srcs), srcs))
1187 if not copylist:
1187 if not copylist:
1188 raise error.Abort(_('no files to copy'))
1188 raise error.Abort(_('no files to copy'))
1189
1189
1190 errors = 0
1190 errors = 0
1191 for targetpath, srcs in copylist:
1191 for targetpath, srcs in copylist:
1192 for abssrc, relsrc, exact in srcs:
1192 for abssrc, relsrc, exact in srcs:
1193 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1193 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1194 errors += 1
1194 errors += 1
1195
1195
1196 if errors:
1196 if errors:
1197 ui.warn(_('(consider using --after)\n'))
1197 ui.warn(_('(consider using --after)\n'))
1198
1198
1199 return errors != 0
1199 return errors != 0
1200
1200
## facility to let extensions process additional data into an import patch
# identifiers below are executed in list order
extrapreimport = []  # run before the commit is made
extrapostimport = []  # run after the commit is made
# mapping from identifier to the actual import hook function
#
# 'preimport' hooks run before the commit is created and receive:
# - repo: the localrepository instance,
# - patchdata: data extracted from the patch header
#   (cf. mercurial.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, which would
# allow mutation of the in-memory commit and more. Feel free to rework the
# code to get there.
extrapreimportmap = {}
# 'postimport' hooks run after the commit is created and receive one
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1221
1221
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)

    # no patch data was extracted: nothing to import
    if not tmpname:
        return (None, None, False)

    rejects = False

    try:
        # precedence for the commit message: command line, then the patch
        # itself, otherwise launch the editor later (message stays None)
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            # --exact requires the recorded node and first parent
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # applying into the working directory
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except error.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                if partial:
                    # with --partial, remember the failure and commit anyway
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                extra = {}
                # let extensions add data to the changeset's extra dict
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                overrides = {}
                if partial:
                    overrides[('ui', 'allowemptycommit')] = True
                with repo.ui.configoverride(overrides, 'import'):
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
        else:
            # --bypass: commit directly against the store, without touching
            # the working directory
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip,
                                    prefix, files, eolmode=None)
                except error.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.memctx(repo, (p1.node(), p2.node()),
                                        message,
                                        files=files,
                                        filectxfn=store,
                                        user=user,
                                        date=date,
                                        branch=branch,
                                        editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        # always clean up the temporary patch file written by patch.extract
        os.unlink(tmpname)
1391
1391
# facility to let extensions include additional data in an exported patch
# identifiers below are executed in list order
extraexport = []
# mapping from identifier to the actual export hook function;
# each hook returns a string to be added to the patch header, or None.
# It is given two arguments: (sequencenumber, changectx).
extraexportmap = {}
1399
1399
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write,
                  diffopts):
    """Emit one changeset as an "HG changeset patch" via ``write``.

    Writes the header (user, date, branch, node, parent(s), any
    extension-provided header lines), the description, a blank line, and
    finally the labeled diff chunks against the selected parent.
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    branch = ctx.branch()
    if switch_parent:
        # diff against the second parent instead of the first
        parents.reverse()

    prev = parents[0] if parents else nullid

    write("# HG changeset patch\n")
    write("# User %s\n" % ctx.user())
    write("# Date %d %d\n" % ctx.date())
    write("# %s\n" % util.datestr(ctx.date()))
    if branch and branch != 'default':
        write("# Branch %s\n" % branch)
    write("# Node ID %s\n" % hex(node))
    write("# Parent %s\n" % hex(prev))
    if len(parents) > 1:
        write("# Parent %s\n" % hex(parents[1]))

    # extension-registered extra header lines
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            write('# %s\n' % header)
    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
        write(chunk, label=label)
1432
1432
def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
        Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named
                              using the given template.
        Neither fp nor template specified: All revs written to
                                           repo.ui.write()
    '''

    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    # pick the shared write destination up front when possible; with only
    # fntemplate, a per-revision file is opened inside the loop instead
    write = None
    dest = '<unnamed>'
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            def write(s, **kw):
                fo.write(s)
            if not dest.startswith('<'):
                repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()
1491
1491
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False, hunksfilterfn=None):
    '''show diff or diffstat.'''
    # output goes to fp when given, otherwise to the ui
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    relroot = pathutil.canonpath(repo.root, repo.getcwd(), root) if root \
        else ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat output: no context lines needed, format to terminal width
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot,
                            hunksfilterfn=hunksfilterfn)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot,
                                         hunksfilterfn=hunksfilterfn):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1
                # and node2 (inclusive). Thus, ctx2's substate won't contain
                # that subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1550
1550
1551 def _changesetlabels(ctx):
1551 def _changesetlabels(ctx):
1552 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1552 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1553 if ctx.obsolete():
1553 if ctx.obsolete():
1554 labels.append('changeset.obsolete')
1554 labels.append('changeset.obsolete')
1555 if ctx.isunstable():
1555 if ctx.isunstable():
1556 labels.append('changeset.unstable')
1556 labels.append('changeset.unstable')
1557 for instability in ctx.instabilities():
1557 for instability in ctx.instabilities():
1558 labels.append('instability.%s' % instability)
1558 labels.append('instability.%s' % instability)
1559 return ' '.join(labels)
1559 return ' '.join(labels)
1560
1560
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        # if buffered, _show() output is captured per-rev and written by flush()
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        self.header = {}      # rev -> rendered header text (buffered mode)
        self.hunk = {}        # rev -> rendered changeset text (buffered mode)
        self.lastheader = None  # last header written, to suppress repeats
        self.footer = None

    def flush(self, ctx):
        '''write out buffered output for ctx; return 1 if a changeset hunk
        was written, 0 otherwise'''
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            # only emit a header when it differs from the previous one
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, hunksfilterfn=None,
             **props):
        '''render ctx, either directly to the ui or into the per-rev buffer'''
        props = pycompat.byteskwargs(props)
        if self.buffered:
            # capture labeled output so flush() can replay it verbatim
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, hunksfilterfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, hunksfilterfn, props)

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()

        if self.ui.quiet:
            # quiet mode: just the changeset id, nothing else
            self.ui.write("%s\n" % scmutil.formatchangeid(ctx),
                          label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset:   %s\n") % scmutil.formatchangeid(ctx),
                      label=_changesetlabels(ctx))

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch:      %s\n") % branch,
                          label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase:       %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent:      %s\n") % scmutil.formatchangeid(pctx),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            mrev = self.repo.manifestlog._revlog.rev(mnode)
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest:    %s\n")
                          % scmutil.formatrevnode(self.ui, mrev, mnode),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user:        %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date:        %s\n") % date,
                      label='log.date')

        if ctx.isunstable():
            # i18n: column positioning for "hg log"
            instabilities = ctx.instabilities()
            self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
                          label='log.instability')

        elif ctx.obsolete():
            self._showobsfate(ctx)

        self._exthook(ctx)

        if self.ui.debugflag:
            # debug mode: list modified/added/removed files separately
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files:       %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies:      %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra:       %s=%s\n")
                              % (key, util.escapestr(value)),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                # verbose: full description
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # default: first line of the description only
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary:     %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)

    def _showobsfate(self, ctx):
        # display the obsolescence fate lines computed by templatekw
        obsfate = templatekw.showobsfate(repo=self.repo, ctx=ctx, ui=self.ui)

        if obsfate:
            for obsfateline in obsfate:
                # i18n: column positioning for "hg log"
                self.ui.write(_("obsolete:    %s\n") % obsfateline,
                              label='log.obsfate')

    def _exthook(self, ctx):
        '''empty method used by extension as a hook point
        '''

    def showpatch(self, ctx, matchfn, hunksfilterfn=None):
        '''write the diffstat and/or patch for ctx when requested by diffopts'''
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True,
                               hunksfilterfn=hunksfilterfn)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False,
                               hunksfilterfn=hunksfilterfn)
            self.ui.write("\n")
1752
1752
class jsonchangeset(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # True until the first changeset is emitted; controls array punctuation
        self._first = True

    def close(self):
        # terminate the JSON array; emit "[]" when nothing was shown
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        if rev is None:
            # working-directory context has no revision number or node
            jrev = jnode = 'null'
        else:
            jrev = '%d' % rev
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            # quiet mode: only rev and node
            self.ui.write(('\n  "rev": %s') % jrev)
            self.ui.write((',\n  "node": %s') % jnode)
            self.ui.write('\n }')
            return

        self.ui.write(('\n  "rev": %s') % jrev)
        self.ui.write((',\n  "node": %s') % jnode)
        self.ui.write((',\n  "branch": "%s"') % j(ctx.branch()))
        self.ui.write((',\n  "phase": "%s"') % ctx.phasestr())
        self.ui.write((',\n  "user": "%s"') % j(ctx.user()))
        self.ui.write((',\n  "date": [%d, %d]') % ctx.date())
        self.ui.write((',\n  "desc": "%s"') % j(ctx.description()))

        self.ui.write((',\n  "bookmarks": [%s]') %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write((',\n  "tags": [%s]') %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write((',\n  "parents": [%s]') %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            # debug mode adds manifest, extra and per-kind file lists
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write((',\n  "manifest": %s') % jmanifestnode)

            self.ui.write((',\n  "extra": {%s}') %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            files = ctx.p1().status(ctx)
            self.ui.write((',\n  "modified": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write((',\n  "added": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write((',\n  "removed": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write((',\n  "files": [%s]') %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write((',\n  "copies": {%s}') %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            # embed the diffstat and/or diff as JSON-escaped strings
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write((',\n  "diffstat": "%s"')
                              % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write((',\n  "diff": "%s"') % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1851
1851
class changeset_templater(changeset_printer):
    '''format changeset information.

    Note: there are a variety of convenience functions to build a
    changeset_templater for common cases. See functions such as:
    makelogtemplater, show_changeset, buildcommittemplate, or other
    functions that use changesest_templater.
    '''

    # Arguments before "buffered" used to be positional. Consider not
    # adding/removing arguments before "buffered" to not break callers.
    def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
                 buffered=False):
        diffopts = diffopts or {}

        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.t = formatter.loadtemplater(ui, tmplspec,
                                         cache=templatekw.defaulttempl)
        # per-changeset index, exposed to templates as {index}
        self._counter = itertools.count()
        self.cache = {}

        self._tref = tmplspec.ref
        # map of logical part name -> template name actually used
        self._parts = {'header': '', 'footer': '',
                       tmplspec.ref: tmplspec.ref,
                       'docheader': '', 'docfooter': '',
                       'separator': ''}
        if tmplspec.mapfile:
            # find correct templates for current mode, for backward
            # compatibility with 'log -v/-q/--debug' using a mapfile
            tmplmodes = [
                (True, ''),
                (self.ui.verbose, '_verbose'),
                (self.ui.quiet, '_quiet'),
                (self.ui.debugflag, '_debug'),
            ]
            # later (more specific) modes override earlier ones
            for mode, postfix in tmplmodes:
                for t in self._parts:
                    cur = t + postfix
                    if mode and cur in self.t:
                        self._parts[t] = cur
        else:
            partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
            m = formatter.templatepartsmap(tmplspec, self.t, partnames)
            self._parts.update(m)

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        # append the docfooter before the base class writes self.footer
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['index'] = index = next(self._counter)
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache
        props = pycompat.strkwargs(props)

        # write separator, which wouldn't work well with the header part below
        # since there's inherently a conflict between header (across items) and
        # separator (per item)
        if self._parts['separator'] and index > 0:
            self.ui.write(templater.stringify(self.t(self._parts['separator'])))

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts[self._tref]
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)

        if self._parts['footer']:
            # only the first changeset's props are captured in the footer
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))
1939
1945
def logtemplatespec(tmpl, mapfile):
    """Wrap a literal log template and optional map file in a templatespec."""
    if not mapfile:
        return formatter.templatespec('', tmpl, None)
    return formatter.templatespec('changeset', tmpl, mapfile)
1945
1951
def _lookuplogtemplate(ui, tmpl, style):
    """Find the template matching the given template spec or style

    See formatter.lookuptemplate() for details.
    """
    # neither given on the command line: fall back to ui settings;
    # a configured template is stronger than a configured style
    if not tmpl and not style:
        cfgtmpl = ui.config('ui', 'logtemplate')
        if cfgtmpl:
            return logtemplatespec(templater.unquotestring(cfgtmpl), None)
        style = util.expandpath(ui.config('ui', 'style'))

    if not tmpl and style:
        # a bare style name refers to a shipped map-cmdline.<style> file;
        # a path with a directory component is used as-is
        mapfile = style
        if not os.path.split(mapfile)[0]:
            found = (templater.templatepath('map-cmdline.' + mapfile)
                     or templater.templatepath(mapfile))
            if found:
                mapfile = found
        return logtemplatespec(None, mapfile)

    if not tmpl:
        # nothing requested anywhere: empty spec means plain display
        return logtemplatespec(None, None)

    return formatter.lookuptemplate(ui, 'changeset', tmpl)
1973
1979
def makelogtemplater(ui, repo, tmpl, buffered=False):
    """Create a changeset_templater from a literal template 'tmpl'
    byte-string."""
    return changeset_templater(ui, repo, logtemplatespec(tmpl, None),
                               buffered=buffered)
1978
1985
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # a patch/diffstat request needs a matcher covering the whole repo
    if opts.get('patch') or opts.get('stat'):
        match = scmutil.matchall(repo)
    else:
        match = None

    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, match, opts, buffered)

    spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))

    if spec.ref or spec.tmpl or spec.mapfile:
        return changeset_templater(ui, repo, spec, match, opts, buffered)
    return changeset_printer(ui, repo, match, opts, buffered)
2004
2011
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    # optional leading position column, for numbered marker listings
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('prednode', '%s ', hex(marker.prednode()))
    successors = marker.succnodes()
    # only emit the succnodes field when there are successors
    fm.condwrite(successors, 'succnodes', '%s ',
                 fm.formatlist(map(hex, successors), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parentnodes = marker.parentnodes()
    if parentnodes is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parentnodes), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # copy before popping so the marker's own metadata is not mutated
    metadata = marker.metadata().copy()
    metadata.pop('date', None)
    fm.write('metadata', '{%s}',
             fm.formatdict(metadata, fmt='%r: %r', sep=', '))
    fm.plain('\n')
2025
2032
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    m = scmutil.matchall(repo)
    hits = {}  # rev -> date tuple, filled by the prepare callback

    def prep(ctx, fns):
        committed = ctx.date()
        if datematch(committed[0]):
            hits[ctx.rev()] = committed

    # walkchangerevs calls prep() on each context before yielding it,
    # so 'hits' is populated by the time we test membership below
    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in hits:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(hits[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
2046
2053
def increasingwindows(windowsize=8, sizelimit=512):
    """Generate a stream of window sizes, doubling up to sizelimit.

    Yields windowsize first, then doubles it after each yield while it
    is still below sizelimit; once the limit is reached the same value
    is yielded forever.  The generator never terminates on its own.
    """
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size = size * 2
2052
2059
class FileWalkError(Exception):
    """Raised when a file's history cannot be walked via filelogs alone.

    walkfilerevs() raises this so its caller can fall back to the slow
    path (matching file names against the changelog).
    """
2055
2062
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.

    Args:
      repo: the repository to walk
      match: matcher whose files() lists the files of interest
      follow: whether to chase renames/copies (filelog.renamed())
      revs: revisions bounding the walk; linkrevs below min(revs) are
        skipped, and (without follow) linkrevs above max(revs) too
      fncache: dict filled in as a side effect, mapping each wanted rev
        to the list of matched files changed there
    '''
    wanted = set()
    copies = []   # (filename, filenode) pairs discovered via renames
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        # NOTE: this local deliberately shadows the outer 'revs' argument
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yields (filename, filenode-or-None); filenode is only known
        # (and needed) when following from the working copy's parent
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        # rename sources appended to 'copies' during the walk below are
        # picked up here, extending the iteration
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
2152
2159
class _followfilter(object):
    """Incrementally filter revisions for --follow.

    The first revision fed to match() anchors the walk; subsequent
    revisions are kept when they are descendants (increasing revs) or
    ancestors (decreasing revs) of revisions kept so far.
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev    # set by the first match() call
        self.roots = set()         # frontier of kept revisions
        self.onlyfirst = onlyfirst # restrict the walk to first parents

    def match(self, rev):
        def realparents(rev):
            # first parent only, or every non-null parent
            parentrevs = self.repo.changelog.parentrevs(rev)
            if self.onlyfirst:
                return parentrevs[0:1]
            return filter(lambda x: x != nullrev, parentrevs)

        # first revision seen anchors the walk and always matches
        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: rev matches when one of its parents was kept
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
            return False

        # backwards: rev matches when it is a parent of a kept revision;
        # replace it in the frontier by its own parents
        if not self.roots:
            self.roots.update(realparents(self.startrev))
        if rev in self.roots:
            self.roots.remove(rev)
            self.roots.update(realparents(rev))
            return True
        return False
2190
2197
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.

    Args:
      repo: the repository to walk
      match: matcher selecting the files of interest
      opts: dict of log options ('follow', 'follow_first', 'removed',
        'prune', 'rev', ...)
      prepare: callable(ctx, fns) invoked on each context of a window,
        in forward order, before the window's contexts are yielded;
        fns iterates the matched files changed in ctx
    '''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # the fast filelog-only walk cannot honor patterns, nor --removed
    # with exact/prefix matches
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    # first (and only) evaluation of this rev: check its
                    # files against the matcher and cache the verdict
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): relies on 'wanted' supporting '-' with a
                # list (smartset semantics); the slow path's lazywantedset
                # defines no '-' -- presumably --prune never combines with
                # the slow path here; confirm before relying on it
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            # gather the window's wanted revs in input order...
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # ...run prepare() over them in forward (ascending) order...
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    # no cached file list: filter ctx.files() lazily
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # ...then yield the contexts in the caller's requested order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
2328
2335
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "revfiles". "revfiles" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating revs to file names (which is not "correct" but
    # good enough).
    revfiles = {}        # rev -> set of file names relevant at that rev
    populated = [False]  # mutable flag shared with the closure below
    pctx = repo['.']

    def populate():
        # seed the cache with each file plus all of its ancestors
        for fn in files:
            fctx = pctx[fn]
            revfiles.setdefault(fctx.introrev(), set()).add(fctx.path())
            for anc in fctx.ancestors(followfirst=followfirst):
                revfiles.setdefault(anc.rev(), set()).add(anc.path())

    def filematcher(rev):
        # populate lazily, only once a matcher is actually requested
        if not populated[0]:
            populated[0] = True
            populate()
        return scmutil.matchfiles(repo, revfiles.get(rev, []))

    return filematcher
2356
2363
2357 def _makenofollowlogfilematcher(repo, pats, opts):
2364 def _makenofollowlogfilematcher(repo, pats, opts):
2358 '''hook for extensions to override the filematcher for non-follow cases'''
2365 '''hook for extensions to override the filematcher for non-follow cases'''
2359 return None
2366 return None
2360
2367
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.

    Args:
      repo: the repository being logged
      pats: file patterns given on the command line
      opts: dict of log options ('follow', 'branch', 'removed', ...);
        copied and augmented locally, the caller's dict is untouched
      revs: the candidate revisions; the first one (and whether the
        second is larger) decides follow direction
    """
    # option name -> (revset template, join operator for list values);
    # '%(val)s'/'%(val)r' in the template is filled with the option value
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    # follow descendants iff the first two revs are ascending
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    # internal option names, indexed by followfirst (0 or 1)
    fpats = ('_patsfollow', '_patsfollowfirst')
    # indexed by [followdescendants][followfirst]
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # translate the accumulated options into one 'and'-joined revset
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                # list-valued options are joined with their own operator
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2518 return expr, filematcher
2512
2519
def _logrevs(repo, opts):
    """Return the initial smartset of revisions for the log command.

    The default --rev value depends on --follow, but --follow behavior
    depends on revisions resolved from --rev, hence this dedicated helper.
    """
    following = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        # an explicit --rev wins over any --follow default
        return scmutil.revrange(repo, opts['rev'])
    if following and repo.dirstate.p1() == nullid:
        # following from the null working-directory parent: nothing to show
        return smartset.baseset()
    if following:
        return repo.revs('reverse(:.)')
    allrevs = smartset.spanset(repo)
    allrevs.reverse()
    return allrevs
2527
2534
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        # no candidate revisions: skip building the revset entirely
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr)
        revs = matcher(repo, revs)
    if limit is not None:
        # apply --limit lazily; stop consuming revs once the cap is reached
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher
2558
2565
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        # no candidate revisions: skip building the revset entirely
        # (empty baseset spelled the same way as in getgraphlogrevs)
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr)
        revs = matcher(repo, revs)
    if limit is not None:
        # apply --limit lazily; stop consuming revs once the cap is reached
        # (same loop shape as getgraphlogrevs for consistency)
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher
2584
2591
def _parselinerangelogopt(repo, opts):
    """Parse --line-range log option and return a list of tuples (filename,
    (fromline, toline)).

    Each pattern has the form "FILE,FROMLINE:TOLINE"; aborts on a pattern
    missing the comma-separated range, on a non-integer range, or (via
    scmutil.parsefollowlinespattern) on a pattern matching more than one file.
    """
    linerangebyfname = []
    for pat in opts.get('line_range', []):
        try:
            # split from the right so the file name itself may contain commas
            pat, linerange = pat.rsplit(',', 1)
        except ValueError:
            raise error.Abort(_('malformatted line-range pattern %s') % pat)
        try:
            fromline, toline = map(int, linerange.split(':'))
        except ValueError:
            raise error.Abort(_("invalid line range for %s") % pat)
        msg = _("line range pattern '%s' must match exactly one file") % pat
        fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
        linerangebyfname.append(
            (fname, util.processlinerange(fromline, toline)))
    return linerangebyfname
2604
2611
def getloglinerangerevs(repo, userrevs, opts):
    """Return (revs, filematcher, hunksfilter).

    "revs" are revisions obtained by processing "line-range" log options and
    walking block ancestors of each specified file/line-range.

    "filematcher(rev) -> match" is a factory function returning a match object
    for a given revision for file patterns specified in --line-range option.
    If neither --stat nor --patch options are passed, "filematcher" is None.

    "hunksfilter(rev) -> filterfn(fctx, hunks)" is a factory function
    returning a hunks filtering function.
    If neither --stat nor --patch options are passed, "filterhunks" is None.
    """
    wctx = repo[None]

    # Two-levels map of "rev -> file ctx -> [line range]".
    linerangesbyrev = {}
    for fname, (fromline, toline) in _parselinerangelogopt(repo, opts):
        if fname not in wctx:
            raise error.Abort(_('cannot follow file not in parent '
                                'revision: "%s"') % fname)
        fctx = wctx.filectx(fname)
        for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
            rev = fctx.introrev()
            if rev not in userrevs:
                # only record revisions the caller asked to see
                continue
            linerangesbyrev.setdefault(
                rev, {}).setdefault(
                    fctx.path(), []).append(linerange)

    filematcher = None
    hunksfilter = None
    if opts.get('patch') or opts.get('stat'):

        def nofilterhunksfn(fctx, hunks):
            # identity filter for revisions with no recorded line ranges
            return hunks

        def hunksfilter(rev):
            fctxlineranges = linerangesbyrev.get(rev)
            if fctxlineranges is None:
                return nofilterhunksfn

            def filterfn(fctx, hunks):
                # keep only hunks overlapping one of the followed line ranges
                lineranges = fctxlineranges.get(fctx.path())
                if lineranges is not None:
                    for hr, lines in hunks:
                        if hr is None: # binary
                            yield hr, lines
                            continue
                        if any(mdiff.hunkinrange(hr[2:], lr)
                               for lr in lineranges):
                            yield hr, lines
                else:
                    for hunk in hunks:
                        yield hunk

            return filterfn

        def filematcher(rev):
            files = list(linerangesbyrev.get(rev, []))
            return scmutil.matchfiles(repo, files)

    # newest first, matching the usual log display order
    revs = sorted(linerangesbyrev, reverse=True)

    return revs, filematcher, hunksfilter
2671
2678
def _graphnodeformatter(ui, displayer):
    """Return a function (repo, ctx) -> str rendering the graph node symbol.

    Uses the ui.graphnodetemplate config when set; otherwise falls back to
    the built-in {graphnode} template keyword.
    """
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode  # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    templ = formatter.maketemplater(ui, spec)
    cache = {}
    if isinstance(displayer, changeset_templater):
        cache = displayer.cache  # reuse cache of slow templates
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache
    def formatnode(repo, ctx):
        # fill in the per-revision properties; revcache is reset per node
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templ.render(props)
    return formatnode
2692
2699
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None, props=None):
    """Render the revisions of 'dag' as an ASCII graph through 'displayer'.

    'edgefn' yields edge tuples for each node; 'getrenamed', when given, is
    used to report copies; 'filematcher', when given, restricts the files
    detailed per revision; 'props' are extra template properties.
    """
    props = props or {}
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

    # experimental config: experimental.graphshorten
    state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            # collect (destination, source) rename pairs for this revision
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        edges = edgefn(type, char, state, rev, parents)
        # the first edge carries the graph width needed by the displayer
        firstedge = next(edges)
        width = firstedge[2]
        displayer.show(ctx, copies=copies, matchfn=revmatchfn,
                       _graphwidth=width, **props)
        # the displayer buffered its output; pop it and split into lines
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        # draw the node and all its edges; lines are consumed by the first call
        for type, char, width, coldata in itertools.chain([firstedge], edges):
            graphmod.ascii(ui, state, type, char, lines, coldata)
        lines = []
    displayer.close()
2745
2752
def graphlog(ui, repo, pats, opts):
    """Run log with the -G/--graph option, drawing an ASCII revision graph."""
    # Parameters are identical to log command ones
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        # bound rename detection by the highest revision displayed
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)
2762
2769
def checkunsupportedgraphflags(pats, opts):
    """Abort if an option incompatible with -G/--graph was supplied."""
    for badopt in ["newest_first"]:
        if opts.get(badopt):
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % badopt.replace("_", "-"))
2768
2775
def graphrevs(repo, nodes, opts):
    """Reverse 'nodes' in place, honor --limit, and return a graph walker."""
    limit = loglimit(opts)
    nodes.reverse()
    shown = nodes if limit is None else nodes[:limit]
    return graphmod.nodes(repo, shown)
2775
2782
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule files matched by 'match' for addition (backend of hg add).

    'prefix' is prepended to paths for messages and passed to wctx.add;
    when 'explicitonly' is true, only exactly-matched files are added.
    Returns the list of files that could not be added.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # record files the matcher flags as bad, then delegate to its own handler
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
                                  unknown=True, ignored=False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                # check for filename collisions on case-insensitive filesystems
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                # --subrepos: recurse with explicitonly=False
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        # only report rejections for files the user named explicitly
        bad.extend(f for f in rejected if f in match.files())
    return bad
2818
2825
def addwebdirpath(repo, serverpath, webconf):
    """Map 'serverpath' to repo.root in 'webconf', recursing into subrepos.

    Every subrepository ever recorded in a .hgsub revision is registered as
    well, so hgweb can serve the whole tree.
    """
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    # visit every revision that touched .hgsub to find all subrepos
    for r in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[r]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2827
2834
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking files matched by 'match' (backend of hg forget).

    'prefix' joins subrepo paths for messages; when 'explicitonly' is true,
    only exactly-matched files are forgotten. Returns (bad, forgot): files
    that could not be forgotten and files actually forgotten.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # record files the matcher flags as bad, then delegate to its own handler
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # warn about explicitly-named files that are not tracked
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2875
2882
def files(ui, ctx, m, fm, fmt, subrepos):
    """Write the paths in 'ctx' matched by 'm' to formatter 'fm' using 'fmt'.

    Recurses into subrepositories when 'subrepos' is true or the subrepo is
    matched. Returns 0 if at least one file was listed, 1 otherwise
    (command exit-code convention).
    """
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        # in the working directory, skip files marked as removed
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                # only recurse further when asked to, or matched exactly
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2905
2912
2906 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2913 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2907 join = lambda f: os.path.join(prefix, f)
2914 join = lambda f: os.path.join(prefix, f)
2908 ret = 0
2915 ret = 0
2909 s = repo.status(match=m, clean=True)
2916 s = repo.status(match=m, clean=True)
2910 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2917 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2911
2918
2912 wctx = repo[None]
2919 wctx = repo[None]
2913
2920
2914 if warnings is None:
2921 if warnings is None:
2915 warnings = []
2922 warnings = []
2916 warn = True
2923 warn = True
2917 else:
2924 else:
2918 warn = False
2925 warn = False
2919
2926
2920 subs = sorted(wctx.substate)
2927 subs = sorted(wctx.substate)
2921 total = len(subs)
2928 total = len(subs)
2922 count = 0
2929 count = 0
2923 for subpath in subs:
2930 for subpath in subs:
2924 count += 1
2931 count += 1
2925 submatch = matchmod.subdirmatcher(subpath, m)
2932 submatch = matchmod.subdirmatcher(subpath, m)
2926 if subrepos or m.exact(subpath) or any(submatch.files()):
2933 if subrepos or m.exact(subpath) or any(submatch.files()):
2927 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2934 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2928 sub = wctx.sub(subpath)
2935 sub = wctx.sub(subpath)
2929 try:
2936 try:
2930 if sub.removefiles(submatch, prefix, after, force, subrepos,
2937 if sub.removefiles(submatch, prefix, after, force, subrepos,
2931 warnings):
2938 warnings):
2932 ret = 1
2939 ret = 1
2933 except error.LookupError:
2940 except error.LookupError:
2934 warnings.append(_("skipping missing subrepository: %s\n")
2941 warnings.append(_("skipping missing subrepository: %s\n")
2935 % join(subpath))
2942 % join(subpath))
2936 ui.progress(_('searching'), None)
2943 ui.progress(_('searching'), None)
2937
2944
2938 # warn about failure to delete explicit files/dirs
2945 # warn about failure to delete explicit files/dirs
2939 deleteddirs = util.dirs(deleted)
2946 deleteddirs = util.dirs(deleted)
2940 files = m.files()
2947 files = m.files()
2941 total = len(files)
2948 total = len(files)
2942 count = 0
2949 count = 0
2943 for f in files:
2950 for f in files:
2944 def insubrepo():
2951 def insubrepo():
2945 for subpath in wctx.substate:
2952 for subpath in wctx.substate:
2946 if f.startswith(subpath + '/'):
2953 if f.startswith(subpath + '/'):
2947 return True
2954 return True
2948 return False
2955 return False
2949
2956
2950 count += 1
2957 count += 1
2951 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2958 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2952 isdir = f in deleteddirs or wctx.hasdir(f)
2959 isdir = f in deleteddirs or wctx.hasdir(f)
2953 if (f in repo.dirstate or isdir or f == '.'
2960 if (f in repo.dirstate or isdir or f == '.'
2954 or insubrepo() or f in subs):
2961 or insubrepo() or f in subs):
2955 continue
2962 continue
2956
2963
2957 if repo.wvfs.exists(f):
2964 if repo.wvfs.exists(f):
2958 if repo.wvfs.isdir(f):
2965 if repo.wvfs.isdir(f):
2959 warnings.append(_('not removing %s: no tracked files\n')
2966 warnings.append(_('not removing %s: no tracked files\n')
2960 % m.rel(f))
2967 % m.rel(f))
2961 else:
2968 else:
2962 warnings.append(_('not removing %s: file is untracked\n')
2969 warnings.append(_('not removing %s: file is untracked\n')
2963 % m.rel(f))
2970 % m.rel(f))
2964 # missing files will generate a warning elsewhere
2971 # missing files will generate a warning elsewhere
2965 ret = 1
2972 ret = 1
2966 ui.progress(_('deleting'), None)
2973 ui.progress(_('deleting'), None)
2967
2974
2968 if force:
2975 if force:
2969 list = modified + deleted + clean + added
2976 list = modified + deleted + clean + added
2970 elif after:
2977 elif after:
2971 list = deleted
2978 list = deleted
2972 remaining = modified + added + clean
2979 remaining = modified + added + clean
2973 total = len(remaining)
2980 total = len(remaining)
2974 count = 0
2981 count = 0
2975 for f in remaining:
2982 for f in remaining:
2976 count += 1
2983 count += 1
2977 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2984 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2978 warnings.append(_('not removing %s: file still exists\n')
2985 warnings.append(_('not removing %s: file still exists\n')
2979 % m.rel(f))
2986 % m.rel(f))
2980 ret = 1
2987 ret = 1
2981 ui.progress(_('skipping'), None)
2988 ui.progress(_('skipping'), None)
2982 else:
2989 else:
2983 list = deleted + clean
2990 list = deleted + clean
2984 total = len(modified) + len(added)
2991 total = len(modified) + len(added)
2985 count = 0
2992 count = 0
2986 for f in modified:
2993 for f in modified:
2987 count += 1
2994 count += 1
2988 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2995 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2989 warnings.append(_('not removing %s: file is modified (use -f'
2996 warnings.append(_('not removing %s: file is modified (use -f'
2990 ' to force removal)\n') % m.rel(f))
2997 ' to force removal)\n') % m.rel(f))
2991 ret = 1
2998 ret = 1
2992 for f in added:
2999 for f in added:
2993 count += 1
3000 count += 1
2994 ui.progress(_('skipping'), count, total=total, unit=_('files'))
3001 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2995 warnings.append(_("not removing %s: file has been marked for add"
3002 warnings.append(_("not removing %s: file has been marked for add"
2996 " (use 'hg forget' to undo add)\n") % m.rel(f))
3003 " (use 'hg forget' to undo add)\n") % m.rel(f))
2997 ret = 1
3004 ret = 1
2998 ui.progress(_('skipping'), None)
3005 ui.progress(_('skipping'), None)
2999
3006
3000 list = sorted(list)
3007 list = sorted(list)
3001 total = len(list)
3008 total = len(list)
3002 count = 0
3009 count = 0
3003 for f in list:
3010 for f in list:
3004 count += 1
3011 count += 1
3005 if ui.verbose or not m.exact(f):
3012 if ui.verbose or not m.exact(f):
3006 ui.progress(_('deleting'), count, total=total, unit=_('files'))
3013 ui.progress(_('deleting'), count, total=total, unit=_('files'))
3007 ui.status(_('removing %s\n') % m.rel(f))
3014 ui.status(_('removing %s\n') % m.rel(f))
3008 ui.progress(_('deleting'), None)
3015 ui.progress(_('deleting'), None)
3009
3016
3010 with repo.wlock():
3017 with repo.wlock():
3011 if not after:
3018 if not after:
3012 for f in list:
3019 for f in list:
3013 if f in added:
3020 if f in added:
3014 continue # we never unlink added files on remove
3021 continue # we never unlink added files on remove
3015 repo.wvfs.unlinkpath(f, ignoremissing=True)
3022 repo.wvfs.unlinkpath(f, ignoremissing=True)
3016 repo[None].forget(list)
3023 repo[None].forget(list)
3017
3024
3018 if warn:
3025 if warn:
3019 for warning in warnings:
3026 for warning in warnings:
3020 ui.warn(warning)
3027 ui.warn(warning)
3021
3028
3022 return ret
3029 return ret
3023
3030
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Write the contents of files in ``ctx`` that are matched by ``matcher``.

    Output is emitted through the formatter ``basefm``.  When ``fntemplate``
    is non-empty, each file's output is redirected to a file whose name is
    built by expanding the template (parent directories are created on
    demand).  ``prefix`` is prepended to paths when expanding the filename
    template and when recursing into subrepositories.

    Recognized ``opts``: ``decode`` runs the data through the repo's
    write-encoding filters (repo.wwritedata) before output.

    Returns 0 when at least one file (possibly in a subrepository) was
    written, 1 otherwise.
    """
    # Pessimistic default; flipped to 0 as soon as anything is written.
    err = 1

    def write(path):
        # Emit one file, either into basefm or into a freshly opened
        # per-file formatter when a filename template is in effect.
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
            # attempt to create the directory if it does not already exist
            try:
                os.makedirs(os.path.dirname(filename))
            except OSError:
                pass
        with formatter.maybereopen(basefm, filename, opts) as fm:
            data = ctx[path].data()
            if opts.get('decode'):
                # Apply working-directory encoding filters (e.g. EOL).
                data = repo.wwritedata(path, data)
            fm.startitem()
            fm.write('data', '%s', data)
            fm.data(abspath=path, path=matcher.rel(path))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                write(file)
                return 0
        except KeyError:
            # Fall through to the generic walk below.
            pass

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    # Recurse into subrepositories; a successful subrepo cat also counts
    # as success for the overall return value.
    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path), **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
3075
3082
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    # Normalize a user-supplied date up front so later consumers of opts
    # (including commitfunc) see the parsed form.
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        dsguard = dirstateguard.dirstateguard(repo, 'commit')
    else:
        dsguard = None
    with dsguard or util.nullcontextmanager():
        if dsguard and scmutil.addremove(repo, matcher, "", opts) != 0:
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))
        return commitfunc(ui, repo, message, matcher, opts)
3096
3103
def samefile(f, ctx1, ctx2):
    """Report whether file ``f`` is identical in both contexts.

    "Identical" means same content (per filectx.cmp) and same flags.
    A file present in neither context counts as the same; a file present
    in exactly one of them does not.
    """
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if not in1:
        # Absent from ctx1: same only if it is absent from ctx2 as well.
        return not in2
    if not in2:
        return False
    a = ctx1.filectx(f)
    b = ctx2.filectx(f)
    return not a.cmp(b) and a.flags() == b.flags()
3108
3115
def amend(ui, repo, old, extra, pats, opts):
    """Replace changeset ``old`` with a new commit folding in the working
    copy changes to files matched by ``pats``/``opts``.

    ``extra`` is mutated in place: it absorbs the old changeset's and the
    working context's extras, plus an 'amend_source' entry.

    Returns the node of the new changeset, or ``old.node()`` when the
    amend would change nothing (no amended files and identical message,
    user, date and extras) so that no obsolescence churn is created.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    with repo.wlock(), repo.lock(), repo.transaction('amend'):
        # Participating changesets:
        #
        # wctx     o - workingctx that contains changes from working copy
        #          |   to go into amending commit
        #          |
        # old      o - changeset to amend
        #          |
        # base     o - first parent of the changeset to amend
        wctx = repo[None]

        # Update extra dict from amended commit (e.g. to preserve graft
        # source)
        extra.update(old.extra())

        # Also update it from the wctx
        extra.update(wctx.extra())

        user = opts.get('user') or old.user()
        date = opts.get('date') or old.date()

        # Parse the date to allow comparison between date and old.date()
        date = util.parsedate(date)

        if len(old.parents()) > 1:
            # ctx.files() isn't reliable for merges, so fall back to the
            # slower repo.status() method
            files = set([fn for st in repo.status(base, old)[:3]
                         for fn in st])
        else:
            files = set(old.files())

        # add/remove the files to the working copy if the "addremove" option
        # was specified.
        matcher = scmutil.match(wctx, pats, opts)
        if (opts.get('addremove')
            and scmutil.addremove(repo, matcher, "", opts)):
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

        # Working-copy files the amend should actually fold in.
        filestoamend = set(f for f in wctx.files() if matcher(f))

        changes = (len(filestoamend) > 0)
        if changes:
            # Recompute copies (avoid recording a -> b -> a)
            copied = copies.pathcopies(base, wctx, matcher)
            # BUGFIX: the guard previously read "if old.p2:", testing the
            # bound method itself, which is always truthy.  Calling p2()
            # lets basectx truthiness (rev != nullrev) take effect, so the
            # second-parent copy data is only merged in for real merges.
            if old.p2():
                copied.update(copies.pathcopies(old.p2(), wctx, matcher))

            # Prune files which were reverted by the updates: if old
            # introduced file X and the file was renamed in the working
            # copy, then those two files are the same and
            # we can discard X from our list of files. Likewise if X
            # was deleted, it's no longer relevant
            files.update(filestoamend)
            files = [f for f in files if not samefile(f, wctx, base)]

            def filectxfn(repo, ctx_, path):
                try:
                    # If the file being considered is not amongst the files
                    # to be amended, we should return the file context from the
                    # old changeset. This avoids issues when only some files in
                    # the working copy are being amended but there are also
                    # changes to other files from the old changeset.
                    if path not in filestoamend:
                        return old.filectx(path)

                    fctx = wctx[path]

                    # Return None for removed files.
                    if not fctx.exists():
                        return None

                    flags = fctx.flags()
                    mctx = context.memfilectx(repo,
                                              fctx.path(), fctx.data(),
                                              islink='l' in flags,
                                              isexec='x' in flags,
                                              copied=copied.get(path))
                    return mctx
                except KeyError:
                    return None
        else:
            ui.note(_('copying changeset %s to %s\n') % (old, base))

            # Use version of files as in the old cset
            def filectxfn(repo, ctx_, path):
                try:
                    return old.filectx(path)
                except KeyError:
                    return None

        # See if we got a message from -m or -l, if not, open the editor with
        # the message of the changeset to amend.
        message = logmessage(ui, opts)

        editform = mergeeditform(old, 'commit.amend')
        editor = getcommiteditor(editform=editform,
                                 **pycompat.strkwargs(opts))

        if not message:
            editor = getcommiteditor(edit=True, editform=editform)
            message = old.description()

        # Snapshot extras before adding amend_source so the no-op check
        # below compares against what the old changeset actually carries.
        pureextra = extra.copy()
        extra['amend_source'] = old.hex()

        new = context.memctx(repo,
                             parents=[base.node(), old.p2().node()],
                             text=message,
                             files=files,
                             filectxfn=filectxfn,
                             user=user,
                             date=date,
                             extra=extra,
                             editor=editor)

        newdesc = changelog.stripdesc(new.description())
        if ((not changes)
            and newdesc == old.description()
            and user == old.user()
            and date == old.date()
            and pureextra == old.extra()):
            # nothing changed. continuing here would create a new node
            # anyway because of the amend_source noise.
            #
            # This not what we expect from amend.
            return old.node()

        if opts.get('secret'):
            commitphase = 'secret'
        else:
            commitphase = old.phase()
        overrides = {('phases', 'new-commit'): commitphase}
        with ui.configoverride(overrides, 'amend'):
            newid = repo.commitctx(new)

        # Reroute the working copy parent to the new changeset
        repo.setparents(newid, nullid)
        mapping = {old.node(): (newid,)}
        obsmetadata = None
        if opts.get('note'):
            obsmetadata = {'note': opts['note']}
        scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)

        # Fixing the dirstate because localrepo.commitctx does not update
        # it. This is rather convenient because we did not need to update
        # the dirstate for all the files in the new commit which commitctx
        # could have done if it updated the dirstate. Now, we can
        # selectively update the dirstate only for the amended files.
        dirstate = repo.dirstate

        # Update the state of the files which were added and
        # and modified in the amend to "normal" in the dirstate.
        normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
        for f in normalfiles:
            dirstate.normal(f)

        # Update the state of files which were removed in the amend
        # to "removed" in the dirstate.
        removedfiles = set(wctx.removed()) & filestoamend
        for f in removedfiles:
            dirstate.drop(f)

        return newid
3286
3293
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's existing description, or prompt the user for one.

    When the context already carries a description it is returned as-is;
    otherwise the editor is opened and an unchanged template is treated
    as an abort (unchangedmessagedetection).
    """
    description = ctx.description()
    if description:
        return description
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
3292
3299
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Open the user's editor to obtain a commit message for ``ctx``.

    The initial buffer comes from a '[committemplate]' config entry
    matched against progressively shorter dotted prefixes of
    ``editform`` (always trying 'changeset' first in the join order),
    falling back to buildcommittext() when none is configured.

    ``finishdesc``, if given, post-processes the edited text.  Raises
    error.Abort on an empty message, or — when
    ``unchangedmessagedetection`` is true — when a templated message was
    returned from the editor unmodified.  Returns the cleaned message.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # Find the most specific configured template: try
    # 'changeset.<editform parts>' and pop components until one matches.
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        ref = '.'.join(forms)
        if repo.ui.config('committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref)
            break
        forms.pop()
    else:
        # No template configured (loop exhausted without break).
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    # 'pending' is the repo root only when there is pending transaction
    # data the editor's hooks may need to see.
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path, action='commit')
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    # Drop the 'HG:' helper lines that were added to the template.
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
3342
3349
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    """Render the '[committemplate]' template named ``ref`` for ``ctx``.

    All '[committemplate]' config entries are loaded (unquoted) into the
    templater's cache so templates can reference each other.  The
    rendered text is captured via the ui buffer and returned as a
    string.
    """
    ui = repo.ui
    spec = formatter.templatespec(ref, None, None)
    t = changeset_templater(ui, repo, spec, None, {}, False)
    # Make every committemplate entry available to the template engine.
    t.t.cache.update((k, templater.unquotestring(v))
                     for k, v in repo.ui.configitems('committemplate'))

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
3356
3363
def hgprefix(msg):
    """Prefix every non-empty line of ``msg`` with "HG: ".

    Empty lines are dropped entirely, so the result never contains a
    bare "HG: " line.
    """
    prefixed = ("HG: %s" % line for line in msg.split("\n") if line)
    return "\n".join(prefixed)
3359
3366
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default (non-templated) commit editor text for ``ctx``.

    The result starts with any existing description, followed by
    'HG:'-prefixed helper lines describing user, branch/merge state,
    bookmark, subrepos and the file lists; those lines are stripped
    again after editing.
    """
    lines = []
    add = lines.append
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        add(ctx.description())
    add("")
    add("") # Empty line between message and comments.
    add(hgprefix(_("Enter commit message."
                   " Lines beginning with 'HG:' are removed.")))
    add(hgprefix(extramsg))
    add("HG: --")
    add(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        add(hgprefix(_("branch merge")))
    if ctx.branch():
        add(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        add(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    lines.extend(hgprefix(_("subrepo %s") % s) for s in subs)
    lines.extend(hgprefix(_("added %s") % f) for f in added)
    lines.extend(hgprefix(_("changed %s") % f) for f in modified)
    lines.extend(hgprefix(_("removed %s") % f) for f in removed)
    if not added and not modified and not removed:
        add(hgprefix(_("no files changed")))
    add("")

    return "\n".join(lines)
3387
3394
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print informational messages after ``node`` was committed.

    Emits 'created new head' when the new changeset adds a head to
    ``branch`` (see the decision table below), 'reopening closed branch
    head' when a parent closed the branch being committed to, and the
    'committed changeset' line in debug/verbose mode.  ``bheads`` is the
    list of branch head nodes to compare against; returns nothing.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    # New head: node is not already a branch head and no parent was a
    # head of the same branch (amend never reports a new head).
    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N y additional topo root
        #
        # B N y additional branch root
        # C N y additional topo head
        # H N n usual case
        #
        # B B y weird additional branch root
        # C B y branch merge
        # H B n merge with named branch
        #
        # C C y additional head from merge
        # C H n merge with a head
        #
        # H H n head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        # Debug shows the full hash; verbose shows the short form (str(ctx)).
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3435
3442
def postcommitstatus(repo, pats, opts):
    """Return the working-directory status right after a commit.

    ``pats``/``opts`` are matched against the working context so callers
    (e.g. commit reporting) see what is still dirty.
    """
    matcher = scmutil.match(repo[None], pats, opts)
    return repo.status(match=matcher)
3438
3445
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Revert matched files in the working directory to their state in ``ctx``.

    Args:
      ui: ui instance used for prompts, warnings and status output.
      repo: localrepository to operate on.
      ctx: changectx to revert to.
      parents: 2-tuple (p1, p2) of the working directory's parent nodes.
      pats: file patterns selecting which files to revert.
      opts: command options; 'interactive', 'no_backup' and 'dry_run' are
        honored here, plus standard matcher options.

    Returns:
      None. Side effects: rewrites files, updates dirstate entries and may
      create ``.orig`` backup files.
    """
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # stay quiet for paths already covered from the other walk
                # or belonging to a subrepo; warn otherwise
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If a such file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                   }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set to avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1  # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that results that will change file on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since target, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets does not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))
3721
3728
3722 def _revertprefetch(repo, ctx, *files):
3729 def _revertprefetch(repo, ctx, *files):
3723 """Let extension changing the storage layer prefetch content"""
3730 """Let extension changing the storage layer prefetch content"""
3724
3731
def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """Apply the action lists computed by ``revert`` to the working directory.

    This is an independent function so extensions can plug in and react to
    the imminent revert.

    Args:
      repo: localrepository being reverted.
      parents: 2-tuple (p1, p2) of the working directory's parent nodes.
      ctx: changectx being reverted to.
      actions: dict of action name -> (list of files, message template),
        as built by ``revert``.
      interactive: if True, prompt per file (and per hunk for 'revert').
      tobackup: set of files that need an interactive-mode backup.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    excluded_files = []
    # excluded_files is mutated below; the matcher built from matcher_opts
    # sees the additions because it holds a reference to the same list
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        # write f's content (and flags) from the target revision
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # delete f from disk (best-effort) and mark it removed in dirstate
        try:
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root, cached=True)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'apply'
            reversehunks = False
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except error.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3861
3868
class command(registrar.command):
    """Deprecated: use ``registrar.command`` instead.

    Kept so third-party extensions importing ``cmdutil.command`` still work;
    registrations through it are flagged for a deprecation warning.
    """
    def _doregister(self, func, name, *args, **kwargs):
        func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
        return super(command, self)._doregister(func, name, *args, **kwargs)
3866
3874
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# (state file, clearable, allowcommit, error, hint)
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3895
3903
def checkunfinished(repo, commit=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged().

    When 'commit' is True, states whose 'allowcommit' flag is set are
    ignored, so that committing is still possible during them.
    '''
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if commit and allowcommit:
            # this operation explicitly permits committing while active
            continue
        if repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
3906
3914
def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    # First pass: abort on any non-clearable state before touching anything,
    # so we never remove some state files and then fail on another.
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
    # Second pass: remove every clearable state file that is present.
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.vfs.join(statefile))
3917
3925
# (state file, command needed to finish the operation it represents)
afterresolvedstates = [
    ('graftstate',
     _('hg graft --continue')),
]
3922
3930
def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    afterresolvedstates tuples define a .hg/{file} and the corresponding
    command needed to finish it.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean; both are None when there is nothing to continue.
    '''
    contmsg = _("continue: %s")
    for statefile, cmdmsg in afterresolvedstates:
        if repo.vfs.exists(statefile):
            # an in-progress operation was found: warn about it
            return contmsg % cmdmsg, True
    # no state file; a dirty working directory still suggests 'hg commit'
    if repo[None].dirty(missing=True, merge=False, branch=False):
        return contmsg % _("hg commit"), False
    return None, None
3940
3948
def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there's a matching afterresolvedstates, the message is reported
    through repo.ui.warn; otherwise through repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    reporter = repo.ui.warn if warning else repo.ui.note
    reporter("%s\n" % msg)
3955
3963
def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), it does not offer
    a hint.
    '''
    msg, warning = howtocontinue(repo)
    # only a real in-progress operation (warning=True) yields a hint
    hint = msg if warning else None
    raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,2608 +1,2619
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import re
13 import re
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 addednodeid,
18 addednodeid,
19 bin,
19 bin,
20 hex,
20 hex,
21 modifiednodeid,
21 modifiednodeid,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirnodes,
26 wdirnodes,
27 wdirrev,
27 wdirrev,
28 )
28 )
29 from .thirdparty import (
29 from .thirdparty import (
30 attr,
30 attr,
31 )
31 )
32 from . import (
32 from . import (
33 encoding,
33 encoding,
34 error,
34 error,
35 fileset,
35 fileset,
36 match as matchmod,
36 match as matchmod,
37 mdiff,
37 mdiff,
38 obsolete as obsmod,
38 obsolete as obsmod,
39 patch,
39 patch,
40 pathutil,
40 pathutil,
41 phases,
41 phases,
42 pycompat,
42 pycompat,
43 repoview,
43 repoview,
44 revlog,
44 revlog,
45 scmutil,
45 scmutil,
46 sparse,
46 sparse,
47 subrepo,
47 subrepo,
48 util,
48 util,
49 )
49 )
50
50
51 propertycache = util.propertycache
51 propertycache = util.propertycache
52
52
53 nonascii = re.compile(r'[^\x21-\x7f]').search
53 nonascii = re.compile(r'[^\x21-\x7f]').search
54
54
class basectx(object):
    """Common logic shared by the context classes.

    Children:
      changectx: read-only context that is already present in the repo,
      workingctx: a context that represents the working directory and can
        be committed,
      memctx: a context that represents changes in-memory and can also
        be committed.
    """
    def __new__(cls, repo, changeid='', *args, **kwargs):
        # Passing an existing context through returns it unchanged, so
        # callers can wrap values without checking the type first.
        if isinstance(changeid, basectx):
            return changeid

        obj = super(basectx, cls).__new__(cls)
        obj._repo = repo
        obj._rev = nullrev
        obj._node = nullid
        return obj

    def __bytes__(self):
        return short(self.node())

    __str__ = encoding.strmethod(__bytes__)

    def __int__(self):
        return self.rev()

    def __repr__(self):
        return r"<%s %s>" % (type(self).__name__, str(self))

    def __eq__(self, other):
        # Contexts of different classes never compare equal, even if they
        # refer to the same revision.
        try:
            return type(self) == type(other) and self._rev == other._rev
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __contains__(self, key):
        return key in self._manifest

    def __getitem__(self, key):
        return self.filectx(key)

    def __iter__(self):
        return iter(self._manifest)

    def _buildstatusmanifest(self, status):
        """Builds a manifest that includes the given status results, if this is
        a working copy context. For non-working copy contexts, it just returns
        the normal manifest."""
        return self.manifest()

    def _matchstatus(self, other, match):
        """Hook letting child classes substitute the matcher used by
        status()."""
        return match

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context"""
        # Load earliest manifest first for caching reasons. More specifically,
        # if you have revisions 1000 and 1001, 1001 is probably stored as a
        # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
        # 1000 and cache it so that when you read 1001, we just need to apply a
        # delta to what's in the cache. So that's one full reconstruction + one
        # delta application.
        newmf = None
        if self.rev() is not None and self.rev() < other.rev():
            newmf = self._buildstatusmanifest(s)
        oldmf = other._buildstatusmanifest(s)
        if newmf is None:
            newmf = self._buildstatusmanifest(s)

        modified, added, removed, clean = [], [], [], []
        deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
        deletedset = set(deleted)
        mfdiff = oldmf.diff(newmf, match=match, clean=listclean)
        for fn, entry in mfdiff.iteritems():
            if fn in deletedset:
                continue
            if entry is None:
                clean.append(fn)
                continue
            (node1, flag1), (node2, flag2) = entry
            if node1 is None:
                added.append(fn)
            elif node2 is None:
                removed.append(fn)
            elif flag1 != flag2:
                modified.append(fn)
            elif node2 not in wdirnodes:
                # When comparing files between two commits, we save time by
                # not comparing the file contents when the nodeids differ.
                # Note that this means we incorrectly report a reverted change
                # to a file as a modification.
                modified.append(fn)
            elif self[fn].cmp(other[fn]):
                modified.append(fn)
            else:
                clean.append(fn)

        if removed:
            # need to filter files if they are already reported as removed
            unknown = [fn for fn in unknown
                       if fn not in oldmf and (not match or match(fn))]
            ignored = [fn for fn in ignored
                       if fn not in oldmf and (not match or match(fn))]
            # if they're deleted, don't report them as removed
            removed = [fn for fn in removed if fn not in deletedset]

        return scmutil.status(modified, added, removed, deleted, unknown,
                              ignored, clean)

    @propertycache
    def substate(self):
        return subrepo.state(self, self._repo.ui)

    def subrev(self, subpath):
        return self.substate[subpath][1]

    def rev(self):
        return self._rev
    def node(self):
        return self._node
    def hex(self):
        return hex(self.node())
    def manifest(self):
        return self._manifest
    def manifestctx(self):
        return self._manifestctx
    def repo(self):
        return self._repo
    def phasestr(self):
        return phases.phasenames[self.phase()]
    def mutable(self):
        return self.phase() > phases.public

    def getfileset(self, expr):
        return fileset.getfileset(self, expr)

    def obsolete(self):
        """True if the changeset is obsolete"""
        return self.rev() in obsmod.getrevs(self._repo, 'obsolete')

    def extinct(self):
        """True if the changeset is extinct"""
        return self.rev() in obsmod.getrevs(self._repo, 'extinct')

    def unstable(self):
        # deprecated alias kept for backwards compatibility
        msg = ("'context.unstable' is deprecated, "
               "use 'context.orphan'")
        self._repo.ui.deprecwarn(msg, '4.4')
        return self.orphan()

    def orphan(self):
        """True if the changeset is not obsolete but its ancestors are"""
        return self.rev() in obsmod.getrevs(self._repo, 'orphan')

    def bumped(self):
        # deprecated alias kept for backwards compatibility
        msg = ("'context.bumped' is deprecated, "
               "use 'context.phasedivergent'")
        self._repo.ui.deprecwarn(msg, '4.4')
        return self.phasedivergent()

    def phasedivergent(self):
        """True if the changeset tries to be a successor of a public changeset

        Only non-public and non-obsolete changesets may be bumped.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')

    def divergent(self):
        # deprecated alias kept for backwards compatibility
        msg = ("'context.divergent' is deprecated, "
               "use 'context.contentdivergent'")
        self._repo.ui.deprecwarn(msg, '4.4')
        return self.contentdivergent()

    def contentdivergent(self):
        """Is a successor of a changeset with multiple possible successor sets

        Only non-public and non-obsolete changesets may be divergent.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')

    def troubled(self):
        # deprecated alias kept for backwards compatibility
        msg = ("'context.troubled' is deprecated, "
               "use 'context.isunstable'")
        self._repo.ui.deprecwarn(msg, '4.4')
        return self.isunstable()

    def isunstable(self):
        """True if the changeset is either unstable, bumped or divergent"""
        return self.orphan() or self.phasedivergent() or self.contentdivergent()

    def troubles(self):
        """Keep the old version around in order to avoid breaking extensions
        about different return values.
        """
        msg = ("'context.troubles' is deprecated, "
               "use 'context.instabilities'")
        self._repo.ui.deprecwarn(msg, '4.4')

        # old-style names for the same predicates as instabilities()
        checks = ((self.orphan, 'orphan'),
                  (self.phasedivergent, 'bumped'),
                  (self.contentdivergent, 'divergent'))
        return [name for predicate, name in checks if predicate()]

    def instabilities(self):
        """return the list of instabilities affecting this changeset.

        Instabilities are returned as strings. possible values are:
        - orphan,
        - phase-divergent,
        - content-divergent.
        """
        checks = ((self.orphan, 'orphan'),
                  (self.phasedivergent, 'phase-divergent'),
                  (self.contentdivergent, 'content-divergent'))
        return [name for predicate, name in checks if predicate()]

    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def p1(self):
        return self._parents[0]

    def p2(self):
        parents = self._parents
        if len(parents) == 2:
            return parents[1]
        # a single-parent changeset gets the null revision as p2
        return changectx(self._repo, nullrev)

    def _fileinfo(self, path):
        # Return (filenode, flags) for path, preferring whichever manifest
        # data is already cached on this context.
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.ManifestLookupError(self._node, path,
                                                _('not found in manifest'))
        if r'_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return (self._manifestdelta[path],
                        self._manifestdelta.flags(path))
        # fall back to a targeted lookup in the full manifest
        mfl = self._repo.manifestlog
        try:
            node, flag = mfl[self._changeset.manifest].find(path)
        except KeyError:
            raise error.ManifestLookupError(self._node, path,
                                            _('not found in manifest'))

        return node, flag

    def filenode(self, path):
        return self._fileinfo(path)[0]

    def flags(self, path):
        try:
            return self._fileinfo(path)[1]
        except error.LookupError:
            return ''

    def sub(self, path, allowcreate=True):
        '''return a subrepo for the stored revision of path, never wdir()'''
        return subrepo.subrepo(self, path, allowcreate=allowcreate)

    def nullsub(self, path, pctx):
        return subrepo.nullsubrepo(self, path, pctx)

    def workingsub(self, path):
        '''return a subrepo for the stored revision, or wdir if this is a wdir
        context.
        '''
        return subrepo.subrepo(self, path, allowwdir=True)

    def match(self, pats=None, include=None, exclude=None, default='glob',
              listsubrepos=False, badfn=None):
        repo = self._repo
        return matchmod.match(repo.root, repo.getcwd(), pats,
                              include, exclude, default,
                              auditor=repo.nofsauditor, ctx=self,
                              listsubrepos=listsubrepos, badfn=badfn)

    def diff(self, ctx2=None, match=None, **opts):
        """Returns a diff generator for the given contexts and matcher"""
        if ctx2 is None:
            ctx2 = self.p1()
        if ctx2 is not None:
            ctx2 = self._repo[ctx2]
        diffopts = patch.diffopts(self._repo.ui, opts)
        return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)

    def dirs(self):
        return self._manifest.dirs()

    def hasdir(self, dir):
        return self._manifest.hasdir(dir)

    def status(self, other=None, match=None, listignored=False,
               listclean=False, listunknown=False, listsubrepos=False):
        """return status of files between two nodes or node and working
        directory.

        If other is None, compare this node with working directory.

        returns (modified, added, removed, deleted, unknown, ignored, clean)
        """

        ctx1 = self
        ctx2 = self._repo[other]

        # This next code block is, admittedly, fragile logic that tests for
        # reversing the contexts and wouldn't need to exist if it weren't for
        # the fast (and common) code path of comparing the working directory
        # with its first parent.
        #
        # What we're aiming for here is the ability to call:
        #
        # workingctx.status(parentctx)
        #
        # If we always built the manifest for each context and compared those,
        # then we'd be done. But the special case of the above call means we
        # just copy the manifest of the parent.
        swapped = False
        if (not isinstance(ctx1, changectx)
            and isinstance(ctx2, changectx)):
            swapped = True
            ctx1, ctx2 = ctx2, ctx1

        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        match = ctx2._matchstatus(ctx1, match)
        result = scmutil.status([], [], [], [], [], [], [])
        result = ctx2._buildstatus(ctx1, result, match, listignored, listclean,
                                   listunknown)

        if swapped:
            # Reverse added and removed. Clear deleted, unknown and ignored as
            # these make no sense to reverse.
            result = scmutil.status(result.modified, result.removed,
                                    result.added, [], [], [], result.clean)

        if listsubrepos:
            for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
                try:
                    rev2 = ctx2.subrev(subpath)
                except KeyError:
                    # A subrepo that existed in node1 was deleted between
                    # node1 and node2 (inclusive). Thus, ctx2's substate
                    # won't contain that subpath. The best we can do ignore it.
                    rev2 = None
                submatch = matchmod.subdirmatcher(subpath, match)
                substatus = sub.status(rev2, match=submatch,
                                       ignored=listignored, clean=listclean,
                                       unknown=listunknown, listsubrepos=True)
                for rfiles, sfiles in zip(result, substatus):
                    rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)

        for entries in result:
            entries.sort()

        return result
429
429
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):
        # hidden revisions come with a hint about the --hidden flag
        return error.FilteredRepoLookupError(
            _("hidden revision '%s'") % changeid,
            hint=_('use --hidden to access hidden revisions'))
    msg = _("filtered revision '%s' (not in '%s' subset)")
    return error.FilteredRepoLookupError(msg % (changeid, repo.filtername))
442
442
443 class changectx(basectx):
443 class changectx(basectx):
444 """A changecontext object makes access to data related to a particular
444 """A changecontext object makes access to data related to a particular
445 changeset convenient. It represents a read-only context already present in
445 changeset convenient. It represents a read-only context already present in
446 the repo."""
446 the repo."""
447 def __init__(self, repo, changeid=''):
447 def __init__(self, repo, changeid=''):
448 """changeid is a revision number, node, or tag"""
448 """changeid is a revision number, node, or tag"""
449
449
450 # since basectx.__new__ already took care of copying the object, we
450 # since basectx.__new__ already took care of copying the object, we
451 # don't need to do anything in __init__, so we just exit here
451 # don't need to do anything in __init__, so we just exit here
452 if isinstance(changeid, basectx):
452 if isinstance(changeid, basectx):
453 return
453 return
454
454
455 if changeid == '':
455 if changeid == '':
456 changeid = '.'
456 changeid = '.'
457 self._repo = repo
457 self._repo = repo
458
458
459 try:
459 try:
460 if isinstance(changeid, int):
460 if isinstance(changeid, int):
461 self._node = repo.changelog.node(changeid)
461 self._node = repo.changelog.node(changeid)
462 self._rev = changeid
462 self._rev = changeid
463 return
463 return
464 if not pycompat.ispy3 and isinstance(changeid, long):
464 if not pycompat.ispy3 and isinstance(changeid, long):
465 changeid = str(changeid)
465 changeid = str(changeid)
466 if changeid == 'null':
466 if changeid == 'null':
467 self._node = nullid
467 self._node = nullid
468 self._rev = nullrev
468 self._rev = nullrev
469 return
469 return
470 if changeid == 'tip':
470 if changeid == 'tip':
471 self._node = repo.changelog.tip()
471 self._node = repo.changelog.tip()
472 self._rev = repo.changelog.rev(self._node)
472 self._rev = repo.changelog.rev(self._node)
473 return
473 return
474 if (changeid == '.'
474 if (changeid == '.'
475 or repo.local() and changeid == repo.dirstate.p1()):
475 or repo.local() and changeid == repo.dirstate.p1()):
476 # this is a hack to delay/avoid loading obsmarkers
476 # this is a hack to delay/avoid loading obsmarkers
477 # when we know that '.' won't be hidden
477 # when we know that '.' won't be hidden
478 self._node = repo.dirstate.p1()
478 self._node = repo.dirstate.p1()
479 self._rev = repo.unfiltered().changelog.rev(self._node)
479 self._rev = repo.unfiltered().changelog.rev(self._node)
480 return
480 return
481 if len(changeid) == 20:
481 if len(changeid) == 20:
482 try:
482 try:
483 self._node = changeid
483 self._node = changeid
484 self._rev = repo.changelog.rev(changeid)
484 self._rev = repo.changelog.rev(changeid)
485 return
485 return
486 except error.FilteredRepoLookupError:
486 except error.FilteredRepoLookupError:
487 raise
487 raise
488 except LookupError:
488 except LookupError:
489 pass
489 pass
490
490
491 try:
491 try:
492 r = int(changeid)
492 r = int(changeid)
493 if '%d' % r != changeid:
493 if '%d' % r != changeid:
494 raise ValueError
494 raise ValueError
495 l = len(repo.changelog)
495 l = len(repo.changelog)
496 if r < 0:
496 if r < 0:
497 r += l
497 r += l
498 if r < 0 or r >= l and r != wdirrev:
498 if r < 0 or r >= l and r != wdirrev:
499 raise ValueError
499 raise ValueError
500 self._rev = r
500 self._rev = r
501 self._node = repo.changelog.node(r)
501 self._node = repo.changelog.node(r)
502 return
502 return
503 except error.FilteredIndexError:
503 except error.FilteredIndexError:
504 raise
504 raise
505 except (ValueError, OverflowError, IndexError):
505 except (ValueError, OverflowError, IndexError):
506 pass
506 pass
507
507
508 if len(changeid) == 40:
508 if len(changeid) == 40:
509 try:
509 try:
510 self._node = bin(changeid)
510 self._node = bin(changeid)
511 self._rev = repo.changelog.rev(self._node)
511 self._rev = repo.changelog.rev(self._node)
512 return
512 return
513 except error.FilteredLookupError:
513 except error.FilteredLookupError:
514 raise
514 raise
515 except (TypeError, LookupError):
515 except (TypeError, LookupError):
516 pass
516 pass
517
517
518 # lookup bookmarks through the name interface
518 # lookup bookmarks through the name interface
519 try:
519 try:
520 self._node = repo.names.singlenode(repo, changeid)
520 self._node = repo.names.singlenode(repo, changeid)
521 self._rev = repo.changelog.rev(self._node)
521 self._rev = repo.changelog.rev(self._node)
522 return
522 return
523 except KeyError:
523 except KeyError:
524 pass
524 pass
525 except error.FilteredRepoLookupError:
525 except error.FilteredRepoLookupError:
526 raise
526 raise
527 except error.RepoLookupError:
527 except error.RepoLookupError:
528 pass
528 pass
529
529
530 self._node = repo.unfiltered().changelog._partialmatch(changeid)
530 self._node = repo.unfiltered().changelog._partialmatch(changeid)
531 if self._node is not None:
531 if self._node is not None:
532 self._rev = repo.changelog.rev(self._node)
532 self._rev = repo.changelog.rev(self._node)
533 return
533 return
534
534
535 # lookup failed
535 # lookup failed
536 # check if it might have come from damaged dirstate
536 # check if it might have come from damaged dirstate
537 #
537 #
538 # XXX we could avoid the unfiltered if we had a recognizable
538 # XXX we could avoid the unfiltered if we had a recognizable
539 # exception for filtered changeset access
539 # exception for filtered changeset access
540 if (repo.local()
540 if (repo.local()
541 and changeid in repo.unfiltered().dirstate.parents()):
541 and changeid in repo.unfiltered().dirstate.parents()):
542 msg = _("working directory has unknown parent '%s'!")
542 msg = _("working directory has unknown parent '%s'!")
543 raise error.Abort(msg % short(changeid))
543 raise error.Abort(msg % short(changeid))
544 try:
544 try:
545 if len(changeid) == 20 and nonascii(changeid):
545 if len(changeid) == 20 and nonascii(changeid):
546 changeid = hex(changeid)
546 changeid = hex(changeid)
547 except TypeError:
547 except TypeError:
548 pass
548 pass
549 except (error.FilteredIndexError, error.FilteredLookupError,
549 except (error.FilteredIndexError, error.FilteredLookupError,
550 error.FilteredRepoLookupError):
550 error.FilteredRepoLookupError):
551 raise _filterederror(repo, changeid)
551 raise _filterederror(repo, changeid)
552 except IndexError:
552 except IndexError:
553 pass
553 pass
554 raise error.RepoLookupError(
554 raise error.RepoLookupError(
555 _("unknown revision '%s'") % changeid)
555 _("unknown revision '%s'") % changeid)
556
556
557 def __hash__(self):
557 def __hash__(self):
558 try:
558 try:
559 return hash(self._rev)
559 return hash(self._rev)
560 except AttributeError:
560 except AttributeError:
561 return id(self)
561 return id(self)
562
562
563 def __nonzero__(self):
563 def __nonzero__(self):
564 return self._rev != nullrev
564 return self._rev != nullrev
565
565
566 __bool__ = __nonzero__
566 __bool__ = __nonzero__
567
567
568 @propertycache
568 @propertycache
569 def _changeset(self):
569 def _changeset(self):
570 return self._repo.changelog.changelogrevision(self.rev())
570 return self._repo.changelog.changelogrevision(self.rev())
571
571
572 @propertycache
572 @propertycache
573 def _manifest(self):
573 def _manifest(self):
574 return self._manifestctx.read()
574 return self._manifestctx.read()
575
575
576 @property
576 @property
577 def _manifestctx(self):
577 def _manifestctx(self):
578 return self._repo.manifestlog[self._changeset.manifest]
578 return self._repo.manifestlog[self._changeset.manifest]
579
579
580 @propertycache
580 @propertycache
581 def _manifestdelta(self):
581 def _manifestdelta(self):
582 return self._manifestctx.readdelta()
582 return self._manifestctx.readdelta()
583
583
584 @propertycache
584 @propertycache
585 def _parents(self):
585 def _parents(self):
586 repo = self._repo
586 repo = self._repo
587 p1, p2 = repo.changelog.parentrevs(self._rev)
587 p1, p2 = repo.changelog.parentrevs(self._rev)
588 if p2 == nullrev:
588 if p2 == nullrev:
589 return [changectx(repo, p1)]
589 return [changectx(repo, p1)]
590 return [changectx(repo, p1), changectx(repo, p2)]
590 return [changectx(repo, p1), changectx(repo, p2)]
591
591
592 def changeset(self):
592 def changeset(self):
593 c = self._changeset
593 c = self._changeset
594 return (
594 return (
595 c.manifest,
595 c.manifest,
596 c.user,
596 c.user,
597 c.date,
597 c.date,
598 c.files,
598 c.files,
599 c.description,
599 c.description,
600 c.extra,
600 c.extra,
601 )
601 )
602 def manifestnode(self):
602 def manifestnode(self):
603 return self._changeset.manifest
603 return self._changeset.manifest
604
604
605 def user(self):
605 def user(self):
606 return self._changeset.user
606 return self._changeset.user
607 def date(self):
607 def date(self):
608 return self._changeset.date
608 return self._changeset.date
609 def files(self):
609 def files(self):
610 return self._changeset.files
610 return self._changeset.files
611 def description(self):
611 def description(self):
612 return self._changeset.description
612 return self._changeset.description
613 def branch(self):
613 def branch(self):
614 return encoding.tolocal(self._changeset.extra.get("branch"))
614 return encoding.tolocal(self._changeset.extra.get("branch"))
615 def closesbranch(self):
615 def closesbranch(self):
616 return 'close' in self._changeset.extra
616 return 'close' in self._changeset.extra
617 def extra(self):
617 def extra(self):
618 """Return a dict of extra information."""
618 return self._changeset.extra
619 return self._changeset.extra
619 def tags(self):
620 def tags(self):
621 """Return a list of byte tag names"""
620 return self._repo.nodetags(self._node)
622 return self._repo.nodetags(self._node)
621 def bookmarks(self):
623 def bookmarks(self):
624 """Return a list of byte bookmark names."""
622 return self._repo.nodebookmarks(self._node)
625 return self._repo.nodebookmarks(self._node)
623 def phase(self):
626 def phase(self):
624 return self._repo._phasecache.phase(self._repo, self._rev)
627 return self._repo._phasecache.phase(self._repo, self._rev)
625 def hidden(self):
628 def hidden(self):
626 return self._rev in repoview.filterrevs(self._repo, 'visible')
629 return self._rev in repoview.filterrevs(self._repo, 'visible')
627
630
628 def isinmemory(self):
631 def isinmemory(self):
629 return False
632 return False
630
633
631 def children(self):
634 def children(self):
632 """return contexts for each child changeset"""
635 """return list of changectx contexts for each child changeset.
636
637 This returns only the immediate child changesets. Use descendants() to
638 recursively walk children.
639 """
633 c = self._repo.changelog.children(self._node)
640 c = self._repo.changelog.children(self._node)
634 return [changectx(self._repo, x) for x in c]
641 return [changectx(self._repo, x) for x in c]
635
642
636 def ancestors(self):
643 def ancestors(self):
637 for a in self._repo.changelog.ancestors([self._rev]):
644 for a in self._repo.changelog.ancestors([self._rev]):
638 yield changectx(self._repo, a)
645 yield changectx(self._repo, a)
639
646
640 def descendants(self):
647 def descendants(self):
648 """Recursively yield all children of the changeset.
649
650 For just the immediate children, use children()
651 """
641 for d in self._repo.changelog.descendants([self._rev]):
652 for d in self._repo.changelog.descendants([self._rev]):
642 yield changectx(self._repo, d)
653 yield changectx(self._repo, d)
643
654
644 def filectx(self, path, fileid=None, filelog=None):
655 def filectx(self, path, fileid=None, filelog=None):
645 """get a file context from this changeset"""
656 """get a file context from this changeset"""
646 if fileid is None:
657 if fileid is None:
647 fileid = self.filenode(path)
658 fileid = self.filenode(path)
648 return filectx(self._repo, path, fileid=fileid,
659 return filectx(self._repo, path, fileid=fileid,
649 changectx=self, filelog=filelog)
660 changectx=self, filelog=filelog)
650
661
651 def ancestor(self, c2, warn=False):
662 def ancestor(self, c2, warn=False):
652 """return the "best" ancestor context of self and c2
663 """return the "best" ancestor context of self and c2
653
664
654 If there are multiple candidates, it will show a message and check
665 If there are multiple candidates, it will show a message and check
655 merge.preferancestor configuration before falling back to the
666 merge.preferancestor configuration before falling back to the
656 revlog ancestor."""
667 revlog ancestor."""
657 # deal with workingctxs
668 # deal with workingctxs
658 n2 = c2._node
669 n2 = c2._node
659 if n2 is None:
670 if n2 is None:
660 n2 = c2._parents[0]._node
671 n2 = c2._parents[0]._node
661 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
672 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
662 if not cahs:
673 if not cahs:
663 anc = nullid
674 anc = nullid
664 elif len(cahs) == 1:
675 elif len(cahs) == 1:
665 anc = cahs[0]
676 anc = cahs[0]
666 else:
677 else:
667 # experimental config: merge.preferancestor
678 # experimental config: merge.preferancestor
668 for r in self._repo.ui.configlist('merge', 'preferancestor'):
679 for r in self._repo.ui.configlist('merge', 'preferancestor'):
669 try:
680 try:
670 ctx = changectx(self._repo, r)
681 ctx = changectx(self._repo, r)
671 except error.RepoLookupError:
682 except error.RepoLookupError:
672 continue
683 continue
673 anc = ctx.node()
684 anc = ctx.node()
674 if anc in cahs:
685 if anc in cahs:
675 break
686 break
676 else:
687 else:
677 anc = self._repo.changelog.ancestor(self._node, n2)
688 anc = self._repo.changelog.ancestor(self._node, n2)
678 if warn:
689 if warn:
679 self._repo.ui.status(
690 self._repo.ui.status(
680 (_("note: using %s as ancestor of %s and %s\n") %
691 (_("note: using %s as ancestor of %s and %s\n") %
681 (short(anc), short(self._node), short(n2))) +
692 (short(anc), short(self._node), short(n2))) +
682 ''.join(_(" alternatively, use --config "
693 ''.join(_(" alternatively, use --config "
683 "merge.preferancestor=%s\n") %
694 "merge.preferancestor=%s\n") %
684 short(n) for n in sorted(cahs) if n != anc))
695 short(n) for n in sorted(cahs) if n != anc))
685 return changectx(self._repo, anc)
696 return changectx(self._repo, anc)
686
697
687 def descendant(self, other):
698 def descendant(self, other):
688 """True if other is descendant of this changeset"""
699 """True if other is descendant of this changeset"""
689 return self._repo.changelog.descendant(self._rev, other._rev)
700 return self._repo.changelog.descendant(self._rev, other._rev)
690
701
691 def walk(self, match):
702 def walk(self, match):
692 '''Generates matching file names.'''
703 '''Generates matching file names.'''
693
704
694 # Wrap match.bad method to have message with nodeid
705 # Wrap match.bad method to have message with nodeid
695 def bad(fn, msg):
706 def bad(fn, msg):
696 # The manifest doesn't know about subrepos, so don't complain about
707 # The manifest doesn't know about subrepos, so don't complain about
697 # paths into valid subrepos.
708 # paths into valid subrepos.
698 if any(fn == s or fn.startswith(s + '/')
709 if any(fn == s or fn.startswith(s + '/')
699 for s in self.substate):
710 for s in self.substate):
700 return
711 return
701 match.bad(fn, _('no such file in rev %s') % self)
712 match.bad(fn, _('no such file in rev %s') % self)
702
713
703 m = matchmod.badmatch(match, bad)
714 m = matchmod.badmatch(match, bad)
704 return self._manifest.walk(m)
715 return self._manifest.walk(m)
705
716
706 def matches(self, match):
717 def matches(self, match):
707 return self.walk(match)
718 return self.walk(match)
708
719
709 class basefilectx(object):
720 class basefilectx(object):
710 """A filecontext object represents the common logic for its children:
721 """A filecontext object represents the common logic for its children:
711 filectx: read-only access to a filerevision that is already present
722 filectx: read-only access to a filerevision that is already present
712 in the repo,
723 in the repo,
713 workingfilectx: a filecontext that represents files from the working
724 workingfilectx: a filecontext that represents files from the working
714 directory,
725 directory,
715 memfilectx: a filecontext that represents files in-memory,
726 memfilectx: a filecontext that represents files in-memory,
716 overlayfilectx: duplicate another filecontext with some fields overridden.
727 overlayfilectx: duplicate another filecontext with some fields overridden.
717 """
728 """
718 @propertycache
729 @propertycache
719 def _filelog(self):
730 def _filelog(self):
720 return self._repo.file(self._path)
731 return self._repo.file(self._path)
721
732
722 @propertycache
733 @propertycache
723 def _changeid(self):
734 def _changeid(self):
724 if r'_changeid' in self.__dict__:
735 if r'_changeid' in self.__dict__:
725 return self._changeid
736 return self._changeid
726 elif r'_changectx' in self.__dict__:
737 elif r'_changectx' in self.__dict__:
727 return self._changectx.rev()
738 return self._changectx.rev()
728 elif r'_descendantrev' in self.__dict__:
739 elif r'_descendantrev' in self.__dict__:
729 # this file context was created from a revision with a known
740 # this file context was created from a revision with a known
730 # descendant, we can (lazily) correct for linkrev aliases
741 # descendant, we can (lazily) correct for linkrev aliases
731 return self._adjustlinkrev(self._descendantrev)
742 return self._adjustlinkrev(self._descendantrev)
732 else:
743 else:
733 return self._filelog.linkrev(self._filerev)
744 return self._filelog.linkrev(self._filerev)
734
745
735 @propertycache
746 @propertycache
736 def _filenode(self):
747 def _filenode(self):
737 if r'_fileid' in self.__dict__:
748 if r'_fileid' in self.__dict__:
738 return self._filelog.lookup(self._fileid)
749 return self._filelog.lookup(self._fileid)
739 else:
750 else:
740 return self._changectx.filenode(self._path)
751 return self._changectx.filenode(self._path)
741
752
742 @propertycache
753 @propertycache
743 def _filerev(self):
754 def _filerev(self):
744 return self._filelog.rev(self._filenode)
755 return self._filelog.rev(self._filenode)
745
756
746 @propertycache
757 @propertycache
747 def _repopath(self):
758 def _repopath(self):
748 return self._path
759 return self._path
749
760
750 def __nonzero__(self):
761 def __nonzero__(self):
751 try:
762 try:
752 self._filenode
763 self._filenode
753 return True
764 return True
754 except error.LookupError:
765 except error.LookupError:
755 # file is missing
766 # file is missing
756 return False
767 return False
757
768
758 __bool__ = __nonzero__
769 __bool__ = __nonzero__
759
770
760 def __bytes__(self):
771 def __bytes__(self):
761 try:
772 try:
762 return "%s@%s" % (self.path(), self._changectx)
773 return "%s@%s" % (self.path(), self._changectx)
763 except error.LookupError:
774 except error.LookupError:
764 return "%s@???" % self.path()
775 return "%s@???" % self.path()
765
776
766 __str__ = encoding.strmethod(__bytes__)
777 __str__ = encoding.strmethod(__bytes__)
767
778
768 def __repr__(self):
779 def __repr__(self):
769 return "<%s %s>" % (type(self).__name__, str(self))
780 return "<%s %s>" % (type(self).__name__, str(self))
770
781
771 def __hash__(self):
782 def __hash__(self):
772 try:
783 try:
773 return hash((self._path, self._filenode))
784 return hash((self._path, self._filenode))
774 except AttributeError:
785 except AttributeError:
775 return id(self)
786 return id(self)
776
787
777 def __eq__(self, other):
788 def __eq__(self, other):
778 try:
789 try:
779 return (type(self) == type(other) and self._path == other._path
790 return (type(self) == type(other) and self._path == other._path
780 and self._filenode == other._filenode)
791 and self._filenode == other._filenode)
781 except AttributeError:
792 except AttributeError:
782 return False
793 return False
783
794
784 def __ne__(self, other):
795 def __ne__(self, other):
785 return not (self == other)
796 return not (self == other)
786
797
787 def filerev(self):
798 def filerev(self):
788 return self._filerev
799 return self._filerev
789 def filenode(self):
800 def filenode(self):
790 return self._filenode
801 return self._filenode
791 @propertycache
802 @propertycache
792 def _flags(self):
803 def _flags(self):
793 return self._changectx.flags(self._path)
804 return self._changectx.flags(self._path)
794 def flags(self):
805 def flags(self):
795 return self._flags
806 return self._flags
796 def filelog(self):
807 def filelog(self):
797 return self._filelog
808 return self._filelog
798 def rev(self):
809 def rev(self):
799 return self._changeid
810 return self._changeid
800 def linkrev(self):
811 def linkrev(self):
801 return self._filelog.linkrev(self._filerev)
812 return self._filelog.linkrev(self._filerev)
802 def node(self):
813 def node(self):
803 return self._changectx.node()
814 return self._changectx.node()
804 def hex(self):
815 def hex(self):
805 return self._changectx.hex()
816 return self._changectx.hex()
806 def user(self):
817 def user(self):
807 return self._changectx.user()
818 return self._changectx.user()
808 def date(self):
819 def date(self):
809 return self._changectx.date()
820 return self._changectx.date()
810 def files(self):
821 def files(self):
811 return self._changectx.files()
822 return self._changectx.files()
812 def description(self):
823 def description(self):
813 return self._changectx.description()
824 return self._changectx.description()
814 def branch(self):
825 def branch(self):
815 return self._changectx.branch()
826 return self._changectx.branch()
816 def extra(self):
827 def extra(self):
817 return self._changectx.extra()
828 return self._changectx.extra()
818 def phase(self):
829 def phase(self):
819 return self._changectx.phase()
830 return self._changectx.phase()
820 def phasestr(self):
831 def phasestr(self):
821 return self._changectx.phasestr()
832 return self._changectx.phasestr()
822 def obsolete(self):
833 def obsolete(self):
823 return self._changectx.obsolete()
834 return self._changectx.obsolete()
824 def instabilities(self):
835 def instabilities(self):
825 return self._changectx.instabilities()
836 return self._changectx.instabilities()
826 def manifest(self):
837 def manifest(self):
827 return self._changectx.manifest()
838 return self._changectx.manifest()
828 def changectx(self):
839 def changectx(self):
829 return self._changectx
840 return self._changectx
830 def renamed(self):
841 def renamed(self):
831 return self._copied
842 return self._copied
832 def repo(self):
843 def repo(self):
833 return self._repo
844 return self._repo
834 def size(self):
845 def size(self):
835 return len(self.data())
846 return len(self.data())
836
847
837 def path(self):
848 def path(self):
838 return self._path
849 return self._path
839
850
840 def isbinary(self):
851 def isbinary(self):
841 try:
852 try:
842 return util.binary(self.data())
853 return util.binary(self.data())
843 except IOError:
854 except IOError:
844 return False
855 return False
845 def isexec(self):
856 def isexec(self):
846 return 'x' in self.flags()
857 return 'x' in self.flags()
847 def islink(self):
858 def islink(self):
848 return 'l' in self.flags()
859 return 'l' in self.flags()
849
860
850 def isabsent(self):
861 def isabsent(self):
851 """whether this filectx represents a file not in self._changectx
862 """whether this filectx represents a file not in self._changectx
852
863
853 This is mainly for merge code to detect change/delete conflicts. This is
864 This is mainly for merge code to detect change/delete conflicts. This is
854 expected to be True for all subclasses of basectx."""
865 expected to be True for all subclasses of basectx."""
855 return False
866 return False
856
867
857 _customcmp = False
868 _customcmp = False
858 def cmp(self, fctx):
869 def cmp(self, fctx):
859 """compare with other file context
870 """compare with other file context
860
871
861 returns True if different than fctx.
872 returns True if different than fctx.
862 """
873 """
863 if fctx._customcmp:
874 if fctx._customcmp:
864 return fctx.cmp(self)
875 return fctx.cmp(self)
865
876
866 if (fctx._filenode is None
877 if (fctx._filenode is None
867 and (self._repo._encodefilterpats
878 and (self._repo._encodefilterpats
868 # if file data starts with '\1\n', empty metadata block is
879 # if file data starts with '\1\n', empty metadata block is
869 # prepended, which adds 4 bytes to filelog.size().
880 # prepended, which adds 4 bytes to filelog.size().
870 or self.size() - 4 == fctx.size())
881 or self.size() - 4 == fctx.size())
871 or self.size() == fctx.size()):
882 or self.size() == fctx.size()):
872 return self._filelog.cmp(self._filenode, fctx.data())
883 return self._filelog.cmp(self._filenode, fctx.data())
873
884
874 return True
885 return True
875
886
876 def _adjustlinkrev(self, srcrev, inclusive=False):
887 def _adjustlinkrev(self, srcrev, inclusive=False):
877 """return the first ancestor of <srcrev> introducing <fnode>
888 """return the first ancestor of <srcrev> introducing <fnode>
878
889
879 If the linkrev of the file revision does not point to an ancestor of
890 If the linkrev of the file revision does not point to an ancestor of
880 srcrev, we'll walk down the ancestors until we find one introducing
891 srcrev, we'll walk down the ancestors until we find one introducing
881 this file revision.
892 this file revision.
882
893
883 :srcrev: the changeset revision we search ancestors from
894 :srcrev: the changeset revision we search ancestors from
884 :inclusive: if true, the src revision will also be checked
895 :inclusive: if true, the src revision will also be checked
885 """
896 """
886 repo = self._repo
897 repo = self._repo
887 cl = repo.unfiltered().changelog
898 cl = repo.unfiltered().changelog
888 mfl = repo.manifestlog
899 mfl = repo.manifestlog
889 # fetch the linkrev
900 # fetch the linkrev
890 lkr = self.linkrev()
901 lkr = self.linkrev()
891 # hack to reuse ancestor computation when searching for renames
902 # hack to reuse ancestor computation when searching for renames
892 memberanc = getattr(self, '_ancestrycontext', None)
903 memberanc = getattr(self, '_ancestrycontext', None)
893 iteranc = None
904 iteranc = None
894 if srcrev is None:
905 if srcrev is None:
895 # wctx case, used by workingfilectx during mergecopy
906 # wctx case, used by workingfilectx during mergecopy
896 revs = [p.rev() for p in self._repo[None].parents()]
907 revs = [p.rev() for p in self._repo[None].parents()]
897 inclusive = True # we skipped the real (revless) source
908 inclusive = True # we skipped the real (revless) source
898 else:
909 else:
899 revs = [srcrev]
910 revs = [srcrev]
900 if memberanc is None:
911 if memberanc is None:
901 memberanc = iteranc = cl.ancestors(revs, lkr,
912 memberanc = iteranc = cl.ancestors(revs, lkr,
902 inclusive=inclusive)
913 inclusive=inclusive)
903 # check if this linkrev is an ancestor of srcrev
914 # check if this linkrev is an ancestor of srcrev
904 if lkr not in memberanc:
915 if lkr not in memberanc:
905 if iteranc is None:
916 if iteranc is None:
906 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
917 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
907 fnode = self._filenode
918 fnode = self._filenode
908 path = self._path
919 path = self._path
909 for a in iteranc:
920 for a in iteranc:
910 ac = cl.read(a) # get changeset data (we avoid object creation)
921 ac = cl.read(a) # get changeset data (we avoid object creation)
911 if path in ac[3]: # checking the 'files' field.
922 if path in ac[3]: # checking the 'files' field.
912 # The file has been touched, check if the content is
923 # The file has been touched, check if the content is
913 # similar to the one we search for.
924 # similar to the one we search for.
914 if fnode == mfl[ac[0]].readfast().get(path):
925 if fnode == mfl[ac[0]].readfast().get(path):
915 return a
926 return a
916 # In theory, we should never get out of that loop without a result.
927 # In theory, we should never get out of that loop without a result.
917 # But if manifest uses a buggy file revision (not children of the
928 # But if manifest uses a buggy file revision (not children of the
918 # one it replaces) we could. Such a buggy situation will likely
929 # one it replaces) we could. Such a buggy situation will likely
919 # result is crash somewhere else at to some point.
930 # result is crash somewhere else at to some point.
920 return lkr
931 return lkr
921
932
922 def introrev(self):
933 def introrev(self):
923 """return the rev of the changeset which introduced this file revision
934 """return the rev of the changeset which introduced this file revision
924
935
925 This method is different from linkrev because it take into account the
936 This method is different from linkrev because it take into account the
926 changeset the filectx was created from. It ensures the returned
937 changeset the filectx was created from. It ensures the returned
927 revision is one of its ancestors. This prevents bugs from
938 revision is one of its ancestors. This prevents bugs from
928 'linkrev-shadowing' when a file revision is used by multiple
939 'linkrev-shadowing' when a file revision is used by multiple
929 changesets.
940 changesets.
930 """
941 """
931 lkr = self.linkrev()
942 lkr = self.linkrev()
932 attrs = vars(self)
943 attrs = vars(self)
933 noctx = not ('_changeid' in attrs or '_changectx' in attrs)
944 noctx = not ('_changeid' in attrs or '_changectx' in attrs)
934 if noctx or self.rev() == lkr:
945 if noctx or self.rev() == lkr:
935 return self.linkrev()
946 return self.linkrev()
936 return self._adjustlinkrev(self.rev(), inclusive=True)
947 return self._adjustlinkrev(self.rev(), inclusive=True)
937
948
938 def _parentfilectx(self, path, fileid, filelog):
949 def _parentfilectx(self, path, fileid, filelog):
939 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
950 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
940 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
951 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
941 if '_changeid' in vars(self) or '_changectx' in vars(self):
952 if '_changeid' in vars(self) or '_changectx' in vars(self):
942 # If self is associated with a changeset (probably explicitly
953 # If self is associated with a changeset (probably explicitly
943 # fed), ensure the created filectx is associated with a
954 # fed), ensure the created filectx is associated with a
944 # changeset that is an ancestor of self.changectx.
955 # changeset that is an ancestor of self.changectx.
945 # This lets us later use _adjustlinkrev to get a correct link.
956 # This lets us later use _adjustlinkrev to get a correct link.
946 fctx._descendantrev = self.rev()
957 fctx._descendantrev = self.rev()
947 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
958 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
948 elif '_descendantrev' in vars(self):
959 elif '_descendantrev' in vars(self):
949 # Otherwise propagate _descendantrev if we have one associated.
960 # Otherwise propagate _descendantrev if we have one associated.
950 fctx._descendantrev = self._descendantrev
961 fctx._descendantrev = self._descendantrev
951 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
962 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
952 return fctx
963 return fctx
953
964
954 def parents(self):
965 def parents(self):
955 _path = self._path
966 _path = self._path
956 fl = self._filelog
967 fl = self._filelog
957 parents = self._filelog.parents(self._filenode)
968 parents = self._filelog.parents(self._filenode)
958 pl = [(_path, node, fl) for node in parents if node != nullid]
969 pl = [(_path, node, fl) for node in parents if node != nullid]
959
970
960 r = fl.renamed(self._filenode)
971 r = fl.renamed(self._filenode)
961 if r:
972 if r:
962 # - In the simple rename case, both parent are nullid, pl is empty.
973 # - In the simple rename case, both parent are nullid, pl is empty.
963 # - In case of merge, only one of the parent is null id and should
974 # - In case of merge, only one of the parent is null id and should
964 # be replaced with the rename information. This parent is -always-
975 # be replaced with the rename information. This parent is -always-
965 # the first one.
976 # the first one.
966 #
977 #
967 # As null id have always been filtered out in the previous list
978 # As null id have always been filtered out in the previous list
968 # comprehension, inserting to 0 will always result in "replacing
979 # comprehension, inserting to 0 will always result in "replacing
969 # first nullid parent with rename information.
980 # first nullid parent with rename information.
970 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
981 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
971
982
972 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
983 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
973
984
974 def p1(self):
985 def p1(self):
975 return self.parents()[0]
986 return self.parents()[0]
976
987
977 def p2(self):
988 def p2(self):
978 p = self.parents()
989 p = self.parents()
979 if len(p) == 2:
990 if len(p) == 2:
980 return p[1]
991 return p[1]
981 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
992 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
982
993
983 def annotate(self, follow=False, linenumber=False, skiprevs=None,
994 def annotate(self, follow=False, linenumber=False, skiprevs=None,
984 diffopts=None):
995 diffopts=None):
985 '''returns a list of tuples of ((ctx, number), line) for each line
996 '''returns a list of tuples of ((ctx, number), line) for each line
986 in the file, where ctx is the filectx of the node where
997 in the file, where ctx is the filectx of the node where
987 that line was last changed; if linenumber parameter is true, number is
998 that line was last changed; if linenumber parameter is true, number is
988 the line number at the first appearance in the managed file, otherwise,
999 the line number at the first appearance in the managed file, otherwise,
989 number has a fixed value of False.
1000 number has a fixed value of False.
990 '''
1001 '''
991
1002
992 def lines(text):
1003 def lines(text):
993 if text.endswith("\n"):
1004 if text.endswith("\n"):
994 return text.count("\n")
1005 return text.count("\n")
995 return text.count("\n") + int(bool(text))
1006 return text.count("\n") + int(bool(text))
996
1007
997 if linenumber:
1008 if linenumber:
998 def decorate(text, rev):
1009 def decorate(text, rev):
999 return ([annotateline(fctx=rev, lineno=i)
1010 return ([annotateline(fctx=rev, lineno=i)
1000 for i in xrange(1, lines(text) + 1)], text)
1011 for i in xrange(1, lines(text) + 1)], text)
1001 else:
1012 else:
1002 def decorate(text, rev):
1013 def decorate(text, rev):
1003 return ([annotateline(fctx=rev)] * lines(text), text)
1014 return ([annotateline(fctx=rev)] * lines(text), text)
1004
1015
1005 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
1016 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
1006
1017
1007 def parents(f):
1018 def parents(f):
1008 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
1019 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
1009 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
1020 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
1010 # from the topmost introrev (= srcrev) down to p.linkrev() if it
1021 # from the topmost introrev (= srcrev) down to p.linkrev() if it
1011 # isn't an ancestor of the srcrev.
1022 # isn't an ancestor of the srcrev.
1012 f._changeid
1023 f._changeid
1013 pl = f.parents()
1024 pl = f.parents()
1014
1025
1015 # Don't return renamed parents if we aren't following.
1026 # Don't return renamed parents if we aren't following.
1016 if not follow:
1027 if not follow:
1017 pl = [p for p in pl if p.path() == f.path()]
1028 pl = [p for p in pl if p.path() == f.path()]
1018
1029
1019 # renamed filectx won't have a filelog yet, so set it
1030 # renamed filectx won't have a filelog yet, so set it
1020 # from the cache to save time
1031 # from the cache to save time
1021 for p in pl:
1032 for p in pl:
1022 if not '_filelog' in p.__dict__:
1033 if not '_filelog' in p.__dict__:
1023 p._filelog = getlog(p.path())
1034 p._filelog = getlog(p.path())
1024
1035
1025 return pl
1036 return pl
1026
1037
1027 # use linkrev to find the first changeset where self appeared
1038 # use linkrev to find the first changeset where self appeared
1028 base = self
1039 base = self
1029 introrev = self.introrev()
1040 introrev = self.introrev()
1030 if self.rev() != introrev:
1041 if self.rev() != introrev:
1031 base = self.filectx(self.filenode(), changeid=introrev)
1042 base = self.filectx(self.filenode(), changeid=introrev)
1032 if getattr(base, '_ancestrycontext', None) is None:
1043 if getattr(base, '_ancestrycontext', None) is None:
1033 cl = self._repo.changelog
1044 cl = self._repo.changelog
1034 if introrev is None:
1045 if introrev is None:
1035 # wctx is not inclusive, but works because _ancestrycontext
1046 # wctx is not inclusive, but works because _ancestrycontext
1036 # is used to test filelog revisions
1047 # is used to test filelog revisions
1037 ac = cl.ancestors([p.rev() for p in base.parents()],
1048 ac = cl.ancestors([p.rev() for p in base.parents()],
1038 inclusive=True)
1049 inclusive=True)
1039 else:
1050 else:
1040 ac = cl.ancestors([introrev], inclusive=True)
1051 ac = cl.ancestors([introrev], inclusive=True)
1041 base._ancestrycontext = ac
1052 base._ancestrycontext = ac
1042
1053
1043 # This algorithm would prefer to be recursive, but Python is a
1054 # This algorithm would prefer to be recursive, but Python is a
1044 # bit recursion-hostile. Instead we do an iterative
1055 # bit recursion-hostile. Instead we do an iterative
1045 # depth-first search.
1056 # depth-first search.
1046
1057
1047 # 1st DFS pre-calculates pcache and needed
1058 # 1st DFS pre-calculates pcache and needed
1048 visit = [base]
1059 visit = [base]
1049 pcache = {}
1060 pcache = {}
1050 needed = {base: 1}
1061 needed = {base: 1}
1051 while visit:
1062 while visit:
1052 f = visit.pop()
1063 f = visit.pop()
1053 if f in pcache:
1064 if f in pcache:
1054 continue
1065 continue
1055 pl = parents(f)
1066 pl = parents(f)
1056 pcache[f] = pl
1067 pcache[f] = pl
1057 for p in pl:
1068 for p in pl:
1058 needed[p] = needed.get(p, 0) + 1
1069 needed[p] = needed.get(p, 0) + 1
1059 if p not in pcache:
1070 if p not in pcache:
1060 visit.append(p)
1071 visit.append(p)
1061
1072
1062 # 2nd DFS does the actual annotate
1073 # 2nd DFS does the actual annotate
1063 visit[:] = [base]
1074 visit[:] = [base]
1064 hist = {}
1075 hist = {}
1065 while visit:
1076 while visit:
1066 f = visit[-1]
1077 f = visit[-1]
1067 if f in hist:
1078 if f in hist:
1068 visit.pop()
1079 visit.pop()
1069 continue
1080 continue
1070
1081
1071 ready = True
1082 ready = True
1072 pl = pcache[f]
1083 pl = pcache[f]
1073 for p in pl:
1084 for p in pl:
1074 if p not in hist:
1085 if p not in hist:
1075 ready = False
1086 ready = False
1076 visit.append(p)
1087 visit.append(p)
1077 if ready:
1088 if ready:
1078 visit.pop()
1089 visit.pop()
1079 curr = decorate(f.data(), f)
1090 curr = decorate(f.data(), f)
1080 skipchild = False
1091 skipchild = False
1081 if skiprevs is not None:
1092 if skiprevs is not None:
1082 skipchild = f._changeid in skiprevs
1093 skipchild = f._changeid in skiprevs
1083 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1094 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1084 diffopts)
1095 diffopts)
1085 for p in pl:
1096 for p in pl:
1086 if needed[p] == 1:
1097 if needed[p] == 1:
1087 del hist[p]
1098 del hist[p]
1088 del needed[p]
1099 del needed[p]
1089 else:
1100 else:
1090 needed[p] -= 1
1101 needed[p] -= 1
1091
1102
1092 hist[f] = curr
1103 hist[f] = curr
1093 del pcache[f]
1104 del pcache[f]
1094
1105
1095 return zip(hist[base][0], hist[base][1].splitlines(True))
1106 return zip(hist[base][0], hist[base][1].splitlines(True))
1096
1107
1097 def ancestors(self, followfirst=False):
1108 def ancestors(self, followfirst=False):
1098 visit = {}
1109 visit = {}
1099 c = self
1110 c = self
1100 if followfirst:
1111 if followfirst:
1101 cut = 1
1112 cut = 1
1102 else:
1113 else:
1103 cut = None
1114 cut = None
1104
1115
1105 while True:
1116 while True:
1106 for parent in c.parents()[:cut]:
1117 for parent in c.parents()[:cut]:
1107 visit[(parent.linkrev(), parent.filenode())] = parent
1118 visit[(parent.linkrev(), parent.filenode())] = parent
1108 if not visit:
1119 if not visit:
1109 break
1120 break
1110 c = visit.pop(max(visit))
1121 c = visit.pop(max(visit))
1111 yield c
1122 yield c
1112
1123
1113 def decodeddata(self):
1124 def decodeddata(self):
1114 """Returns `data()` after running repository decoding filters.
1125 """Returns `data()` after running repository decoding filters.
1115
1126
1116 This is often equivalent to how the data would be expressed on disk.
1127 This is often equivalent to how the data would be expressed on disk.
1117 """
1128 """
1118 return self._repo.wwritedata(self.path(), self.data())
1129 return self._repo.wwritedata(self.path(), self.data())
1119
1130
@attr.s(slots=True, frozen=True)
class annotateline(object):
    """Immutable record describing the origin of one annotated line."""
    # filectx of the revision the line is attributed to
    fctx = attr.ib()
    # line number at the line's first appearance in the managed file, or
    # False when line numbers were not requested (see basefilectx.annotate)
    lineno = attr.ib(default=False)
    # Whether this annotation was the result of a skip-annotate.
    skip = attr.ib(default=False)
1126
1137
def _annotatepair(parents, childfctx, child, skipchild, diffopts):
    r'''
    Given parent and child fctxes and annotate data for parents, for all lines
    in either parent that match the child, annotate the child with the parent's
    data.

    Additionally, if `skipchild` is True, replace all other lines with parent
    annotate data as well such that child is never blamed for any lines.

    Args:
      parents: list of (annotateline list, text) pairs, one per parent, as
        produced by the annotate machinery.
      childfctx: filectx of the child revision being annotated.
      child: (annotateline list, text) pair for the child, with every line
        initially attributed to the child itself.
      skipchild: if True, re-attribute every remaining child line to a parent.
      diffopts: options forwarded to mdiff.allblocks.

    Returns the updated `child` pair.

    See test-annotate.py for unit tests.
    '''
    # diff each parent's text against the child's once
    pblocks = [(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
               for parent in parents]

    if skipchild:
        # Need to iterate over the blocks twice -- make it a list
        pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
    # Mercurial currently prefers p2 over p1 for annotate.
    # TODO: change this?
    for parent, blocks in pblocks:
        for (a1, a2, b1, b2), t in blocks:
            # Changed blocks ('!') or blocks made only of blank lines ('~')
            # belong to the child.
            if t == '=':
                child[0][b1:b2] = parent[0][a1:a2]

    if skipchild:
        # Now try and match up anything that couldn't be matched,
        # Reversing pblocks maintains bias towards p2, matching above
        # behavior.
        pblocks.reverse()

        # The heuristics are:
        # * Work on blocks of changed lines (effectively diff hunks with -U0).
        # This could potentially be smarter but works well enough.
        # * For a non-matching section, do a best-effort fit. Match lines in
        # diff hunks 1:1, dropping lines as necessary.
        # * Repeat the last line as a last resort.

        # First, replace as much as possible without repeating the last line.
        remaining = [(parent, []) for parent, _blocks in pblocks]
        for idx, (parent, blocks) in enumerate(pblocks):
            for (a1, a2, b1, b2), _t in blocks:
                if a2 - a1 >= b2 - b1:
                    for bk in xrange(b1, b2):
                        # only re-attribute lines still blamed on the child
                        if child[0][bk].fctx == childfctx:
                            ak = min(a1 + (bk - b1), a2 - 1)
                            child[0][bk] = attr.evolve(parent[0][ak], skip=True)
                else:
                    remaining[idx][1].append((a1, a2, b1, b2))

        # Then, look at anything left, which might involve repeating the last
        # line.
        for parent, blocks in remaining:
            for a1, a2, b1, b2 in blocks:
                for bk in xrange(b1, b2):
                    if child[0][bk].fctx == childfctx:
                        ak = min(a1 + (bk - b1), a2 - 1)
                        child[0][bk] = attr.evolve(parent[0][ak], skip=True)
    return child
1187
1198
class filectx(basefilectx):
    """A filecontext object makes access to data related to a particular
    filerevision convenient."""
    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node.

        At least one of changeid, fileid, or changectx must be supplied;
        filelog may be passed to reuse an already-opened filelog.
        """
        self._repo = repo
        self._path = path

        assert (changeid is not None
                or fileid is not None
                or changectx is not None), \
                ("bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))

        # only set the attributes we were actually given; the rest are
        # computed lazily via propertycache on the base class
        if filelog is not None:
            self._filelog = filelog

        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid

    @propertycache
    def _changectx(self):
        # changectx for self._changeid, falling back to the unfiltered
        # repository when the linkrev points at a filtered revision
        try:
            return changectx(self._repo, self._changeid)
        except error.FilteredRepoLookupError:
            # Linkrev may point to any revision in the repository. When the
            # repository is filtered this may lead to `filectx` trying to build
            # `changectx` for filtered revision. In such case we fallback to
            # creating `changectx` on the unfiltered version of the repository.
            # This fallback should not be an issue because `changectx` from
            # `filectx` are not used in complex operations that care about
            # filtering.
            #
            # This fallback is a cheap and dirty fix that prevent several
            # crashes. It does not ensure the behavior is correct. However the
            # behavior was not correct before filtering either and "incorrect
            # behavior" is seen as better as "crash"
            #
            # Linkrevs have several serious troubles with filtering that are
            # complicated to solve. Proper handling of the issue here should be
            # considered when solving linkrev issue are on the table.
            return changectx(self._repo.unfiltered(), self._changeid)

    def filectx(self, fileid, changeid=None):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._path, fileid=fileid,
                       filelog=self._filelog, changeid=changeid)

    def rawdata(self):
        # revision data straight from the filelog, without flag processing
        return self._filelog.revision(self._filenode, raw=True)

    def rawflags(self):
        """low-level revlog flags"""
        return self._filelog.flags(self._filerev)

    def data(self):
        """Return the file revision's contents.

        Raises error.Abort for censored nodes unless censor.policy is
        "ignore", in which case an empty string is returned.
        """
        try:
            return self._filelog.read(self._filenode)
        except error.CensoredNodeError:
            if self._repo.ui.config("censor", "policy") == "ignore":
                return ""
            raise error.Abort(_("censored node: %s") % short(self._filenode),
                              hint=_("set censor.policy to ignore errors"))

    def size(self):
        return self._filelog.size(self._filerev)

    @propertycache
    def _copied(self):
        """check if file was actually renamed in this changeset revision

        If rename logged in file revision, we report copy for changeset only
        if file revisions linkrev points back to the changeset in question
        or both changeset parents contain different file revisions.
        """

        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return renamed

        if self.rev() == self.linkrev():
            return renamed

        name = self.path()
        fnode = self._filenode
        for p in self._changectx.parents():
            try:
                # a parent already carries this exact file revision, so the
                # rename is not relevant to this changeset
                if fnode == p.filenode(name):
                    return None
            except error.LookupError:
                pass
        return renamed

    def children(self):
        # hard for renames
        c = self._filelog.children(self._filenode)
        return [filectx(self._repo, self._path, fileid=x,
                        filelog=self._filelog) for x in c]
1293
1304
class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        """Initialize an as-yet-uncommitted context.

        text - commit message.
        user - username string, or None to look it up from the ui lazily.
        date - any valid date string or (unixtime, offset), or None.
        extra - a dictionary of extra values, or None; a missing 'branch'
        entry is filled in from the dirstate.
        changes - a status tuple as returned by localrepo.status(), or None
        to compute the status lazily.
        """
        self._repo = repo
        # no revision number or node until the context is committed
        self._rev = None
        self._node = None
        self._text = text
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise error.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
        if self._extra['branch'] == '':
            self._extra['branch'] = 'default'

    def __bytes__(self):
        return bytes(self._parents[0]) + "+"

    __str__ = encoding.strmethod(__bytes__)

    def __nonzero__(self):
        return True

    __bool__ = __nonzero__

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get
        parents = self.parents()
        if len(parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        # flag lookup via the dirstate, with _buildflagfunc as fallback
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _status(self):
        # computed lazily unless 'changes' was passed to __init__
        return self._repo.status()

    @propertycache
    def _user(self):
        # computed lazily unless 'user' was passed to __init__
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        # computed lazily unless 'date' was passed to __init__; honors the
        # devel.default-date override before falling back to "now"
        ui = self._repo.ui
        date = ui.configdate('devel', 'default-date')
        if date is None:
            date = util.makedate()
        return date

    def subrev(self, subpath):
        return None

    def manifestnode(self):
        return None
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        return sorted(self._status.modified + self._status.added +
                      self._status.removed)

    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def isinmemory(self):
        return False

    def tags(self):
        return []

    def bookmarks(self):
        # union of all parents' bookmarks
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        # the highest phase among the parents, defaulting to draft
        phase = phases.draft # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        return []

    def flags(self, path):
        """Return the flags for 'path', or '' when it is unknown."""
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            return ''

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(self._repo.dirstate.walk(match,
                                               subrepos=sorted(self.substate),
                                               unknown=True, ignored=False))

    def matches(self, match):
        return sorted(self._repo.dirstate.matches(match))

    def ancestors(self):
        # the parents themselves, then all their changelog ancestors
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """

        with self._repo.dirstate.parentchange():
            for f in self.modified() + self.added():
                self._repo.dirstate.normal(f)
            for f in self.removed():
                self._repo.dirstate.drop(f)
            self._repo.dirstate.setparents(node)

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())

    def dirty(self, missing=False, merge=True, branch=True):
        # a committable context is never considered dirty itself
        return False
1494
1505
class workingctx(committablectx):
    """A workingctx object makes access to data related to
    the current working directory convenient.
    date - any valid date string or (unixtime, offset), or None.
    user - username string, or None.
    extra - a dictionary of extra values, or None.
    changes - a list of file lists as returned by localrepo.status()
              or None to use the repository status.
    """
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        super(workingctx, self).__init__(repo, text, user, date, extra, changes)

    def __iter__(self):
        # yield every tracked file that is not marked removed ('r')
        ds = self._repo.dirstate
        for f in ds:
            if ds[f] != 'r':
                yield f

    def __contains__(self, key):
        # a file belongs to the working context unless it is unknown ('?')
        # or removed ('r')
        return self._repo.dirstate[key] not in "?r"

    def hex(self):
        return hex(wdirid)

    @propertycache
    def _parents(self):
        p = self._repo.dirstate.parents()
        if p[1] == nullid:
            # non-merge state: drop the null second parent
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]

    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return workingfilectx(self._repo, path, workingctx=self,
                              filelog=filelog)

    def dirty(self, missing=False, merge=True, branch=True):
        "check whether a working directory is modified"
        # check subrepos first
        for s in sorted(self.substate):
            if self.sub(s).dirty(missing=missing):
                return True
        # check current working dir
        return ((merge and self.p2()) or
                (branch and self.branch() != self.p1().branch()) or
                self.modified() or self.added() or self.removed() or
                (missing and self.deleted()))

    def add(self, list, prefix=""):
        with self._repo.wlock():
            ui, ds = self._repo.ui, self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            lstat = self._repo.wvfs.lstat
            for f in list:
                # ds.pathto() returns an absolute file when this is invoked from
                # the keyword extension.  That gets flagged as non-portable on
                # Windows, since it contains the drive letter and colon.
                scmutil.checkportable(ui, os.path.join(prefix, f))
                try:
                    st = lstat(f)
                except OSError:
                    ui.warn(_("%s does not exist!\n") % uipath(f))
                    rejected.append(f)
                    continue
                if st.st_size > 10000000:
                    ui.warn(_("%s: up to %d MB of RAM may be required "
                              "to manage this file\n"
                              "(use 'hg revert %s' to cancel the "
                              "pending addition)\n")
                            % (f, 3 * st.st_size // 1000000, uipath(f)))
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    ui.warn(_("%s not added: only files and symlinks "
                              "supported currently\n") % uipath(f))
                    rejected.append(f)
                elif ds[f] in 'amn':
                    ui.warn(_("%s already tracked!\n") % uipath(f))
                elif ds[f] == 'r':
                    ds.normallookup(f)
                else:
                    ds.add(f)
            return rejected

    def forget(self, files, prefix=""):
        with self._repo.wlock():
            ds = self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            for f in files:
                if f not in self._repo.dirstate:
                    self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
                    rejected.append(f)
                elif self._repo.dirstate[f] != 'a':
                    self._repo.dirstate.remove(f)
                else:
                    self._repo.dirstate.drop(f)
            return rejected

    def undelete(self, list):
        pctxs = self.parents()
        with self._repo.wlock():
            ds = self._repo.dirstate
            for f in list:
                if self._repo.dirstate[f] != 'r':
                    self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
                else:
                    # restore the file contents from the first parent that
                    # has it
                    fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
                    t = fctx.data()
                    self._repo.wwrite(f, t, fctx.flags())
                    self._repo.dirstate.normal(f)

    def copy(self, source, dest):
        try:
            st = self._repo.wvfs.lstat(dest)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            self._repo.ui.warn(_("%s does not exist!\n")
                               % self._repo.dirstate.pathto(dest))
            return
        if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
            self._repo.ui.warn(_("copy failed: %s is not a file or a "
                                 "symbolic link\n")
                               % self._repo.dirstate.pathto(dest))
        else:
            with self._repo.wlock():
                if self._repo.dirstate[dest] in '?':
                    self._repo.dirstate.add(dest)
                elif self._repo.dirstate[dest] in 'r':
                    self._repo.dirstate.normallookup(dest)
                self._repo.dirstate.copy(source, dest)

    def match(self, pats=None, include=None, exclude=None, default='glob',
              listsubrepos=False, badfn=None):
        r = self._repo

        # Only a case insensitive filesystem needs magic to translate user input
        # to actual case in the filesystem.
        icasefs = not util.fscasesensitive(r.root)
        return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
                              default, auditor=r.auditor, ctx=self,
                              listsubrepos=listsubrepos, badfn=badfn,
                              icasefs=icasefs)

    def flushall(self):
        pass # For overlayworkingfilectx compatibility.

    def _filtersuspectsymlink(self, files):
        if not files or self._repo.dirstate._checklink:
            return files

        # Symlink placeholders may get non-symlink-like contents
        # via user error or dereferencing by NFS or Samba servers,
        # so we filter out any placeholders that don't look like a
        # symlink
        sane = []
        for f in files:
            if self.flags(f) == 'l':
                d = self[f].data()
                if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
                    self._repo.ui.debug('ignoring suspect symlink placeholder'
                                        ' "%s"\n' % f)
                    continue
            sane.append(f)
        return sane

    def _checklookup(self, files):
        # check for any possibly clean files
        if not files:
            return [], [], []

        modified = []
        deleted = []
        fixup = []
        pctx = self._parents[0]
        # do a full compare of any files that might have changed
        for f in sorted(files):
            try:
                # This will return True for a file that got replaced by a
                # directory in the interim, but fixing that is pretty hard.
                if (f not in pctx or self.flags(f) != pctx.flags(f)
                    or pctx[f].cmp(self[f])):
                    modified.append(f)
                else:
                    fixup.append(f)
            except (IOError, OSError):
                # A file become inaccessible in between? Mark it as deleted,
                # matching dirstate behavior (issue5584).
                # The dirstate has more complex behavior around whether a
                # missing file matches a directory, etc, but we don't need to
                # bother with that: if f has made it to this point, we're sure
                # it's in the dirstate.
                deleted.append(f)

        return modified, deleted, fixup

    def _poststatusfixup(self, status, fixup):
        """update dirstate for files that are actually clean"""
        poststatus = self._repo.postdsstatus()
        if fixup or poststatus:
            try:
                oldid = self._repo.dirstate.identity()

                # updating the dirstate is optional
                # so we don't wait on the lock
                # wlock can invalidate the dirstate, so cache normal _after_
                # taking the lock
                with self._repo.wlock(False):
                    if self._repo.dirstate.identity() == oldid:
                        if fixup:
                            normal = self._repo.dirstate.normal
                            for f in fixup:
                                normal(f)
                            # write changes out explicitly, because nesting
                            # wlock at runtime may prevent 'wlock.release()'
                            # after this block from doing so for subsequent
                            # changing files
                            tr = self._repo.currenttransaction()
                            self._repo.dirstate.write(tr)

                        if poststatus:
                            for ps in poststatus:
                                ps(self, status)
                    else:
                        # in this case, writing changes out breaks
                        # consistency, because .hg/dirstate was
                        # already changed simultaneously after last
                        # caching (see also issue5584 for detail)
                        self._repo.ui.debug('skip updating dirstate: '
                                            'identity mismatch\n')
            except error.LockError:
                pass
            finally:
                # Even if the wlock couldn't be grabbed, clear out the list.
                self._repo.clearpostdsstatus()

    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        '''Gets the status from the dirstate -- internal use only.'''
        subrepos = []
        if '.hgsub' in self:
            subrepos = sorted(self.substate)
        cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
                                            clean=clean, unknown=unknown)

        # check for any possibly clean files
        fixup = []
        if cmp:
            modified2, deleted2, fixup = self._checklookup(cmp)
            s.modified.extend(modified2)
            s.deleted.extend(deleted2)

        if fixup and clean:
            s.clean.extend(fixup)

        self._poststatusfixup(s, fixup)

        if match.always():
            # cache for performance
            if s.unknown or s.ignored or s.clean:
                # "_status" is cached with list*=False in the normal route
                self._status = scmutil.status(s.modified, s.added, s.removed,
                                              s.deleted, [], [], [])
            else:
                self._status = s

        return s

    @propertycache
    def _manifest(self):
        """generate a manifest corresponding to the values in self._status

        This reuse the file nodeid from parent, but we use special node
        identifiers for added and modified files. This is used by manifests
        merge to see that files are different and by update logic to avoid
        deleting newly added files.
        """
        return self._buildstatusmanifest(self._status)

    def _buildstatusmanifest(self, status):
        """Builds a manifest that includes the given status results."""
        parents = self.parents()

        man = parents[0].manifest().copy()

        ff = self._flagfunc
        for i, l in ((addednodeid, status.added),
                     (modifiednodeid, status.modified)):
            for f in l:
                man[f] = i
                try:
                    man.setflag(f, ff(f))
                except OSError:
                    pass

        for f in status.deleted + status.removed:
            if f in man:
                del man[f]

        return man

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context

        This includes logic for maintaining the fast path of status when
        comparing the working directory against its parent, which is to skip
        building a new manifest if self (working directory) is not comparing
        against its parent (repo['.']).
        """
        s = self._dirstatestatus(match, listignored, listclean, listunknown)
        # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
        # might have accidentally ended up with the entire contents of the file
        # they are supposed to be linking to.
        s.modified[:] = self._filtersuspectsymlink(s.modified)
        if other != self._repo['.']:
            s = super(workingctx, self)._buildstatus(other, s, match,
                                                     listignored, listclean,
                                                     listunknown)
        return s

    def _matchstatus(self, other, match):
        """override the match method with a filter for directory patterns

        We use inheritance to customize the match.bad method only in cases of
        workingctx since it belongs only to the working directory when
        comparing against the parent changeset.

        If we aren't comparing against the working directory's parent, then we
        just use the default match object sent to us.
        """
        if other != self._repo['.']:
            def bad(f, msg):
                # 'f' may be a directory pattern from 'match.files()',
                # so 'f not in ctx1' is not enough
                if f not in other and not other.hasdir(f):
                    self._repo.ui.warn('%s: %s\n' %
                                       (self._repo.dirstate.pathto(f), msg))
            match.bad = bad
        return match

    def markcommitted(self, node):
        super(workingctx, self).markcommitted(node)

        sparse.aftercommit(self._repo, node)
class committablefilectx(basefilectx):
    """A committablefilectx provides common functionality for a file context
    that wants the ability to commit, e.g. workingfilectx or memfilectx."""
    def __init__(self, repo, path, filelog=None, ctx=None):
        self._repo = repo
        self._path = path
        self._changeid = None
        self._filerev = self._filenode = None

        if filelog is not None:
            self._filelog = filelog
        if ctx:
            self._changectx = ctx

    def __nonzero__(self):
        return True

    __bool__ = __nonzero__

    def linkrev(self):
        # linked to self._changectx no matter if file is modified or not
        return self.rev()

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        def filenode(ctx, path):
            return ctx._manifest.get(path, nullid)

        path = self._path
        fl = self._filelog
        pcl = self._changectx._parents
        renamed = self.renamed()

        if renamed:
            pl = [renamed + (None,)]
        else:
            pl = [(path, filenode(pcl[0], path), fl)]

        for pc in pcl[1:]:
            pl.append((path, filenode(pc, path), fl))

        return [self._parentfilectx(p, fileid=n, filelog=l)
                for p, n, l in pl if n != nullid]

    def children(self):
        return []
1888 class workingfilectx(committablefilectx):
1899 class workingfilectx(committablefilectx):
1889 """A workingfilectx object makes access to data related to a particular
1900 """A workingfilectx object makes access to data related to a particular
1890 file in the working directory convenient."""
1901 file in the working directory convenient."""
1891 def __init__(self, repo, path, filelog=None, workingctx=None):
1902 def __init__(self, repo, path, filelog=None, workingctx=None):
1892 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1903 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1893
1904
1894 @propertycache
1905 @propertycache
1895 def _changectx(self):
1906 def _changectx(self):
1896 return workingctx(self._repo)
1907 return workingctx(self._repo)
1897
1908
1898 def data(self):
1909 def data(self):
1899 return self._repo.wread(self._path)
1910 return self._repo.wread(self._path)
1900 def renamed(self):
1911 def renamed(self):
1901 rp = self._repo.dirstate.copied(self._path)
1912 rp = self._repo.dirstate.copied(self._path)
1902 if not rp:
1913 if not rp:
1903 return None
1914 return None
1904 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1915 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1905
1916
1906 def size(self):
1917 def size(self):
1907 return self._repo.wvfs.lstat(self._path).st_size
1918 return self._repo.wvfs.lstat(self._path).st_size
1908 def date(self):
1919 def date(self):
1909 t, tz = self._changectx.date()
1920 t, tz = self._changectx.date()
1910 try:
1921 try:
1911 return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
1922 return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
1912 except OSError as err:
1923 except OSError as err:
1913 if err.errno != errno.ENOENT:
1924 if err.errno != errno.ENOENT:
1914 raise
1925 raise
1915 return (t, tz)
1926 return (t, tz)
1916
1927
1917 def exists(self):
1928 def exists(self):
1918 return self._repo.wvfs.exists(self._path)
1929 return self._repo.wvfs.exists(self._path)
1919
1930
1920 def lexists(self):
1931 def lexists(self):
1921 return self._repo.wvfs.lexists(self._path)
1932 return self._repo.wvfs.lexists(self._path)
1922
1933
1923 def audit(self):
1934 def audit(self):
1924 return self._repo.wvfs.audit(self._path)
1935 return self._repo.wvfs.audit(self._path)
1925
1936
1926 def cmp(self, fctx):
1937 def cmp(self, fctx):
1927 """compare with other file context
1938 """compare with other file context
1928
1939
1929 returns True if different than fctx.
1940 returns True if different than fctx.
1930 """
1941 """
1931 # fctx should be a filectx (not a workingfilectx)
1942 # fctx should be a filectx (not a workingfilectx)
1932 # invert comparison to reuse the same code path
1943 # invert comparison to reuse the same code path
1933 return fctx.cmp(self)
1944 return fctx.cmp(self)
1934
1945
1935 def remove(self, ignoremissing=False):
1946 def remove(self, ignoremissing=False):
1936 """wraps unlink for a repo's working directory"""
1947 """wraps unlink for a repo's working directory"""
1937 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1948 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1938
1949
1939 def write(self, data, flags, backgroundclose=False):
1950 def write(self, data, flags, backgroundclose=False):
1940 """wraps repo.wwrite"""
1951 """wraps repo.wwrite"""
1941 self._repo.wwrite(self._path, data, flags,
1952 self._repo.wwrite(self._path, data, flags,
1942 backgroundclose=backgroundclose)
1953 backgroundclose=backgroundclose)
1943
1954
1944 def markcopied(self, src):
1955 def markcopied(self, src):
1945 """marks this file a copy of `src`"""
1956 """marks this file a copy of `src`"""
1946 if self._repo.dirstate[self._path] in "nma":
1957 if self._repo.dirstate[self._path] in "nma":
1947 self._repo.dirstate.copy(src, self._path)
1958 self._repo.dirstate.copy(src, self._path)
1948
1959
    def clearunknown(self):
        """Removes conflicting items in the working directory so that
        ``write()`` can be called successfully.
        """
        wvfs = self._repo.wvfs
        f = self._path
        wvfs.audit(f)
        if wvfs.isdir(f) and not wvfs.islink(f):
            # A real directory occupies the path the file must go to;
            # remove it entirely (symlinks are left to the loop below).
            wvfs.rmtree(f, forcibly=True)
        for p in reversed(list(util.finddirs(f))):
            if wvfs.isfileorlink(p):
                # A file or symlink sits where a parent directory is needed;
                # removing the innermost conflicting entry is sufficient.
                wvfs.unlink(p)
                break
1962
1973
    def setflags(self, l, x):
        # l: make the file a symlink; x: make it executable
        self._repo.wvfs.setflags(self._path, l, x)
1965
1976
class overlayworkingctx(workingctx):
    """Wraps another mutable context with a write-back cache that can be flushed
    at a later time.

    self._cache[path] maps to a dict with keys: {
      'exists': bool?
      'date': date?
      'data': str?
      'flags': str?
    }
    If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
    is `False`, the file was deleted.
    """

    def __init__(self, repo, wrappedctx):
        super(overlayworkingctx, self).__init__(repo)
        self._repo = repo
        # the mutable context whose reads back this overlay and into which
        # cached changes are eventually flushed
        self._wrappedctx = wrappedctx
        self._clean()

    def data(self, path):
        """Return the content of ``path``, honoring cached (dirty) writes."""
        if self.isdirty(path):
            if self._cache[path]['exists']:
                if self._cache[path]['data']:
                    return self._cache[path]['data']
                else:
                    # Must fallback here, too, because we only set flags.
                    return self._wrappedctx[path].data()
            else:
                # bugfix: previously interpolated self._path, which is not an
                # attribute of this class (it is a context, not a filectx), so
                # the intended error surfaced as an AttributeError instead.
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        else:
            return self._wrappedctx[path].data()

    def isinmemory(self):
        """True: this context buffers all writes in memory."""
        return True

    def filedate(self, path):
        """Return the (cached, if dirty) modification date of ``path``."""
        if self.isdirty(path):
            return self._cache[path]['date']
        else:
            return self._wrappedctx[path].date()

    def flags(self, path):
        """Return the flag string ('l'/'x') for ``path``."""
        if self.isdirty(path):
            if self._cache[path]['exists']:
                return self._cache[path]['flags']
            else:
                # bugfix: was self._path (nonexistent attribute); see data().
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        else:
            return self._wrappedctx[path].flags()

    def write(self, path, data, flags=''):
        """Record new content (and flags) for ``path`` in the cache."""
        if data is None:
            raise error.ProgrammingError("data must be non-None")
        self._markdirty(path, exists=True, data=data, date=util.makedate(),
                        flags=flags)

    def setflags(self, path, l, x):
        """Record new symlink ('l') / executable ('x') flags for ``path``."""
        self._markdirty(path, exists=True, date=util.makedate(),
                        flags=(l and 'l' or '') + (x and 'x' or ''))

    def remove(self, path):
        """Mark ``path`` as deleted in the cache."""
        self._markdirty(path, exists=False)

    def exists(self, path):
        """exists behaves like `lexists`, but needs to follow symlinks and
        return False if they are broken.
        """
        if self.isdirty(path):
            # If this path exists and is a symlink, "follow" it by calling
            # exists on the destination path.
            if (self._cache[path]['exists'] and
                'l' in self._cache[path]['flags']):
                return self.exists(self._cache[path]['data'].strip())
            else:
                return self._cache[path]['exists']
        return self._wrappedctx[path].exists()

    def lexists(self, path):
        """lexists returns True if the path exists"""
        if self.isdirty(path):
            return self._cache[path]['exists']
        return self._wrappedctx[path].lexists()

    def size(self, path):
        """Return the size of ``path`` in bytes."""
        if self.isdirty(path):
            if self._cache[path]['exists']:
                return len(self._cache[path]['data'])
            else:
                # bugfix: was self._path (nonexistent attribute); see data().
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        return self._wrappedctx[path].size()

    def flushall(self):
        """Write every cached change through to the wrapped context, in the
        order the paths were first dirtied, then reset the cache."""
        for path in self._writeorder:
            entry = self._cache[path]
            if entry['exists']:
                self._wrappedctx[path].clearunknown()
                if entry['data'] is not None:
                    if entry['flags'] is None:
                        raise error.ProgrammingError('data set but not flags')
                    self._wrappedctx[path].write(
                        entry['data'],
                        entry['flags'])
                else:
                    # Only flags were changed; leave the data untouched.
                    self._wrappedctx[path].setflags(
                        'l' in entry['flags'],
                        'x' in entry['flags'])
            else:
                # NOTE(review): ``path`` lands in workingfilectx.remove()'s
                # ``ignoremissing`` parameter -- confirm this is intended.
                self._wrappedctx[path].remove(path)
        self._clean()

    def isdirty(self, path):
        """True if ``path`` has a pending (unflushed) change."""
        return path in self._cache

    def _clean(self):
        # reset the write-back cache and the ordered list of dirtied paths
        self._cache = {}
        self._writeorder = []

    def _markdirty(self, path, exists, data=None, date=None, flags=''):
        # remember first-dirtied order so flushall() replays deterministically
        if path not in self._cache:
            self._writeorder.append(path)

        self._cache[path] = {
            'exists': exists,
            'data': data,
            'date': date,
            'flags': flags,
        }

    def filectx(self, path, filelog=None):
        return overlayworkingfilectx(self._repo, path, parent=self,
                                     filelog=filelog)
2101
2112
class overlayworkingfilectx(workingfilectx):
    """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
    cache, which can be flushed through later by calling ``flush()``."""

    def __init__(self, repo, path, filelog=None, parent=None):
        super(overlayworkingfilectx, self).__init__(repo, path, filelog,
                                                    parent)
        self._repo = repo
        # ``parent`` is the owning overlayworkingctx; every read and write
        # below is delegated to it under this file's path.
        self._parent = parent
        self._path = path

    def cmp(self, fctx):
        # True if content differs from ``fctx``
        return self.data() != fctx.data()

    def ctx(self):
        return self._parent

    def data(self):
        return self._parent.data(self._path)

    def date(self):
        return self._parent.filedate(self._path)

    def exists(self):
        return self.lexists()

    def lexists(self):
        return self._parent.exists(self._path)

    def renamed(self):
        # Copies are currently tracked in the dirstate as before. Straight copy
        # from workingfilectx.
        rp = self._repo.dirstate.copied(self._path)
        if not rp:
            return None
        return rp, self._changectx._parents[0]._manifest.get(rp, nullid)

    def size(self):
        return self._parent.size(self._path)

    def audit(self):
        # no-op: nothing to audit for an in-memory file
        pass

    def flags(self):
        return self._parent.flags(self._path)

    def setflags(self, islink, isexec):
        return self._parent.setflags(self._path, islink, isexec)

    def write(self, data, flags, backgroundclose=False):
        # ``backgroundclose`` is accepted for interface compatibility but
        # not forwarded -- there is no file handle to close in memory.
        return self._parent.write(self._path, data, flags)

    def remove(self, ignoremissing=False):
        # ``ignoremissing`` is accepted for interface compatibility only.
        return self._parent.remove(self._path)
2156
2167
class workingcommitctx(workingctx):
    """A workingcommitctx object makes access to data related to
    the revision being committed convenient.

    This hides changes in the working directory, if they aren't
    committed in this context.
    """
    def __init__(self, repo, changes,
                 text="", user=None, date=None, extra=None):
        # super(workingctx, ...) deliberately bypasses workingctx.__init__,
        # handing the precomputed ``changes`` status straight to its base
        # class instead of recomputing status from the working directory.
        super(workingctx, self).__init__(repo, text, user, date, extra,
                                         changes)

    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        """Return matched files only in ``self._status``

        Uncommitted files appear "clean" via this context, even if
        they aren't actually so in the working directory.
        """
        if clean:
            # files tracked in the manifest but untouched by this commit
            clean = [f for f in self._manifest if f not in self._changedset]
        else:
            clean = []
        return scmutil.status([f for f in self._status.modified if match(f)],
                              [f for f in self._status.added if match(f)],
                              [f for f in self._status.removed if match(f)],
                              [], [], [], clean)

    @propertycache
    def _changedset(self):
        """Return the set of files changed in this context
        """
        changed = set(self._status.modified)
        changed.update(self._status.added)
        changed.update(self._status.removed)
        return changed
2192
2203
def makecachingfilectxfn(func):
    """Create a filectxfn that caches based on the path.

    We can't use util.cachefunc because it uses all arguments as the cache
    key and this creates a cycle since the arguments include the repo and
    memctx.
    """
    cache = {}

    def getfilectx(repo, memctx, path):
        # cache on path alone; repo/memctx are deliberately excluded from
        # the key (see docstring)
        if path in cache:
            return cache[path]
        fctx = func(repo, memctx, path)
        cache[path] = fctx
        return fctx

    return getfilectx
2208
2219
def memfilefromctx(ctx):
    """Given a context return a memfilectx for ctx[path]

    This is a convenience method for building a memctx based on another
    context.
    """
    def getfilectx(repo, memctx, path):
        fctx = ctx[path]
        # this is weird but apparently we only keep track of one parent
        # (why not only store that instead of a tuple?)
        renamed = fctx.renamed()
        copied = renamed[0] if renamed else renamed
        return memfilectx(repo, path, fctx.data(),
                          islink=fctx.islink(), isexec=fctx.isexec(),
                          copied=copied, memctx=memctx)

    return getfilectx
2227
2238
def memfilefrompatch(patchstore):
    """Given a patch (e.g. patchstore object) return a memfilectx

    This is a convenience method for building a memctx based on a patchstore.
    """
    def getfilectx(repo, memctx, path):
        data, mode, copied = patchstore.getfile(path)
        if data is None:
            # the patch removed this file
            return None
        islink, isexec = mode
        return memfilectx(repo, path, data,
                          islink=islink, isexec=isexec,
                          copied=copied, memctx=memctx)

    return getfilectx
2243
2254
class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time while
    related files data and is made available through a callback
    mechanism.  'repo' is the current localrepo, 'parents' is a
    sequence of two parent revisions identifiers (pass None for every
    missing parent), 'text' is the commit message and 'files' lists
    names of files touched by the revision (normalized and relative to
    repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but calls order is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn return None for recent
    Mercurial. Moved files are represented by marking the source file
    removed and the new file added with copy information (see
    memfilectx).

    user receives the committer name and defaults to current
    repository username, date is the commit date in any format
    supported by util.parsedate() and defaults to current date, extra
    is a dictionary of metadata or is left empty.
    """

    # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
    # Extensions that need to retain compatibility across Mercurial 3.1 can use
    # this field to determine what to do in filectxfn.
    _returnnoneformissingfiles = True

    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, branch=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        # missing parents (None) are normalized to the null revision
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
        files = sorted(set(files))
        self._files = files
        if branch is not None:
            self._extra['branch'] = encoding.fromlocal(branch)
        self.substate = {}

        if isinstance(filectxfn, patch.filestore):
            filectxfn = memfilefrompatch(filectxfn)
        elif not callable(filectxfn):
            # if store is not callable, wrap it in a function
            filectxfn = memfilefromctx(filectxfn)

        # memoizing increases performance for e.g. vcs convert scenarios.
        self._filectxfn = makecachingfilectxfn(filectxfn)

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].filenode()
                if len(p) > 1:
                    p2node = p[1].filenode()
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            # new files have no filelog parents
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                # in neither parent manifest: a brand new file
                added.append(f)
            elif self[f]:
                # filectxfn produced a filectx: content present, so modified
                modified.append(f)
            else:
                # filectxfn returned None: the file was removed
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
2366
2377
class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, path, data, islink=False,
                 isexec=False, copied=None, memctx=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if current file was copied in the
        revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, memctx)
        self._data = data
        # flag string: 'l' for symlink, 'x' for executable
        self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
        self._copied = None
        if copied:
            # repo._filecommit recalculates the copy revision, so nullid
            # is an acceptable placeholder here
            self._copied = (copied, nullid)

    def data(self):
        """Return the in-memory file content."""
        return self._data

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags):
        """wraps repo.wwrite"""
        # NOTE(review): ``flags`` is accepted but not stored -- confirm
        # whether flag updates should also be recorded here.
        self._data = data
2399
2410
2400 class overlayfilectx(committablefilectx):
2411 class overlayfilectx(committablefilectx):
2401 """Like memfilectx but take an original filectx and optional parameters to
2412 """Like memfilectx but take an original filectx and optional parameters to
2402 override parts of it. This is useful when fctx.data() is expensive (i.e.
2413 override parts of it. This is useful when fctx.data() is expensive (i.e.
2403 flag processor is expensive) and raw data, flags, and filenode could be
2414 flag processor is expensive) and raw data, flags, and filenode could be
2404 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2415 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2405 """
2416 """
2406
2417
2407 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2418 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2408 copied=None, ctx=None):
2419 copied=None, ctx=None):
2409 """originalfctx: filecontext to duplicate
2420 """originalfctx: filecontext to duplicate
2410
2421
2411 datafunc: None or a function to override data (file content). It is a
2422 datafunc: None or a function to override data (file content). It is a
2412 function to be lazy. path, flags, copied, ctx: None or overridden value
2423 function to be lazy. path, flags, copied, ctx: None or overridden value
2413
2424
2414 copied could be (path, rev), or False. copied could also be just path,
2425 copied could be (path, rev), or False. copied could also be just path,
2415 and will be converted to (path, nullid). This simplifies some callers.
2426 and will be converted to (path, nullid). This simplifies some callers.
2416 """
2427 """
2417
2428
2418 if path is None:
2429 if path is None:
2419 path = originalfctx.path()
2430 path = originalfctx.path()
2420 if ctx is None:
2431 if ctx is None:
2421 ctx = originalfctx.changectx()
2432 ctx = originalfctx.changectx()
2422 ctxmatch = lambda: True
2433 ctxmatch = lambda: True
2423 else:
2434 else:
2424 ctxmatch = lambda: ctx == originalfctx.changectx()
2435 ctxmatch = lambda: ctx == originalfctx.changectx()
2425
2436
2426 repo = originalfctx.repo()
2437 repo = originalfctx.repo()
2427 flog = originalfctx.filelog()
2438 flog = originalfctx.filelog()
2428 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2439 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2429
2440
2430 if copied is None:
2441 if copied is None:
2431 copied = originalfctx.renamed()
2442 copied = originalfctx.renamed()
2432 copiedmatch = lambda: True
2443 copiedmatch = lambda: True
2433 else:
2444 else:
2434 if copied and not isinstance(copied, tuple):
2445 if copied and not isinstance(copied, tuple):
2435 # repo._filecommit will recalculate copyrev so nullid is okay
2446 # repo._filecommit will recalculate copyrev so nullid is okay
2436 copied = (copied, nullid)
2447 copied = (copied, nullid)
2437 copiedmatch = lambda: copied == originalfctx.renamed()
2448 copiedmatch = lambda: copied == originalfctx.renamed()
2438
2449
2439 # When data, copied (could affect data), ctx (could affect filelog
2450 # When data, copied (could affect data), ctx (could affect filelog
2440 # parents) are not overridden, rawdata, rawflags, and filenode may be
2451 # parents) are not overridden, rawdata, rawflags, and filenode may be
2441 # reused (repo._filecommit should double check filelog parents).
2452 # reused (repo._filecommit should double check filelog parents).
2442 #
2453 #
2443 # path, flags are not hashed in filelog (but in manifestlog) so they do
2454 # path, flags are not hashed in filelog (but in manifestlog) so they do
2444 # not affect reusable here.
2455 # not affect reusable here.
2445 #
2456 #
2446 # If ctx or copied is overridden to a same value with originalfctx,
2457 # If ctx or copied is overridden to a same value with originalfctx,
2447 # still consider it's reusable. originalfctx.renamed() may be a bit
2458 # still consider it's reusable. originalfctx.renamed() may be a bit
2448 # expensive so it's not called unless necessary. Assuming datafunc is
2459 # expensive so it's not called unless necessary. Assuming datafunc is
2449 # always expensive, do not call it for this "reusable" test.
2460 # always expensive, do not call it for this "reusable" test.
2450 reusable = datafunc is None and ctxmatch() and copiedmatch()
2461 reusable = datafunc is None and ctxmatch() and copiedmatch()
2451
2462
2452 if datafunc is None:
2463 if datafunc is None:
2453 datafunc = originalfctx.data
2464 datafunc = originalfctx.data
2454 if flags is None:
2465 if flags is None:
2455 flags = originalfctx.flags()
2466 flags = originalfctx.flags()
2456
2467
2457 self._datafunc = datafunc
2468 self._datafunc = datafunc
2458 self._flags = flags
2469 self._flags = flags
2459 self._copied = copied
2470 self._copied = copied
2460
2471
2461 if reusable:
2472 if reusable:
2462 # copy extra fields from originalfctx
2473 # copy extra fields from originalfctx
2463 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2474 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2464 for attr_ in attrs:
2475 for attr_ in attrs:
2465 if util.safehasattr(originalfctx, attr_):
2476 if util.safehasattr(originalfctx, attr_):
2466 setattr(self, attr_, getattr(originalfctx, attr_))
2477 setattr(self, attr_, getattr(originalfctx, attr_))
2467
2478
2468 def data(self):
2479 def data(self):
2469 return self._datafunc()
2480 return self._datafunc()
2470
2481
2471 class metadataonlyctx(committablectx):
2482 class metadataonlyctx(committablectx):
2472 """Like memctx but it's reusing the manifest of different commit.
2483 """Like memctx but it's reusing the manifest of different commit.
2473 Intended to be used by lightweight operations that are creating
2484 Intended to be used by lightweight operations that are creating
2474 metadata-only changes.
2485 metadata-only changes.
2475
2486
2476 Revision information is supplied at initialization time. 'repo' is the
2487 Revision information is supplied at initialization time. 'repo' is the
2477 current localrepo, 'ctx' is original revision which manifest we're reuisng
2488 current localrepo, 'ctx' is original revision which manifest we're reuisng
2478 'parents' is a sequence of two parent revisions identifiers (pass None for
2489 'parents' is a sequence of two parent revisions identifiers (pass None for
2479 every missing parent), 'text' is the commit.
2490 every missing parent), 'text' is the commit.
2480
2491
2481 user receives the committer name and defaults to current repository
2492 user receives the committer name and defaults to current repository
2482 username, date is the commit date in any format supported by
2493 username, date is the commit date in any format supported by
2483 util.parsedate() and defaults to current date, extra is a dictionary of
2494 util.parsedate() and defaults to current date, extra is a dictionary of
2484 metadata or is left empty.
2495 metadata or is left empty.
2485 """
2496 """
2486 def __new__(cls, repo, originalctx, *args, **kwargs):
2497 def __new__(cls, repo, originalctx, *args, **kwargs):
2487 return super(metadataonlyctx, cls).__new__(cls, repo)
2498 return super(metadataonlyctx, cls).__new__(cls, repo)
2488
2499
2489 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2500 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2490 date=None, extra=None, editor=False):
2501 date=None, extra=None, editor=False):
2491 if text is None:
2502 if text is None:
2492 text = originalctx.description()
2503 text = originalctx.description()
2493 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2504 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2494 self._rev = None
2505 self._rev = None
2495 self._node = None
2506 self._node = None
2496 self._originalctx = originalctx
2507 self._originalctx = originalctx
2497 self._manifestnode = originalctx.manifestnode()
2508 self._manifestnode = originalctx.manifestnode()
2498 if parents is None:
2509 if parents is None:
2499 parents = originalctx.parents()
2510 parents = originalctx.parents()
2500 else:
2511 else:
2501 parents = [repo[p] for p in parents if p is not None]
2512 parents = [repo[p] for p in parents if p is not None]
2502 parents = parents[:]
2513 parents = parents[:]
2503 while len(parents) < 2:
2514 while len(parents) < 2:
2504 parents.append(repo[nullid])
2515 parents.append(repo[nullid])
2505 p1, p2 = self._parents = parents
2516 p1, p2 = self._parents = parents
2506
2517
2507 # sanity check to ensure that the reused manifest parents are
2518 # sanity check to ensure that the reused manifest parents are
2508 # manifests of our commit parents
2519 # manifests of our commit parents
2509 mp1, mp2 = self.manifestctx().parents
2520 mp1, mp2 = self.manifestctx().parents
2510 if p1 != nullid and p1.manifestnode() != mp1:
2521 if p1 != nullid and p1.manifestnode() != mp1:
2511 raise RuntimeError('can\'t reuse the manifest: '
2522 raise RuntimeError('can\'t reuse the manifest: '
2512 'its p1 doesn\'t match the new ctx p1')
2523 'its p1 doesn\'t match the new ctx p1')
2513 if p2 != nullid and p2.manifestnode() != mp2:
2524 if p2 != nullid and p2.manifestnode() != mp2:
2514 raise RuntimeError('can\'t reuse the manifest: '
2525 raise RuntimeError('can\'t reuse the manifest: '
2515 'its p2 doesn\'t match the new ctx p2')
2526 'its p2 doesn\'t match the new ctx p2')
2516
2527
2517 self._files = originalctx.files()
2528 self._files = originalctx.files()
2518 self.substate = {}
2529 self.substate = {}
2519
2530
2520 if editor:
2531 if editor:
2521 self._text = editor(self._repo, self, [])
2532 self._text = editor(self._repo, self, [])
2522 self._repo.savecommitmessage(self._text)
2533 self._repo.savecommitmessage(self._text)
2523
2534
2524 def manifestnode(self):
2535 def manifestnode(self):
2525 return self._manifestnode
2536 return self._manifestnode
2526
2537
2527 @property
2538 @property
2528 def _manifestctx(self):
2539 def _manifestctx(self):
2529 return self._repo.manifestlog[self._manifestnode]
2540 return self._repo.manifestlog[self._manifestnode]
2530
2541
2531 def filectx(self, path, filelog=None):
2542 def filectx(self, path, filelog=None):
2532 return self._originalctx.filectx(path, filelog=filelog)
2543 return self._originalctx.filectx(path, filelog=filelog)
2533
2544
2534 def commit(self):
2545 def commit(self):
2535 """commit context to the repo"""
2546 """commit context to the repo"""
2536 return self._repo.commitctx(self)
2547 return self._repo.commitctx(self)
2537
2548
2538 @property
2549 @property
2539 def _manifest(self):
2550 def _manifest(self):
2540 return self._originalctx.manifest()
2551 return self._originalctx.manifest()
2541
2552
2542 @propertycache
2553 @propertycache
2543 def _status(self):
2554 def _status(self):
2544 """Calculate exact status from ``files`` specified in the ``origctx``
2555 """Calculate exact status from ``files`` specified in the ``origctx``
2545 and parents manifests.
2556 and parents manifests.
2546 """
2557 """
2547 man1 = self.p1().manifest()
2558 man1 = self.p1().manifest()
2548 p2 = self._parents[1]
2559 p2 = self._parents[1]
2549 # "1 < len(self._parents)" can't be used for checking
2560 # "1 < len(self._parents)" can't be used for checking
2550 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2561 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2551 # explicitly initialized by the list, of which length is 2.
2562 # explicitly initialized by the list, of which length is 2.
2552 if p2.node() != nullid:
2563 if p2.node() != nullid:
2553 man2 = p2.manifest()
2564 man2 = p2.manifest()
2554 managing = lambda f: f in man1 or f in man2
2565 managing = lambda f: f in man1 or f in man2
2555 else:
2566 else:
2556 managing = lambda f: f in man1
2567 managing = lambda f: f in man1
2557
2568
2558 modified, added, removed = [], [], []
2569 modified, added, removed = [], [], []
2559 for f in self._files:
2570 for f in self._files:
2560 if not managing(f):
2571 if not managing(f):
2561 added.append(f)
2572 added.append(f)
2562 elif f in self:
2573 elif f in self:
2563 modified.append(f)
2574 modified.append(f)
2564 else:
2575 else:
2565 removed.append(f)
2576 removed.append(f)
2566
2577
2567 return scmutil.status(modified, added, removed, [], [], [], [])
2578 return scmutil.status(modified, added, removed, [], [], [], [])
2568
2579
2569 class arbitraryfilectx(object):
2580 class arbitraryfilectx(object):
2570 """Allows you to use filectx-like functions on a file in an arbitrary
2581 """Allows you to use filectx-like functions on a file in an arbitrary
2571 location on disk, possibly not in the working directory.
2582 location on disk, possibly not in the working directory.
2572 """
2583 """
2573 def __init__(self, path, repo=None):
2584 def __init__(self, path, repo=None):
2574 # Repo is optional because contrib/simplemerge uses this class.
2585 # Repo is optional because contrib/simplemerge uses this class.
2575 self._repo = repo
2586 self._repo = repo
2576 self._path = path
2587 self._path = path
2577
2588
2578 def cmp(self, fctx):
2589 def cmp(self, fctx):
2579 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2590 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2580 # path if either side is a symlink.
2591 # path if either side is a symlink.
2581 symlinks = ('l' in self.flags() or 'l' in fctx.flags())
2592 symlinks = ('l' in self.flags() or 'l' in fctx.flags())
2582 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2593 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2583 # Add a fast-path for merge if both sides are disk-backed.
2594 # Add a fast-path for merge if both sides are disk-backed.
2584 # Note that filecmp uses the opposite return values (True if same)
2595 # Note that filecmp uses the opposite return values (True if same)
2585 # from our cmp functions (True if different).
2596 # from our cmp functions (True if different).
2586 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2597 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2587 return self.data() != fctx.data()
2598 return self.data() != fctx.data()
2588
2599
2589 def path(self):
2600 def path(self):
2590 return self._path
2601 return self._path
2591
2602
2592 def flags(self):
2603 def flags(self):
2593 return ''
2604 return ''
2594
2605
2595 def data(self):
2606 def data(self):
2596 return util.readfile(self._path)
2607 return util.readfile(self._path)
2597
2608
2598 def decodeddata(self):
2609 def decodeddata(self):
2599 with open(self._path, "rb") as f:
2610 with open(self._path, "rb") as f:
2600 return f.read()
2611 return f.read()
2601
2612
2602 def remove(self):
2613 def remove(self):
2603 util.unlink(self._path)
2614 util.unlink(self._path)
2604
2615
2605 def write(self, data, flags):
2616 def write(self, data, flags):
2606 assert not flags
2617 assert not flags
2607 with open(self._path, "w") as f:
2618 with open(self._path, "w") as f:
2608 f.write(data)
2619 f.write(data)
@@ -1,403 +1,421
1 # registrar.py - utilities to register function for specific purpose
1 # registrar.py - utilities to register function for specific purpose
2 #
2 #
3 # Copyright FUJIWARA Katsunori <foozy@lares.dti.ne.jp> and others
3 # Copyright FUJIWARA Katsunori <foozy@lares.dti.ne.jp> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from . import (
10 from . import (
11 configitems,
11 configitems,
12 error,
12 error,
13 pycompat,
13 pycompat,
14 util,
14 util,
15 )
15 )
16
16
17 # unlike the other registered items, config options are neither functions or
17 # unlike the other registered items, config options are neither functions or
18 # classes. Registering the option is just small function call.
18 # classes. Registering the option is just small function call.
19 #
19 #
20 # We still add the official API to the registrar module for consistency with
20 # We still add the official API to the registrar module for consistency with
21 # the other items extensions want might to register.
21 # the other items extensions want might to register.
22 configitem = configitems.getitemregister
22 configitem = configitems.getitemregister
23
23
24 class _funcregistrarbase(object):
24 class _funcregistrarbase(object):
25 """Base of decorator to register a function for specific purpose
25 """Base of decorator to register a function for specific purpose
26
26
27 This decorator stores decorated functions into own dict 'table'.
27 This decorator stores decorated functions into own dict 'table'.
28
28
29 The least derived class can be defined by overriding 'formatdoc',
29 The least derived class can be defined by overriding 'formatdoc',
30 for example::
30 for example::
31
31
32 class keyword(_funcregistrarbase):
32 class keyword(_funcregistrarbase):
33 _docformat = ":%s: %s"
33 _docformat = ":%s: %s"
34
34
35 This should be used as below:
35 This should be used as below:
36
36
37 keyword = registrar.keyword()
37 keyword = registrar.keyword()
38
38
39 @keyword('bar')
39 @keyword('bar')
40 def barfunc(*args, **kwargs):
40 def barfunc(*args, **kwargs):
41 '''Explanation of bar keyword ....
41 '''Explanation of bar keyword ....
42 '''
42 '''
43 pass
43 pass
44
44
45 In this case:
45 In this case:
46
46
47 - 'barfunc' is stored as 'bar' in '_table' of an instance 'keyword' above
47 - 'barfunc' is stored as 'bar' in '_table' of an instance 'keyword' above
48 - 'barfunc.__doc__' becomes ":bar: Explanation of bar keyword"
48 - 'barfunc.__doc__' becomes ":bar: Explanation of bar keyword"
49 """
49 """
50 def __init__(self, table=None):
50 def __init__(self, table=None):
51 if table is None:
51 if table is None:
52 self._table = {}
52 self._table = {}
53 else:
53 else:
54 self._table = table
54 self._table = table
55
55
56 def __call__(self, decl, *args, **kwargs):
56 def __call__(self, decl, *args, **kwargs):
57 return lambda func: self._doregister(func, decl, *args, **kwargs)
57 return lambda func: self._doregister(func, decl, *args, **kwargs)
58
58
59 def _doregister(self, func, decl, *args, **kwargs):
59 def _doregister(self, func, decl, *args, **kwargs):
60 name = self._getname(decl)
60 name = self._getname(decl)
61
61
62 if name in self._table:
62 if name in self._table:
63 msg = 'duplicate registration for name: "%s"' % name
63 msg = 'duplicate registration for name: "%s"' % name
64 raise error.ProgrammingError(msg)
64 raise error.ProgrammingError(msg)
65
65
66 if func.__doc__ and not util.safehasattr(func, '_origdoc'):
66 if func.__doc__ and not util.safehasattr(func, '_origdoc'):
67 doc = pycompat.sysbytes(func.__doc__).strip()
67 doc = pycompat.sysbytes(func.__doc__).strip()
68 func._origdoc = doc
68 func._origdoc = doc
69 func.__doc__ = pycompat.sysstr(self._formatdoc(decl, doc))
69 func.__doc__ = pycompat.sysstr(self._formatdoc(decl, doc))
70
70
71 self._table[name] = func
71 self._table[name] = func
72 self._extrasetup(name, func, *args, **kwargs)
72 self._extrasetup(name, func, *args, **kwargs)
73
73
74 return func
74 return func
75
75
76 def _parsefuncdecl(self, decl):
76 def _parsefuncdecl(self, decl):
77 """Parse function declaration and return the name of function in it
77 """Parse function declaration and return the name of function in it
78 """
78 """
79 i = decl.find('(')
79 i = decl.find('(')
80 if i >= 0:
80 if i >= 0:
81 return decl[:i]
81 return decl[:i]
82 else:
82 else:
83 return decl
83 return decl
84
84
85 def _getname(self, decl):
85 def _getname(self, decl):
86 """Return the name of the registered function from decl
86 """Return the name of the registered function from decl
87
87
88 Derived class should override this, if it allows more
88 Derived class should override this, if it allows more
89 descriptive 'decl' string than just a name.
89 descriptive 'decl' string than just a name.
90 """
90 """
91 return decl
91 return decl
92
92
93 _docformat = None
93 _docformat = None
94
94
95 def _formatdoc(self, decl, doc):
95 def _formatdoc(self, decl, doc):
96 """Return formatted document of the registered function for help
96 """Return formatted document of the registered function for help
97
97
98 'doc' is '__doc__.strip()' of the registered function.
98 'doc' is '__doc__.strip()' of the registered function.
99 """
99 """
100 return self._docformat % (decl, doc)
100 return self._docformat % (decl, doc)
101
101
102 def _extrasetup(self, name, func):
102 def _extrasetup(self, name, func):
103 """Execute exra setup for registered function, if needed
103 """Execute exra setup for registered function, if needed
104 """
104 """
105
105
106 class command(_funcregistrarbase):
106 class command(_funcregistrarbase):
107 """Decorator to register a command function to table
107 """Decorator to register a command function to table
108
108
109 This class receives a command table as its argument. The table should
109 This class receives a command table as its argument. The table should
110 be a dict.
110 be a dict.
111
111
112 The created object can be used as a decorator for adding commands to
112 The created object can be used as a decorator for adding commands to
113 that command table. This accepts multiple arguments to define a command.
113 that command table. This accepts multiple arguments to define a command.
114
114
115 The first argument is the command name.
115 The first argument is the command name (as bytes).
116
116
117 The options argument is an iterable of tuples defining command arguments.
117 The `options` keyword argument is an iterable of tuples defining command
118 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
118 arguments. See ``mercurial.fancyopts.fancyopts()`` for the format of each
119 tuple.
119
120
120 The synopsis argument defines a short, one line summary of how to use the
121 The `synopsis` argument defines a short, one line summary of how to use the
121 command. This shows up in the help output.
122 command. This shows up in the help output.
122
123
123 The norepo argument defines whether the command does not require a
124 There are three arguments that control what repository (if any) is found
125 and passed to the decorated function: `norepo`, `optionalrepo`, and
126 `inferrepo`.
127
128 The `norepo` argument defines whether the command does not require a
124 local repository. Most commands operate against a repository, thus the
129 local repository. Most commands operate against a repository, thus the
125 default is False.
130 default is False. When True, no repository will be passed.
126
131
127 The optionalrepo argument defines whether the command optionally requires
132 The `optionalrepo` argument defines whether the command optionally requires
128 a local repository.
133 a local repository. If no repository can be found, None will be passed
134 to the decorated function.
129
135
130 The inferrepo argument defines whether to try to find a repository from the
136 The `inferrepo` argument defines whether to try to find a repository from
131 command line arguments. If True, arguments will be examined for potential
137 the command line arguments. If True, arguments will be examined for
132 repository locations. See ``findrepo()``. If a repository is found, it
138 potential repository locations. See ``findrepo()``. If a repository is
133 will be used.
139 found, it will be used and passed to the decorated function.
134
140
135 There are three constants in the class which tells what type of the command
141 There are three constants in the class which tells what type of the command
136 that is. That information will be helpful at various places. It will be also
142 that is. That information will be helpful at various places. It will be also
137 be used to decide what level of access the command has on hidden commits.
143 be used to decide what level of access the command has on hidden commits.
138 The constants are:
144 The constants are:
139
145
140 unrecoverablewrite is for those write commands which can't be recovered like
146 `unrecoverablewrite` is for those write commands which can't be recovered
141 push.
147 like push.
142 recoverablewrite is for write commands which can be recovered like commit.
148 `recoverablewrite` is for write commands which can be recovered like commit.
143 readonly is for commands which are read only.
149 `readonly` is for commands which are read only.
150
151 The signature of the decorated function looks like this:
152 def cmd(ui[, repo] [, <args>] [, <options>])
153
154 `repo` is required if `norepo` is False.
155 `<args>` are positional args (or `*args`) arguments, of non-option
156 arguments from the command line.
157 `<options>` are keyword arguments (or `**options`) of option arguments
158 from the command line.
159
160 See the WritingExtensions and MercurialApi documentation for more exhaustive
161 descriptions and examples.
144 """
162 """
145
163
146 unrecoverablewrite = "unrecoverable"
164 unrecoverablewrite = "unrecoverable"
147 recoverablewrite = "recoverable"
165 recoverablewrite = "recoverable"
148 readonly = "readonly"
166 readonly = "readonly"
149
167
150 possiblecmdtypes = {unrecoverablewrite, recoverablewrite, readonly}
168 possiblecmdtypes = {unrecoverablewrite, recoverablewrite, readonly}
151
169
152 def _doregister(self, func, name, options=(), synopsis=None,
170 def _doregister(self, func, name, options=(), synopsis=None,
153 norepo=False, optionalrepo=False, inferrepo=False,
171 norepo=False, optionalrepo=False, inferrepo=False,
154 cmdtype=unrecoverablewrite):
172 cmdtype=unrecoverablewrite):
155
173
156 if cmdtype not in self.possiblecmdtypes:
174 if cmdtype not in self.possiblecmdtypes:
157 raise error.ProgrammingError("unknown cmdtype value '%s' for "
175 raise error.ProgrammingError("unknown cmdtype value '%s' for "
158 "'%s' command" % (cmdtype, name))
176 "'%s' command" % (cmdtype, name))
159 func.norepo = norepo
177 func.norepo = norepo
160 func.optionalrepo = optionalrepo
178 func.optionalrepo = optionalrepo
161 func.inferrepo = inferrepo
179 func.inferrepo = inferrepo
162 func.cmdtype = cmdtype
180 func.cmdtype = cmdtype
163 if synopsis:
181 if synopsis:
164 self._table[name] = func, list(options), synopsis
182 self._table[name] = func, list(options), synopsis
165 else:
183 else:
166 self._table[name] = func, list(options)
184 self._table[name] = func, list(options)
167 return func
185 return func
168
186
169 class revsetpredicate(_funcregistrarbase):
187 class revsetpredicate(_funcregistrarbase):
170 """Decorator to register revset predicate
188 """Decorator to register revset predicate
171
189
172 Usage::
190 Usage::
173
191
174 revsetpredicate = registrar.revsetpredicate()
192 revsetpredicate = registrar.revsetpredicate()
175
193
176 @revsetpredicate('mypredicate(arg1, arg2[, arg3])')
194 @revsetpredicate('mypredicate(arg1, arg2[, arg3])')
177 def mypredicatefunc(repo, subset, x):
195 def mypredicatefunc(repo, subset, x):
178 '''Explanation of this revset predicate ....
196 '''Explanation of this revset predicate ....
179 '''
197 '''
180 pass
198 pass
181
199
182 The first string argument is used also in online help.
200 The first string argument is used also in online help.
183
201
184 Optional argument 'safe' indicates whether a predicate is safe for
202 Optional argument 'safe' indicates whether a predicate is safe for
185 DoS attack (False by default).
203 DoS attack (False by default).
186
204
187 Optional argument 'takeorder' indicates whether a predicate function
205 Optional argument 'takeorder' indicates whether a predicate function
188 takes ordering policy as the last argument.
206 takes ordering policy as the last argument.
189
207
190 Optional argument 'weight' indicates the estimated run-time cost, useful
208 Optional argument 'weight' indicates the estimated run-time cost, useful
191 for static optimization, default is 1. Higher weight means more expensive.
209 for static optimization, default is 1. Higher weight means more expensive.
192 Usually, revsets that are fast and return only one revision has a weight of
210 Usually, revsets that are fast and return only one revision has a weight of
193 0.5 (ex. a symbol); revsets with O(changelog) complexity and read only the
211 0.5 (ex. a symbol); revsets with O(changelog) complexity and read only the
194 changelog have weight 10 (ex. author); revsets reading manifest deltas have
212 changelog have weight 10 (ex. author); revsets reading manifest deltas have
195 weight 30 (ex. adds); revset reading manifest contents have weight 100
213 weight 30 (ex. adds); revset reading manifest contents have weight 100
196 (ex. contains). Note: those values are flexible. If the revset has a
214 (ex. contains). Note: those values are flexible. If the revset has a
197 same big-O time complexity as 'contains', but with a smaller constant, it
215 same big-O time complexity as 'contains', but with a smaller constant, it
198 might have a weight of 90.
216 might have a weight of 90.
199
217
200 'revsetpredicate' instance in example above can be used to
218 'revsetpredicate' instance in example above can be used to
201 decorate multiple functions.
219 decorate multiple functions.
202
220
203 Decorated functions are registered automatically at loading
221 Decorated functions are registered automatically at loading
204 extension, if an instance named as 'revsetpredicate' is used for
222 extension, if an instance named as 'revsetpredicate' is used for
205 decorating in extension.
223 decorating in extension.
206
224
207 Otherwise, explicit 'revset.loadpredicate()' is needed.
225 Otherwise, explicit 'revset.loadpredicate()' is needed.
208 """
226 """
209 _getname = _funcregistrarbase._parsefuncdecl
227 _getname = _funcregistrarbase._parsefuncdecl
210 _docformat = "``%s``\n %s"
228 _docformat = "``%s``\n %s"
211
229
212 def _extrasetup(self, name, func, safe=False, takeorder=False, weight=1):
230 def _extrasetup(self, name, func, safe=False, takeorder=False, weight=1):
213 func._safe = safe
231 func._safe = safe
214 func._takeorder = takeorder
232 func._takeorder = takeorder
215 func._weight = weight
233 func._weight = weight
216
234
217 class filesetpredicate(_funcregistrarbase):
235 class filesetpredicate(_funcregistrarbase):
218 """Decorator to register fileset predicate
236 """Decorator to register fileset predicate
219
237
220 Usage::
238 Usage::
221
239
222 filesetpredicate = registrar.filesetpredicate()
240 filesetpredicate = registrar.filesetpredicate()
223
241
224 @filesetpredicate('mypredicate()')
242 @filesetpredicate('mypredicate()')
225 def mypredicatefunc(mctx, x):
243 def mypredicatefunc(mctx, x):
226 '''Explanation of this fileset predicate ....
244 '''Explanation of this fileset predicate ....
227 '''
245 '''
228 pass
246 pass
229
247
230 The first string argument is used also in online help.
248 The first string argument is used also in online help.
231
249
232 Optional argument 'callstatus' indicates whether a predicate
250 Optional argument 'callstatus' indicates whether a predicate
233 implies 'matchctx.status()' at runtime or not (False, by
251 implies 'matchctx.status()' at runtime or not (False, by
234 default).
252 default).
235
253
236 Optional argument 'callexisting' indicates whether a predicate
254 Optional argument 'callexisting' indicates whether a predicate
237 implies 'matchctx.existing()' at runtime or not (False, by
255 implies 'matchctx.existing()' at runtime or not (False, by
238 default).
256 default).
239
257
240 'filesetpredicate' instance in example above can be used to
258 'filesetpredicate' instance in example above can be used to
241 decorate multiple functions.
259 decorate multiple functions.
242
260
243 Decorated functions are registered automatically at loading
261 Decorated functions are registered automatically at loading
244 extension, if an instance named as 'filesetpredicate' is used for
262 extension, if an instance named as 'filesetpredicate' is used for
245 decorating in extension.
263 decorating in extension.
246
264
247 Otherwise, explicit 'fileset.loadpredicate()' is needed.
265 Otherwise, explicit 'fileset.loadpredicate()' is needed.
248 """
266 """
249 _getname = _funcregistrarbase._parsefuncdecl
267 _getname = _funcregistrarbase._parsefuncdecl
250 _docformat = "``%s``\n %s"
268 _docformat = "``%s``\n %s"
251
269
252 def _extrasetup(self, name, func, callstatus=False, callexisting=False):
270 def _extrasetup(self, name, func, callstatus=False, callexisting=False):
253 func._callstatus = callstatus
271 func._callstatus = callstatus
254 func._callexisting = callexisting
272 func._callexisting = callexisting
255
273
256 class _templateregistrarbase(_funcregistrarbase):
274 class _templateregistrarbase(_funcregistrarbase):
257 """Base of decorator to register functions as template specific one
275 """Base of decorator to register functions as template specific one
258 """
276 """
259 _docformat = ":%s: %s"
277 _docformat = ":%s: %s"
260
278
261 class templatekeyword(_templateregistrarbase):
279 class templatekeyword(_templateregistrarbase):
262 """Decorator to register template keyword
280 """Decorator to register template keyword
263
281
264 Usage::
282 Usage::
265
283
266 templatekeyword = registrar.templatekeyword()
284 templatekeyword = registrar.templatekeyword()
267
285
268 @templatekeyword('mykeyword')
286 @templatekeyword('mykeyword')
269 def mykeywordfunc(repo, ctx, templ, cache, revcache, **args):
287 def mykeywordfunc(repo, ctx, templ, cache, revcache, **args):
270 '''Explanation of this template keyword ....
288 '''Explanation of this template keyword ....
271 '''
289 '''
272 pass
290 pass
273
291
274 The first string argument is used also in online help.
292 The first string argument is used also in online help.
275
293
276 'templatekeyword' instance in example above can be used to
294 'templatekeyword' instance in example above can be used to
277 decorate multiple functions.
295 decorate multiple functions.
278
296
279 Decorated functions are registered automatically at loading
297 Decorated functions are registered automatically at loading
280 extension, if an instance named as 'templatekeyword' is used for
298 extension, if an instance named as 'templatekeyword' is used for
281 decorating in extension.
299 decorating in extension.
282
300
283 Otherwise, explicit 'templatekw.loadkeyword()' is needed.
301 Otherwise, explicit 'templatekw.loadkeyword()' is needed.
284 """
302 """
285
303
class templatefilter(_templateregistrarbase):
    """Decorator to register template filter

    Usage::

        templatefilter = registrar.templatefilter()

        @templatefilter('myfilter')
        def myfilterfunc(text):
            '''Explanation of this template filter ....
            '''
            pass

    The first string argument is used also in online help.

    'templatefilter' instance in example above can be used to
    decorate multiple functions.

    Decorated functions are registered automatically at loading
    extension, if an instance named as 'templatefilter' is used for
    decorating in extension.

    Otherwise, explicit 'templatefilters.loadfilter()' is needed.
    """
    # No per-function setup beyond the base class: registration and
    # help formatting are inherited unchanged from _templateregistrarbase.
    # NOTE(review): original docstring said "template filer" and referred
    # to 'templatefilters.loadkeyword()'; the filter loader in the
    # templatefilters module is loadfilter (loadkeyword lives in templatekw).
310
328
class templatefunc(_templateregistrarbase):
    """Decorator to register template function

    Usage::

        templatefunc = registrar.templatefunc()

        @templatefunc('myfunc(arg1, arg2[, arg3])', argspec='arg1 arg2 arg3')
        def myfuncfunc(context, mapping, args):
            '''Explanation of this template function ....
            '''
            pass

    The first string argument is used also in online help.

    If optional 'argspec' is defined, the function will receive 'args' as
    a dict of named arguments. Otherwise 'args' is a list of positional
    arguments.

    'templatefunc' instance in example above can be used to
    decorate multiple functions.

    Decorated functions are registered automatically at loading
    extension, if an instance named as 'templatefunc' is used for
    decorating in extension.

    Otherwise, explicit 'templater.loadfunction()' is needed.
    """
    # The decorator argument is a full declaration like
    # "myfunc(arg1, arg2[, arg3])"; extract just the function name
    # from it when registering.
    _getname = _funcregistrarbase._parsefuncdecl

    def _extrasetup(self, name, func, argspec=None):
        # Stash the argument specification on the registered function so
        # the templater knows whether to pass 'args' as a dict of named
        # arguments (argspec given) or as a positional list (argspec None).
        func._argspec = argspec
343
361
class internalmerge(_funcregistrarbase):
    """Decorator to register in-process merge tool

    Usage::

        internalmerge = registrar.internalmerge()

        @internalmerge('mymerge', internalmerge.mergeonly,
                       onfailure=None, precheck=None):
        def mymergefunc(repo, mynode, orig, fcd, fco, fca,
                        toolconf, files, labels=None):
            '''Explanation of this internal merge tool ....
            '''
            return 1, False # means "conflicted", "no deletion needed"

    The first string argument is used to compose actual merge tool name,
    ":name" and "internal:name" (the latter is historical one).

    The second argument is one of merge types below:

    ========== ======== ======== =========
    merge type precheck premerge fullmerge
    ========== ======== ======== =========
    nomerge    x        x        x
    mergeonly  o        x        o
    fullmerge  o        o        o
    ========== ======== ======== =========

    Optional argument 'onfailure' is the format of warning message
    to be used at failure of merging (target filename is specified
    at formatting). Or, None or so, if warning message should be
    suppressed.

    Optional argument 'precheck' is the function to be used
    before actual invocation of internal merge tool itself.
    It takes as same arguments as internal merge tool does, other than
    'files' and 'labels'. If it returns false value, merging is aborted
    immediately (and file is marked as "unresolved").

    'internalmerge' instance in example above can be used to
    decorate multiple functions.

    Decorated functions are registered automatically at loading
    extension, if an instance named as 'internalmerge' is used for
    decorating in extension.

    Otherwise, explicit 'filemerge.loadinternalmerge()' is needed.
    """
    # Help entries are rendered as reST definition-list items:
    # "``:name``" followed by an indented description line.
    _docformat = "``:%s``\n    %s"

    # merge type definitions (see the table in the class docstring):
    nomerge = None           # no precheck, premerge, or fullmerge phase
    mergeonly = 'mergeonly'  # just the full merge, no premerge
    fullmerge = 'fullmerge'  # both premerge and merge

    def _extrasetup(self, name, func, mergetype,
                    onfailure=None, precheck=None):
        # Stash per-tool configuration on the registered function so the
        # merge machinery can read it back at merge time.
        func.mergetype = mergetype
        func.onfailure = onfailure
        func.precheck = precheck
General Comments 0
You need to be logged in to leave comments. Login now