##// END OF EJS Templates
obsmarker: rename precnode into prednode...
Boris Feld -
r33697:2cb442bc default
parent child Browse files
Show More
@@ -1,3752 +1,3752 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import itertools
11 import itertools
12 import os
12 import os
13 import re
13 import re
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23
23
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 changelog,
26 changelog,
27 copies,
27 copies,
28 crecord as crecordmod,
28 crecord as crecordmod,
29 encoding,
29 encoding,
30 error,
30 error,
31 formatter,
31 formatter,
32 graphmod,
32 graphmod,
33 match as matchmod,
33 match as matchmod,
34 obsolete,
34 obsolete,
35 patch,
35 patch,
36 pathutil,
36 pathutil,
37 phases,
37 phases,
38 pycompat,
38 pycompat,
39 registrar,
39 registrar,
40 revlog,
40 revlog,
41 revset,
41 revset,
42 scmutil,
42 scmutil,
43 smartset,
43 smartset,
44 templatekw,
44 templatekw,
45 templater,
45 templater,
46 util,
46 util,
47 vfs as vfsmod,
47 vfs as vfsmod,
48 )
48 )
stringio = util.stringio

# Reusable option-table fragments shared by several commands.  Each entry is
# (shortname, longname, default, helptext[, metavar]).

dryrunopts = [
    ('n', 'dry-run', None, _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '', _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '', _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '', _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '', _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None,
     _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# Special marker line: everything below it is ignored in the editor text.
_linebelow = "^HG: ------------------------ >8 ------------------------$"
161
161
def ishunk(x):
    """Return True if *x* is a record or crecord hunk instance."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
165
165
def newandmodified(chunks, originalchunks):
    """Return the set of filenames of hunks in *chunks* that belong to a
    newly created file and are not present in *originalchunks*."""
    return set(c.header.filename() for c in chunks
               if ishunk(c) and c.header.isnewfile()
               and c not in originalchunks)
173
173
def parsealiases(cmd):
    """Split a command spec such as "^commit|ci" into its alias names.

    A leading '^' (the "show in short help" marker) is stripped first.
    """
    stripped = cmd.lstrip("^")
    return stripped.split("|")
176
176
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the previous ui.write so the caller can restore it afterwards.
    """
    previouswrite = ui.write

    def colorizedwrite(*args, **kwargs):
        # Route every write through patch.difflabel so that diff chunks get
        # their color labels appended to any label the caller supplied.
        baselabel = kwargs.pop('label', '')
        for chunk, chunklabel in patch.difflabel(lambda: args):
            previouswrite(chunk, label=baselabel + chunklabel)

    setattr(ui, 'write', colorizedwrite)
    return previouswrite
189
189
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter *originalhunks*, via the curses interface when
    *usecurses* is set, otherwise via plain-text prompts.

    *testfile*, when given with curses, drives the test chunk selector.
    """
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        chunkselector = crecordmod.testdecorator(
            testfile, crecordmod.testchunkselector)
    else:
        chunkselector = crecordmod.chunkselector
    return crecordmod.filterpatch(ui, originalhunks, chunkselector, operation)
202
202
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to filter *originalhunks*; return (chunks, opts).

    *operation* is used to build ui messages indicating what kind of
    filtering is being done: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    wantcurses = crecordmod.checkcurses(ui)
    cursestestfile = ui.config('experimental', 'crecordtest')
    # Temporarily wrap ui.write for colorized diff output; always restore it.
    restorewrite = setupwrapcolorwrite(ui)
    try:
        selection = filterchunks(ui, originalhunks, wantcurses,
                                 cursestestfile, operation)
    finally:
        ui.write = restorewrite
    return selection
219
219
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
            filterfn, *pats, **opts):
    """Interactively select changes and delegate the commit to *commitfunc*.

    Aborts when the ui is not interactive, suggesting *cmdsuggest* as the
    non-interactive alternative when one is given.  *filterfn* is invoked to
    let the user pick hunks; *backupall* backs up every changed file instead
    of only the files touched by the selected hunks.
    """
    from . import merge as mergemod
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        def fail(f, msg):
            # used as the match.bad callback below: abort on bad patterns
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            # collect explicitly-named directories so the commit-pattern
            # check below can validate them
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # produce a git-style, date-less diff with function context so the
        # hunk selector has everything it needs
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # headers/hunks without a files() method contribute nothing
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # the backup directory may already exist from a prior run
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # serialize the selected hunks (only for backed-up files) into
            # one patch; dopatch is nonzero iff anything was written
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        extra={"suffix": ".diff"},
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # delete newly-added-and-modified files: the patch application
            # below recreates them (their edited state is in the backups)
            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # restoration is best-effort; never mask the original outcome
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # run the driver with the working-directory lock held
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
402
402
403 def tersestatus(root, statlist, status, ignorefn, ignore):
403 def tersestatus(root, statlist, status, ignorefn, ignore):
404 """
404 """
405 Returns a list of statuses with directory collapsed if all the files in the
405 Returns a list of statuses with directory collapsed if all the files in the
406 directory has the same status.
406 directory has the same status.
407 """
407 """
408
408
    def numfiles(dirname):
        """
        Calculates the number of tracked files in a given directory which also
        includes files which were removed or deleted. Considers ignored files
        if ignore argument is True or 'i' is present in status argument.
        """
        # NOTE(review): a cached count of 0 is falsy and will be recomputed
        # on every call — confirm whether that is intentional.
        if lencache.get(dirname):
            return lencache[dirname]
        if 'i' in status or ignore:
            # ignored files count too: only skip entries that are empty
            # directories
            def match(localpath):
                absolutepath = os.path.join(root, localpath)
                if os.path.isdir(absolutepath) and isemptydir(absolutepath):
                    return True
                return False
        else:
            def match(localpath):
                # there can be a directory whose files are all ignored and
                # hence the directory should also be ignored while counting
                # the number of files or subdirs in its parent directory.
                # This checks the same.
                # XXX: We need a better logic here.
                if os.path.isdir(os.path.join(root, localpath)):
                    return isignoreddir(localpath)
                else:
                    # XXX: there can be files which have the ignored pattern
                    # but are not ignored. That leads to a bug in counting the
                    # number of tracked files in the directory.
                    return ignorefn(localpath)
        lendir = 0
        abspath = os.path.join(root, dirname)
        # There might be cases when a directory does not exist as the whole
        # directory can be removed and/or deleted.
        try:
            for f in os.listdir(abspath):
                localpath = os.path.join(dirname, f)
                if not match(localpath):
                    lendir += 1
        except OSError:
            pass
        # removed/deleted entries are not on disk; add them from absentdir
        lendir += len(absentdir.get(dirname, []))
        lencache[dirname] = lendir
        return lendir
451
451
    def isemptydir(abspath):
        """
        Check whether a directory is empty or not, i.e. there are no files in
        the directory and all its subdirectories.
        """
        for f in os.listdir(abspath):
            fullpath = os.path.join(abspath, f)
            if os.path.isdir(fullpath):
                # recursion here
                ret = isemptydir(fullpath)
                if not ret:
                    return False
            else:
                # a regular file makes the tree non-empty
                return False
        return True
467
467
    def isignoreddir(localpath):
        """Return True if `localpath` directory is ignored or contains only
        ignored files and should hence be considered ignored.
        """
        dirpath = os.path.join(root, localpath)
        # the directory itself matches an ignore pattern
        if ignorefn(dirpath):
            return True
        for f in os.listdir(dirpath):
            filepath = os.path.join(dirpath, f)
            if os.path.isdir(filepath):
                # recursion here
                ret = isignoreddir(os.path.join(localpath, f))
                if not ret:
                    return False
            else:
                # any single non-ignored file disqualifies the directory
                if not ignorefn(os.path.join(localpath, f)):
                    return False
        return True
486
486
    def absentones(removedfiles, missingfiles):
        """
        Returns a dictionary of directories with files in it which are either
        removed or missing (deleted) in them.

        Maps each parent directory to a set of its absent entries; parent
        directories are themselves re-queued so the mapping propagates up the
        tree.
        """
        absentdir = {}
        absentfiles = removedfiles + missingfiles
        while absentfiles:
            f = absentfiles.pop()
            par = os.path.dirname(f)
            # entries at the repository root have no parent to record
            if par == '':
                continue
            # we need to store files rather than the number of files as some
            # files or subdirectories in a directory can be counted twice.
            # This is also why we have used sets here.
            try:
                absentdir[par].add(f)
            except KeyError:
                absentdir[par] = set([f])
            # propagate upwards: the parent itself may now be absent-only
            absentfiles.append(par)
        return absentdir
508
508
509 indexes = {'m': 0, 'a': 1, 'r': 2, 'd': 3, 'u': 4, 'i': 5, 'c': 6}
509 indexes = {'m': 0, 'a': 1, 'r': 2, 'd': 3, 'u': 4, 'i': 5, 'c': 6}
510 # get a dictonary of directories and files which are missing as os.listdir()
510 # get a dictonary of directories and files which are missing as os.listdir()
511 # won't be able to list them.
511 # won't be able to list them.
512 absentdir = absentones(statlist[2], statlist[3])
512 absentdir = absentones(statlist[2], statlist[3])
513 finalrs = [[]] * len(indexes)
513 finalrs = [[]] * len(indexes)
514 didsomethingchanged = False
514 didsomethingchanged = False
515 # dictionary to store number of files and subdir in a directory so that we
515 # dictionary to store number of files and subdir in a directory so that we
516 # don't compute that again.
516 # don't compute that again.
517 lencache = {}
517 lencache = {}
518
518
519 for st in pycompat.bytestr(status):
519 for st in pycompat.bytestr(status):
520
520
521 try:
521 try:
522 ind = indexes[st]
522 ind = indexes[st]
523 except KeyError:
523 except KeyError:
524 # TODO: Need a better error message here
524 # TODO: Need a better error message here
525 raise error.Abort("'%s' not recognized" % st)
525 raise error.Abort("'%s' not recognized" % st)
526
526
527 sfiles = statlist[ind]
527 sfiles = statlist[ind]
528 if not sfiles:
528 if not sfiles:
529 continue
529 continue
530 pardict = {}
530 pardict = {}
531 for a in sfiles:
531 for a in sfiles:
532 par = os.path.dirname(a)
532 par = os.path.dirname(a)
533 pardict.setdefault(par, []).append(a)
533 pardict.setdefault(par, []).append(a)
534
534
535 rs = []
535 rs = []
536 newls = []
536 newls = []
537 for par, files in pardict.iteritems():
537 for par, files in pardict.iteritems():
538 lenpar = numfiles(par)
538 lenpar = numfiles(par)
539 if lenpar == len(files):
539 if lenpar == len(files):
540 newls.append(par)
540 newls.append(par)
541
541
542 if not newls:
542 if not newls:
543 continue
543 continue
544
544
545 while newls:
545 while newls:
546 newel = newls.pop()
546 newel = newls.pop()
547 if newel == '':
547 if newel == '':
548 continue
548 continue
549 parn = os.path.dirname(newel)
549 parn = os.path.dirname(newel)
550 pardict[newel] = []
550 pardict[newel] = []
551 # Adding pycompat.ossep as newel is a directory.
551 # Adding pycompat.ossep as newel is a directory.
552 pardict.setdefault(parn, []).append(newel + pycompat.ossep)
552 pardict.setdefault(parn, []).append(newel + pycompat.ossep)
553 lenpar = numfiles(parn)
553 lenpar = numfiles(parn)
554 if lenpar == len(pardict[parn]):
554 if lenpar == len(pardict[parn]):
555 newls.append(parn)
555 newls.append(parn)
556
556
557 # dict.values() for Py3 compatibility
557 # dict.values() for Py3 compatibility
558 for files in pardict.values():
558 for files in pardict.values():
559 rs.extend(files)
559 rs.extend(files)
560
560
561 rs.sort()
561 rs.sort()
562 finalrs[ind] = rs
562 finalrs[ind] = rs
563 didsomethingchanged = True
563 didsomethingchanged = True
564
564
565 # If nothing is changed, make sure the order of files is preserved.
565 # If nothing is changed, make sure the order of files is preserved.
566 if not didsomethingchanged:
566 if not didsomethingchanged:
567 return statlist
567 return statlist
568
568
569 for x in xrange(len(indexes)):
569 for x in xrange(len(indexes)):
570 if not finalrs[x]:
570 if not finalrs[x]:
571 finalrs[x] = statlist[x]
571 finalrs[x] = statlist[x]
572
572
573 return finalrs
573 return finalrs
574
574
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)

        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            # prefix match: take the first alias starting with cmd
            found = next((a for a in aliases if a.startswith(cmd)), None)

        if found is None:
            continue
        if aliases[0].startswith("debug") or found.startswith("debug"):
            debugchoice[found] = (aliases, table[entry])
        else:
            choice[found] = (aliases, table[entry])

    # debug commands are only offered when nothing else matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
612
612
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    # exact (or unambiguous prefix resolved to cmd itself)
    if cmd in choice:
        return choice[cmd]

    # several prefix matches: refuse to guess
    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    # exactly one prefix match
    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
628
628
def findrepo(p):
    """Walk upwards from p looking for a directory containing '.hg'.

    Returns the repository root, or None if the filesystem root is
    reached without finding one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # dirname() is a fixed point: we hit the filesystem root
            return None
        p = parent

    return p
636
636
def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # any modified, added, removed or deleted file makes the wdir dirty
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)
    # recurse into subrepos as well
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)
654
654
def logmessage(ui, opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))

    # -m wins, and with neither option we just hand back whatever -m gave us
    if message or not logfile:
        return message

    try:
        if isstdiofilename(logfile):
            return ui.fin.read()
        # normalize line endings while reading the file
        return '\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(_("can't read commit message '%s': %s") %
                          (logfile, inst.strerror))
673
673
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a changectx with two parents is a merge
        ismerge = len(ctxorbool.parents()) > 1

    if ismerge:
        return baseformname + ".merge"
    return baseformname + ".normal"
690
690
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        # force the editor: needed when a post-edit hook or extra
        # message has been supplied, even without --edit
        def editor(r, c, s):
            return commitforceeditor(r, c, s,
                                     finishdesc=finishdesc,
                                     extramsg=extramsg,
                                     editform=editform)
        return editor
    if editform:
        def editor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return editor
    return commiteditor
721
721
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if not limit:
        # absent, empty or zero means "no limit"
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise error.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_('limit must be positive'))
    return limit
735
735
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """expand format specs in 'pat' into an output filename

    Recognized specs (each only available when the corresponding
    argument is supplied):
      %%  literal '%'
      %b  basename of the repository root
      %H  full changeset hash (requires node)
      %h  short changeset hash (requires node)
      %R  changeset revision number (requires node)
      %r  zero-padded revision number, width 'revwidth' (requires node)
      %m  description 'desc' with non-word characters replaced by '_'
      %N  'total' (number of patches)
      %n  'seqno', zero-padded to the width of 'total' when given
      %s  basename of 'pathname'
      %d  dirname of 'pathname', or '.' if it has none
      %p  'pathname' itself

    Raises error.Abort on an unrecognized spec.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        # raw string: '\w' is not a valid str escape
        'm': lambda: re.sub(r'[^\w]', '_', str(desc))
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
            # 'r' also needs revwidth, so it is built here rather than
            # in node_expander
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total count
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
781
781
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    # the empty/missing pattern and the conventional '-' both mean stdio
    if not pat:
        return True
    return pat == '-'
785
785
786 class _unclosablefile(object):
786 class _unclosablefile(object):
787 def __init__(self, fp):
787 def __init__(self, fp):
788 self._fp = fp
788 self._fp = fp
789
789
790 def close(self):
790 def close(self):
791 pass
791 pass
792
792
793 def __iter__(self):
793 def __iter__(self):
794 return iter(self._fp)
794 return iter(self._fp)
795
795
796 def __getattr__(self, attr):
796 def __getattr__(self, attr):
797 return getattr(self._fp, attr)
797 return getattr(self._fp, attr)
798
798
799 def __enter__(self):
799 def __enter__(self):
800 return self
800 return self
801
801
802 def __exit__(self, exc_type, exc_value, exc_tb):
802 def __exit__(self, exc_type, exc_value, exc_tb):
803 pass
803 pass
804
804
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open a file object for the filename produced by makefilename().

    A '-' or empty pattern yields the ui's stdin/stdout wrapped so that
    closing it is a no-op. 'modemap', when given, tracks per-file modes
    so that the first write truncates and later writes append.
    """
    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        stream = repo.ui.fout if writable else repo.ui.fin
        return _unclosablefile(stream)

    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # subsequent opens of the same file must append, not truncate
            modemap[fn] = 'ab'
    return open(fn, mode)
823
823
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']

    # validate the option combination before touching anything
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog

    if r:
        return r

    # fall back to opening a raw revlog from a '.i'/'.d' file on disk
    if not file_:
        raise error.CommandError(cmd, _('invalid arguments'))
    if not os.path.isfile(file_):
        raise error.Abort(_("revlog '%s' not found") % file_)
    return revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                         file_[:-2] + ".i")
868
868
869 def copy(ui, repo, pats, opts, rename=False):
869 def copy(ui, repo, pats, opts, rename=False):
870 # called with the repo lock held
870 # called with the repo lock held
871 #
871 #
872 # hgsep => pathname that uses "/" to separate directories
872 # hgsep => pathname that uses "/" to separate directories
873 # ossep => pathname that uses os.sep to separate directories
873 # ossep => pathname that uses os.sep to separate directories
874 cwd = repo.getcwd()
874 cwd = repo.getcwd()
875 targets = {}
875 targets = {}
876 after = opts.get("after")
876 after = opts.get("after")
877 dryrun = opts.get("dry_run")
877 dryrun = opts.get("dry_run")
878 wctx = repo[None]
878 wctx = repo[None]
879
879
880 def walkpat(pat):
880 def walkpat(pat):
881 srcs = []
881 srcs = []
882 if after:
882 if after:
883 badstates = '?'
883 badstates = '?'
884 else:
884 else:
885 badstates = '?r'
885 badstates = '?r'
886 m = scmutil.match(wctx, [pat], opts, globbed=True)
886 m = scmutil.match(wctx, [pat], opts, globbed=True)
887 for abs in wctx.walk(m):
887 for abs in wctx.walk(m):
888 state = repo.dirstate[abs]
888 state = repo.dirstate[abs]
889 rel = m.rel(abs)
889 rel = m.rel(abs)
890 exact = m.exact(abs)
890 exact = m.exact(abs)
891 if state in badstates:
891 if state in badstates:
892 if exact and state == '?':
892 if exact and state == '?':
893 ui.warn(_('%s: not copying - file is not managed\n') % rel)
893 ui.warn(_('%s: not copying - file is not managed\n') % rel)
894 if exact and state == 'r':
894 if exact and state == 'r':
895 ui.warn(_('%s: not copying - file has been marked for'
895 ui.warn(_('%s: not copying - file has been marked for'
896 ' remove\n') % rel)
896 ' remove\n') % rel)
897 continue
897 continue
898 # abs: hgsep
898 # abs: hgsep
899 # rel: ossep
899 # rel: ossep
900 srcs.append((abs, rel, exact))
900 srcs.append((abs, rel, exact))
901 return srcs
901 return srcs
902
902
903 # abssrc: hgsep
903 # abssrc: hgsep
904 # relsrc: ossep
904 # relsrc: ossep
905 # otarget: ossep
905 # otarget: ossep
906 def copyfile(abssrc, relsrc, otarget, exact):
906 def copyfile(abssrc, relsrc, otarget, exact):
907 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
907 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
908 if '/' in abstarget:
908 if '/' in abstarget:
909 # We cannot normalize abstarget itself, this would prevent
909 # We cannot normalize abstarget itself, this would prevent
910 # case only renames, like a => A.
910 # case only renames, like a => A.
911 abspath, absname = abstarget.rsplit('/', 1)
911 abspath, absname = abstarget.rsplit('/', 1)
912 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
912 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
913 reltarget = repo.pathto(abstarget, cwd)
913 reltarget = repo.pathto(abstarget, cwd)
914 target = repo.wjoin(abstarget)
914 target = repo.wjoin(abstarget)
915 src = repo.wjoin(abssrc)
915 src = repo.wjoin(abssrc)
916 state = repo.dirstate[abstarget]
916 state = repo.dirstate[abstarget]
917
917
918 scmutil.checkportable(ui, abstarget)
918 scmutil.checkportable(ui, abstarget)
919
919
920 # check for collisions
920 # check for collisions
921 prevsrc = targets.get(abstarget)
921 prevsrc = targets.get(abstarget)
922 if prevsrc is not None:
922 if prevsrc is not None:
923 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
923 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
924 (reltarget, repo.pathto(abssrc, cwd),
924 (reltarget, repo.pathto(abssrc, cwd),
925 repo.pathto(prevsrc, cwd)))
925 repo.pathto(prevsrc, cwd)))
926 return
926 return
927
927
928 # check for overwrites
928 # check for overwrites
929 exists = os.path.lexists(target)
929 exists = os.path.lexists(target)
930 samefile = False
930 samefile = False
931 if exists and abssrc != abstarget:
931 if exists and abssrc != abstarget:
932 if (repo.dirstate.normalize(abssrc) ==
932 if (repo.dirstate.normalize(abssrc) ==
933 repo.dirstate.normalize(abstarget)):
933 repo.dirstate.normalize(abstarget)):
934 if not rename:
934 if not rename:
935 ui.warn(_("%s: can't copy - same file\n") % reltarget)
935 ui.warn(_("%s: can't copy - same file\n") % reltarget)
936 return
936 return
937 exists = False
937 exists = False
938 samefile = True
938 samefile = True
939
939
940 if not after and exists or after and state in 'mn':
940 if not after and exists or after and state in 'mn':
941 if not opts['force']:
941 if not opts['force']:
942 if state in 'mn':
942 if state in 'mn':
943 msg = _('%s: not overwriting - file already committed\n')
943 msg = _('%s: not overwriting - file already committed\n')
944 if after:
944 if after:
945 flags = '--after --force'
945 flags = '--after --force'
946 else:
946 else:
947 flags = '--force'
947 flags = '--force'
948 if rename:
948 if rename:
949 hint = _('(hg rename %s to replace the file by '
949 hint = _('(hg rename %s to replace the file by '
950 'recording a rename)\n') % flags
950 'recording a rename)\n') % flags
951 else:
951 else:
952 hint = _('(hg copy %s to replace the file by '
952 hint = _('(hg copy %s to replace the file by '
953 'recording a copy)\n') % flags
953 'recording a copy)\n') % flags
954 else:
954 else:
955 msg = _('%s: not overwriting - file exists\n')
955 msg = _('%s: not overwriting - file exists\n')
956 if rename:
956 if rename:
957 hint = _('(hg rename --after to record the rename)\n')
957 hint = _('(hg rename --after to record the rename)\n')
958 else:
958 else:
959 hint = _('(hg copy --after to record the copy)\n')
959 hint = _('(hg copy --after to record the copy)\n')
960 ui.warn(msg % reltarget)
960 ui.warn(msg % reltarget)
961 ui.warn(hint)
961 ui.warn(hint)
962 return
962 return
963
963
964 if after:
964 if after:
965 if not exists:
965 if not exists:
966 if rename:
966 if rename:
967 ui.warn(_('%s: not recording move - %s does not exist\n') %
967 ui.warn(_('%s: not recording move - %s does not exist\n') %
968 (relsrc, reltarget))
968 (relsrc, reltarget))
969 else:
969 else:
970 ui.warn(_('%s: not recording copy - %s does not exist\n') %
970 ui.warn(_('%s: not recording copy - %s does not exist\n') %
971 (relsrc, reltarget))
971 (relsrc, reltarget))
972 return
972 return
973 elif not dryrun:
973 elif not dryrun:
974 try:
974 try:
975 if exists:
975 if exists:
976 os.unlink(target)
976 os.unlink(target)
977 targetdir = os.path.dirname(target) or '.'
977 targetdir = os.path.dirname(target) or '.'
978 if not os.path.isdir(targetdir):
978 if not os.path.isdir(targetdir):
979 os.makedirs(targetdir)
979 os.makedirs(targetdir)
980 if samefile:
980 if samefile:
981 tmp = target + "~hgrename"
981 tmp = target + "~hgrename"
982 os.rename(src, tmp)
982 os.rename(src, tmp)
983 os.rename(tmp, target)
983 os.rename(tmp, target)
984 else:
984 else:
985 util.copyfile(src, target)
985 util.copyfile(src, target)
986 srcexists = True
986 srcexists = True
987 except IOError as inst:
987 except IOError as inst:
988 if inst.errno == errno.ENOENT:
988 if inst.errno == errno.ENOENT:
989 ui.warn(_('%s: deleted in working directory\n') % relsrc)
989 ui.warn(_('%s: deleted in working directory\n') % relsrc)
990 srcexists = False
990 srcexists = False
991 else:
991 else:
992 ui.warn(_('%s: cannot copy - %s\n') %
992 ui.warn(_('%s: cannot copy - %s\n') %
993 (relsrc, inst.strerror))
993 (relsrc, inst.strerror))
994 return True # report a failure
994 return True # report a failure
995
995
996 if ui.verbose or not exact:
996 if ui.verbose or not exact:
997 if rename:
997 if rename:
998 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
998 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
999 else:
999 else:
1000 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1000 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1001
1001
1002 targets[abstarget] = abssrc
1002 targets[abstarget] = abssrc
1003
1003
1004 # fix up dirstate
1004 # fix up dirstate
1005 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1005 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1006 dryrun=dryrun, cwd=cwd)
1006 dryrun=dryrun, cwd=cwd)
1007 if rename and not dryrun:
1007 if rename and not dryrun:
1008 if not after and srcexists and not samefile:
1008 if not after and srcexists and not samefile:
1009 repo.wvfs.unlinkpath(abssrc)
1009 repo.wvfs.unlinkpath(abssrc)
1010 wctx.forget([abssrc])
1010 wctx.forget([abssrc])
1011
1011
1012 # pat: ossep
1012 # pat: ossep
1013 # dest ossep
1013 # dest ossep
1014 # srcs: list of (hgsep, hgsep, ossep, bool)
1014 # srcs: list of (hgsep, hgsep, ossep, bool)
1015 # return: function that takes hgsep and returns ossep
1015 # return: function that takes hgsep and returns ossep
1016 def targetpathfn(pat, dest, srcs):
1016 def targetpathfn(pat, dest, srcs):
1017 if os.path.isdir(pat):
1017 if os.path.isdir(pat):
1018 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1018 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1019 abspfx = util.localpath(abspfx)
1019 abspfx = util.localpath(abspfx)
1020 if destdirexists:
1020 if destdirexists:
1021 striplen = len(os.path.split(abspfx)[0])
1021 striplen = len(os.path.split(abspfx)[0])
1022 else:
1022 else:
1023 striplen = len(abspfx)
1023 striplen = len(abspfx)
1024 if striplen:
1024 if striplen:
1025 striplen += len(pycompat.ossep)
1025 striplen += len(pycompat.ossep)
1026 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1026 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1027 elif destdirexists:
1027 elif destdirexists:
1028 res = lambda p: os.path.join(dest,
1028 res = lambda p: os.path.join(dest,
1029 os.path.basename(util.localpath(p)))
1029 os.path.basename(util.localpath(p)))
1030 else:
1030 else:
1031 res = lambda p: dest
1031 res = lambda p: dest
1032 return res
1032 return res
1033
1033
1034 # pat: ossep
1034 # pat: ossep
1035 # dest ossep
1035 # dest ossep
1036 # srcs: list of (hgsep, hgsep, ossep, bool)
1036 # srcs: list of (hgsep, hgsep, ossep, bool)
1037 # return: function that takes hgsep and returns ossep
1037 # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        """Return a function mapping an absolute source path to its
        destination path, for the --after case.

        NOTE(review): unlike the non-after variant, this scores candidate
        strip depths by how many destination files already exist
        (os.path.lexists) — presumably because the copy/rename already
        happened on disk; confirm against the caller.
        """
        if matchmod.patkind(pat):
            # a mercurial pattern: flatten each source into dest by basename
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count sources whose target (after stripping
                    # 'striplen' chars from the source path) already
                    # exists under dest
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    # dest contains a directory named like the pattern's
                    # last component: keep whichever strip depth matches
                    # more existing target files
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res
1077
1077
    # Last argument is the destination; everything before it is a source
    # pattern.
    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    # a symlink to a directory is treated as a file, not a directory
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    # pick the target-path mapping strategy: --after means sources may
    # already have been moved on disk
    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            # patterns that match nothing are silently skipped here;
            # walkpat is expected to have reported them
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            # copyfile returns truthy on failure
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    # truthy when at least one copy failed
    return errors != 0
1114
1114
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' hooks are run before the commit is made and are provided the
# following arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' hooks are run after the commit is made and are provided the
# following argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1135
1135
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that updates a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a (msg, node, rejects) tuple; (None, None, False) when the
    hunk contained no patch data.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    # command-line options win over metadata recorded in the patch header
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    # --similarity is given in percent; patch.patch() expects a 0..1 ratio
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        # no patch data was extracted; nothing to import
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        # NOTE: message may be None here; the debug line then prints "None"
        ui.debug('message:\n%s\n' % message)

        # ensure there are always two entries to compare p1/p2 against
        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                # header parents unknown in this repo: fall back to the
                # requested parents
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # apply the patch to the working directory
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except patch.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                # with --partial, remember the failure and commit what
                # applied
                if partial:
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                extra = {}
                # let extensions contribute to the changeset's extra dict
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                overrides = {}
                if partial:
                    # a fully-rejected patch must still produce a commit
                    overrides[('ui', 'allowemptycommit')] = True
                with repo.ui.configoverride(overrides, 'import'):
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                for idfunc in extrapostimport:
                    extrapostimportmap[idfunc](repo[n])
        else:
            # --bypass: commit straight into the store without touching
            # the working directory
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except patch.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.memctx(repo, (p1.node(), p2.node()),
                                        message,
                                        files=files,
                                        filectxfn=store,
                                        user=user,
                                        date=date,
                                        branch=branch,
                                        editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        # presumably a temporary file created by patch.extract() —
        # confirm; always remove it
        os.unlink(tmpname)
1305
1305
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# the function has to return a string to be added to the header or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1313
1313
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
    """Emit one "# HG changeset patch" document for *ctx* via *write*.

    Writes the standard header lines, any extension-provided headers
    (see extraexport/extraexportmap), the description, and finally the
    diff against the chosen parent.
    """
    node = scmutil.binnode(ctx)
    parents = [p.node() for p in ctx.parents() if p]
    if switch_parent:
        # diff against the second parent instead of the first
        parents.reverse()
    prev = parents[0] if parents else nullid

    branch = ctx.branch()
    write("# HG changeset patch\n")
    write("# User %s\n" % ctx.user())
    write("# Date %d %d\n" % ctx.date())
    write("# %s\n" % util.datestr(ctx.date()))
    # the default branch is left implicit
    if branch and branch != 'default':
        write("# Branch %s\n" % branch)
    write("# Node ID %s\n" % hex(node))
    write("# Parent %s\n" % hex(prev))
    if len(parents) > 1:
        write("# Parent %s\n" % hex(parents[1]))

    # extension-contributed header lines, in registration order
    for headerid in extraexport:
        extraheader = extraexportmap[headerid](seqno, ctx)
        if extraheader is not None:
            write('# %s\n' % extraheader)

    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
        write(chunk, label=label)
1346
1346
def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named using
                              the given template.
        Neither fp nor template specified: All revs written to repo.ui.write()
    '''

    if not revs:
        # nothing to export; also avoids max() below raising ValueError
        # on an empty sequence
        return
    total = len(revs)
    # widest rev number, used by makefileobj's %r/%R template expansion
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    write = None
    dest = '<unnamed>'
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            # one output file per revision, named from the template
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            def write(s, **kw):
                fo.write(s)
        if not dest.startswith('<'):
            # only report real destinations, not '<unnamed>'-style markers
            repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()
1405
1405
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.

    Writes a full diff (stat=False) or a diffstat summary (stat=True)
    between node1 and node2 to the ui, or to fp when given.  root
    restricts the output to paths under that directory; listsubrepos
    additionally recurses into subrepositories.
    '''
    if fp is None:
        write = ui.write
    else:
        # fp.write does not take a label keyword, so drop labels
        def write(s, **kw):
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat needs no context lines
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1462
1462
1463 def _changesetlabels(ctx):
1463 def _changesetlabels(ctx):
1464 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1464 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1465 if ctx.obsolete():
1465 if ctx.obsolete():
1466 labels.append('changeset.obsolete')
1466 labels.append('changeset.obsolete')
1467 if ctx.isunstable():
1467 if ctx.isunstable():
1468 labels.append('changeset.troubled')
1468 labels.append('changeset.troubled')
1469 for instability in ctx.instabilities():
1469 for instability in ctx.instabilities():
1470 labels.append('trouble.%s' % instability)
1470 labels.append('trouble.%s' % instability)
1471 return ' '.join(labels)
1471 return ' '.join(labels)
1472
1472
1473 class changeset_printer(object):
1473 class changeset_printer(object):
1474 '''show changeset information when templating not requested.'''
1474 '''show changeset information when templating not requested.'''
1475
1475
    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        # when True, show() buffers output per-rev into self.hunk for a
        # later flush(); when False it writes straight to the ui
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        # per-rev buffered text, drained by flush()
        self.header = {}
        self.hunk = {}
        # last header written; flush() uses it to avoid repetition
        self.lastheader = None
        # optional trailing text written by close()
        self.footer = None
1486
1486
1487 def flush(self, ctx):
1487 def flush(self, ctx):
1488 rev = ctx.rev()
1488 rev = ctx.rev()
1489 if rev in self.header:
1489 if rev in self.header:
1490 h = self.header[rev]
1490 h = self.header[rev]
1491 if h != self.lastheader:
1491 if h != self.lastheader:
1492 self.lastheader = h
1492 self.lastheader = h
1493 self.ui.write(h)
1493 self.ui.write(h)
1494 del self.header[rev]
1494 del self.header[rev]
1495 if rev in self.hunk:
1495 if rev in self.hunk:
1496 self.ui.write(self.hunk[rev])
1496 self.ui.write(self.hunk[rev])
1497 del self.hunk[rev]
1497 del self.hunk[rev]
1498 return 1
1498 return 1
1499 return 0
1499 return 0
1500
1500
1501 def close(self):
1501 def close(self):
1502 if self.footer:
1502 if self.footer:
1503 self.ui.write(self.footer)
1503 self.ui.write(self.footer)
1504
1504
1505 def show(self, ctx, copies=None, matchfn=None, **props):
1505 def show(self, ctx, copies=None, matchfn=None, **props):
1506 props = pycompat.byteskwargs(props)
1506 props = pycompat.byteskwargs(props)
1507 if self.buffered:
1507 if self.buffered:
1508 self.ui.pushbuffer(labeled=True)
1508 self.ui.pushbuffer(labeled=True)
1509 self._show(ctx, copies, matchfn, props)
1509 self._show(ctx, copies, matchfn, props)
1510 self.hunk[ctx.rev()] = self.ui.popbuffer()
1510 self.hunk[ctx.rev()] = self.ui.popbuffer()
1511 else:
1511 else:
1512 self._show(ctx, copies, matchfn, props)
1512 self._show(ctx, copies, matchfn, props)
1513
1513
1514 def _show(self, ctx, copies, matchfn, props):
1514 def _show(self, ctx, copies, matchfn, props):
1515 '''show a single changeset or file revision'''
1515 '''show a single changeset or file revision'''
1516 changenode = ctx.node()
1516 changenode = ctx.node()
1517 rev = ctx.rev()
1517 rev = ctx.rev()
1518 if self.ui.debugflag:
1518 if self.ui.debugflag:
1519 hexfunc = hex
1519 hexfunc = hex
1520 else:
1520 else:
1521 hexfunc = short
1521 hexfunc = short
1522 # as of now, wctx.node() and wctx.rev() return None, but we want to
1522 # as of now, wctx.node() and wctx.rev() return None, but we want to
1523 # show the same values as {node} and {rev} templatekw
1523 # show the same values as {node} and {rev} templatekw
1524 revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
1524 revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
1525
1525
1526 if self.ui.quiet:
1526 if self.ui.quiet:
1527 self.ui.write("%d:%s\n" % revnode, label='log.node')
1527 self.ui.write("%d:%s\n" % revnode, label='log.node')
1528 return
1528 return
1529
1529
1530 date = util.datestr(ctx.date())
1530 date = util.datestr(ctx.date())
1531
1531
1532 # i18n: column positioning for "hg log"
1532 # i18n: column positioning for "hg log"
1533 self.ui.write(_("changeset: %d:%s\n") % revnode,
1533 self.ui.write(_("changeset: %d:%s\n") % revnode,
1534 label=_changesetlabels(ctx))
1534 label=_changesetlabels(ctx))
1535
1535
1536 # branches are shown first before any other names due to backwards
1536 # branches are shown first before any other names due to backwards
1537 # compatibility
1537 # compatibility
1538 branch = ctx.branch()
1538 branch = ctx.branch()
1539 # don't show the default branch name
1539 # don't show the default branch name
1540 if branch != 'default':
1540 if branch != 'default':
1541 # i18n: column positioning for "hg log"
1541 # i18n: column positioning for "hg log"
1542 self.ui.write(_("branch: %s\n") % branch,
1542 self.ui.write(_("branch: %s\n") % branch,
1543 label='log.branch')
1543 label='log.branch')
1544
1544
1545 for nsname, ns in self.repo.names.iteritems():
1545 for nsname, ns in self.repo.names.iteritems():
1546 # branches has special logic already handled above, so here we just
1546 # branches has special logic already handled above, so here we just
1547 # skip it
1547 # skip it
1548 if nsname == 'branches':
1548 if nsname == 'branches':
1549 continue
1549 continue
1550 # we will use the templatename as the color name since those two
1550 # we will use the templatename as the color name since those two
1551 # should be the same
1551 # should be the same
1552 for name in ns.names(self.repo, changenode):
1552 for name in ns.names(self.repo, changenode):
1553 self.ui.write(ns.logfmt % name,
1553 self.ui.write(ns.logfmt % name,
1554 label='log.%s' % ns.colorname)
1554 label='log.%s' % ns.colorname)
1555 if self.ui.debugflag:
1555 if self.ui.debugflag:
1556 # i18n: column positioning for "hg log"
1556 # i18n: column positioning for "hg log"
1557 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1557 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1558 label='log.phase')
1558 label='log.phase')
1559 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1559 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1560 label = 'log.parent changeset.%s' % pctx.phasestr()
1560 label = 'log.parent changeset.%s' % pctx.phasestr()
1561 # i18n: column positioning for "hg log"
1561 # i18n: column positioning for "hg log"
1562 self.ui.write(_("parent: %d:%s\n")
1562 self.ui.write(_("parent: %d:%s\n")
1563 % (pctx.rev(), hexfunc(pctx.node())),
1563 % (pctx.rev(), hexfunc(pctx.node())),
1564 label=label)
1564 label=label)
1565
1565
1566 if self.ui.debugflag and rev is not None:
1566 if self.ui.debugflag and rev is not None:
1567 mnode = ctx.manifestnode()
1567 mnode = ctx.manifestnode()
1568 # i18n: column positioning for "hg log"
1568 # i18n: column positioning for "hg log"
1569 self.ui.write(_("manifest: %d:%s\n") %
1569 self.ui.write(_("manifest: %d:%s\n") %
1570 (self.repo.manifestlog._revlog.rev(mnode),
1570 (self.repo.manifestlog._revlog.rev(mnode),
1571 hex(mnode)),
1571 hex(mnode)),
1572 label='ui.debug log.manifest')
1572 label='ui.debug log.manifest')
1573 # i18n: column positioning for "hg log"
1573 # i18n: column positioning for "hg log"
1574 self.ui.write(_("user: %s\n") % ctx.user(),
1574 self.ui.write(_("user: %s\n") % ctx.user(),
1575 label='log.user')
1575 label='log.user')
1576 # i18n: column positioning for "hg log"
1576 # i18n: column positioning for "hg log"
1577 self.ui.write(_("date: %s\n") % date,
1577 self.ui.write(_("date: %s\n") % date,
1578 label='log.date')
1578 label='log.date')
1579
1579
1580 if ctx.isunstable():
1580 if ctx.isunstable():
1581 # i18n: column positioning for "hg log"
1581 # i18n: column positioning for "hg log"
1582 instabilities = ctx.instabilities()
1582 instabilities = ctx.instabilities()
1583 self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
1583 self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
1584 label='log.trouble')
1584 label='log.trouble')
1585
1585
1586 self._exthook(ctx)
1586 self._exthook(ctx)
1587
1587
1588 if self.ui.debugflag:
1588 if self.ui.debugflag:
1589 files = ctx.p1().status(ctx)[:3]
1589 files = ctx.p1().status(ctx)[:3]
1590 for key, value in zip([# i18n: column positioning for "hg log"
1590 for key, value in zip([# i18n: column positioning for "hg log"
1591 _("files:"),
1591 _("files:"),
1592 # i18n: column positioning for "hg log"
1592 # i18n: column positioning for "hg log"
1593 _("files+:"),
1593 _("files+:"),
1594 # i18n: column positioning for "hg log"
1594 # i18n: column positioning for "hg log"
1595 _("files-:")], files):
1595 _("files-:")], files):
1596 if value:
1596 if value:
1597 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1597 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1598 label='ui.debug log.files')
1598 label='ui.debug log.files')
1599 elif ctx.files() and self.ui.verbose:
1599 elif ctx.files() and self.ui.verbose:
1600 # i18n: column positioning for "hg log"
1600 # i18n: column positioning for "hg log"
1601 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1601 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1602 label='ui.note log.files')
1602 label='ui.note log.files')
1603 if copies and self.ui.verbose:
1603 if copies and self.ui.verbose:
1604 copies = ['%s (%s)' % c for c in copies]
1604 copies = ['%s (%s)' % c for c in copies]
1605 # i18n: column positioning for "hg log"
1605 # i18n: column positioning for "hg log"
1606 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1606 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1607 label='ui.note log.copies')
1607 label='ui.note log.copies')
1608
1608
1609 extra = ctx.extra()
1609 extra = ctx.extra()
1610 if extra and self.ui.debugflag:
1610 if extra and self.ui.debugflag:
1611 for key, value in sorted(extra.items()):
1611 for key, value in sorted(extra.items()):
1612 # i18n: column positioning for "hg log"
1612 # i18n: column positioning for "hg log"
1613 self.ui.write(_("extra: %s=%s\n")
1613 self.ui.write(_("extra: %s=%s\n")
1614 % (key, util.escapestr(value)),
1614 % (key, util.escapestr(value)),
1615 label='ui.debug log.extra')
1615 label='ui.debug log.extra')
1616
1616
1617 description = ctx.description().strip()
1617 description = ctx.description().strip()
1618 if description:
1618 if description:
1619 if self.ui.verbose:
1619 if self.ui.verbose:
1620 self.ui.write(_("description:\n"),
1620 self.ui.write(_("description:\n"),
1621 label='ui.note log.description')
1621 label='ui.note log.description')
1622 self.ui.write(description,
1622 self.ui.write(description,
1623 label='ui.note log.description')
1623 label='ui.note log.description')
1624 self.ui.write("\n\n")
1624 self.ui.write("\n\n")
1625 else:
1625 else:
1626 # i18n: column positioning for "hg log"
1626 # i18n: column positioning for "hg log"
1627 self.ui.write(_("summary: %s\n") %
1627 self.ui.write(_("summary: %s\n") %
1628 description.splitlines()[0],
1628 description.splitlines()[0],
1629 label='log.summary')
1629 label='log.summary')
1630 self.ui.write("\n")
1630 self.ui.write("\n")
1631
1631
1632 self.showpatch(ctx, matchfn)
1632 self.showpatch(ctx, matchfn)
1633
1633
1634 def _exthook(self, ctx):
1634 def _exthook(self, ctx):
1635 '''empty method used by extension as a hook point
1635 '''empty method used by extension as a hook point
1636 '''
1636 '''
1637 pass
1637 pass
1638
1638
1639 def showpatch(self, ctx, matchfn):
1639 def showpatch(self, ctx, matchfn):
1640 if not matchfn:
1640 if not matchfn:
1641 matchfn = self.matchfn
1641 matchfn = self.matchfn
1642 if matchfn:
1642 if matchfn:
1643 stat = self.diffopts.get('stat')
1643 stat = self.diffopts.get('stat')
1644 diff = self.diffopts.get('patch')
1644 diff = self.diffopts.get('patch')
1645 diffopts = patch.diffallopts(self.ui, self.diffopts)
1645 diffopts = patch.diffallopts(self.ui, self.diffopts)
1646 node = ctx.node()
1646 node = ctx.node()
1647 prev = ctx.p1().node()
1647 prev = ctx.p1().node()
1648 if stat:
1648 if stat:
1649 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1649 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1650 match=matchfn, stat=True)
1650 match=matchfn, stat=True)
1651 if diff:
1651 if diff:
1652 if stat:
1652 if stat:
1653 self.ui.write("\n")
1653 self.ui.write("\n")
1654 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1654 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1655 match=matchfn, stat=False)
1655 match=matchfn, stat=False)
1656 self.ui.write("\n")
1656 self.ui.write("\n")
1657
1657
1658 class jsonchangeset(changeset_printer):
1658 class jsonchangeset(changeset_printer):
1659 '''format changeset information.'''
1659 '''format changeset information.'''
1660
1660
1661 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1661 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1662 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1662 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1663 self.cache = {}
1663 self.cache = {}
1664 self._first = True
1664 self._first = True
1665
1665
1666 def close(self):
1666 def close(self):
1667 if not self._first:
1667 if not self._first:
1668 self.ui.write("\n]\n")
1668 self.ui.write("\n]\n")
1669 else:
1669 else:
1670 self.ui.write("[]\n")
1670 self.ui.write("[]\n")
1671
1671
1672 def _show(self, ctx, copies, matchfn, props):
1672 def _show(self, ctx, copies, matchfn, props):
1673 '''show a single changeset or file revision'''
1673 '''show a single changeset or file revision'''
1674 rev = ctx.rev()
1674 rev = ctx.rev()
1675 if rev is None:
1675 if rev is None:
1676 jrev = jnode = 'null'
1676 jrev = jnode = 'null'
1677 else:
1677 else:
1678 jrev = '%d' % rev
1678 jrev = '%d' % rev
1679 jnode = '"%s"' % hex(ctx.node())
1679 jnode = '"%s"' % hex(ctx.node())
1680 j = encoding.jsonescape
1680 j = encoding.jsonescape
1681
1681
1682 if self._first:
1682 if self._first:
1683 self.ui.write("[\n {")
1683 self.ui.write("[\n {")
1684 self._first = False
1684 self._first = False
1685 else:
1685 else:
1686 self.ui.write(",\n {")
1686 self.ui.write(",\n {")
1687
1687
1688 if self.ui.quiet:
1688 if self.ui.quiet:
1689 self.ui.write(('\n "rev": %s') % jrev)
1689 self.ui.write(('\n "rev": %s') % jrev)
1690 self.ui.write((',\n "node": %s') % jnode)
1690 self.ui.write((',\n "node": %s') % jnode)
1691 self.ui.write('\n }')
1691 self.ui.write('\n }')
1692 return
1692 return
1693
1693
1694 self.ui.write(('\n "rev": %s') % jrev)
1694 self.ui.write(('\n "rev": %s') % jrev)
1695 self.ui.write((',\n "node": %s') % jnode)
1695 self.ui.write((',\n "node": %s') % jnode)
1696 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1696 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1697 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1697 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1698 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1698 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1699 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1699 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1700 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1700 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1701
1701
1702 self.ui.write((',\n "bookmarks": [%s]') %
1702 self.ui.write((',\n "bookmarks": [%s]') %
1703 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1703 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1704 self.ui.write((',\n "tags": [%s]') %
1704 self.ui.write((',\n "tags": [%s]') %
1705 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1705 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1706 self.ui.write((',\n "parents": [%s]') %
1706 self.ui.write((',\n "parents": [%s]') %
1707 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1707 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1708
1708
1709 if self.ui.debugflag:
1709 if self.ui.debugflag:
1710 if rev is None:
1710 if rev is None:
1711 jmanifestnode = 'null'
1711 jmanifestnode = 'null'
1712 else:
1712 else:
1713 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1713 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1714 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1714 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1715
1715
1716 self.ui.write((',\n "extra": {%s}') %
1716 self.ui.write((',\n "extra": {%s}') %
1717 ", ".join('"%s": "%s"' % (j(k), j(v))
1717 ", ".join('"%s": "%s"' % (j(k), j(v))
1718 for k, v in ctx.extra().items()))
1718 for k, v in ctx.extra().items()))
1719
1719
1720 files = ctx.p1().status(ctx)
1720 files = ctx.p1().status(ctx)
1721 self.ui.write((',\n "modified": [%s]') %
1721 self.ui.write((',\n "modified": [%s]') %
1722 ", ".join('"%s"' % j(f) for f in files[0]))
1722 ", ".join('"%s"' % j(f) for f in files[0]))
1723 self.ui.write((',\n "added": [%s]') %
1723 self.ui.write((',\n "added": [%s]') %
1724 ", ".join('"%s"' % j(f) for f in files[1]))
1724 ", ".join('"%s"' % j(f) for f in files[1]))
1725 self.ui.write((',\n "removed": [%s]') %
1725 self.ui.write((',\n "removed": [%s]') %
1726 ", ".join('"%s"' % j(f) for f in files[2]))
1726 ", ".join('"%s"' % j(f) for f in files[2]))
1727
1727
1728 elif self.ui.verbose:
1728 elif self.ui.verbose:
1729 self.ui.write((',\n "files": [%s]') %
1729 self.ui.write((',\n "files": [%s]') %
1730 ", ".join('"%s"' % j(f) for f in ctx.files()))
1730 ", ".join('"%s"' % j(f) for f in ctx.files()))
1731
1731
1732 if copies:
1732 if copies:
1733 self.ui.write((',\n "copies": {%s}') %
1733 self.ui.write((',\n "copies": {%s}') %
1734 ", ".join('"%s": "%s"' % (j(k), j(v))
1734 ", ".join('"%s": "%s"' % (j(k), j(v))
1735 for k, v in copies))
1735 for k, v in copies))
1736
1736
1737 matchfn = self.matchfn
1737 matchfn = self.matchfn
1738 if matchfn:
1738 if matchfn:
1739 stat = self.diffopts.get('stat')
1739 stat = self.diffopts.get('stat')
1740 diff = self.diffopts.get('patch')
1740 diff = self.diffopts.get('patch')
1741 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1741 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1742 node, prev = ctx.node(), ctx.p1().node()
1742 node, prev = ctx.node(), ctx.p1().node()
1743 if stat:
1743 if stat:
1744 self.ui.pushbuffer()
1744 self.ui.pushbuffer()
1745 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1745 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1746 match=matchfn, stat=True)
1746 match=matchfn, stat=True)
1747 self.ui.write((',\n "diffstat": "%s"')
1747 self.ui.write((',\n "diffstat": "%s"')
1748 % j(self.ui.popbuffer()))
1748 % j(self.ui.popbuffer()))
1749 if diff:
1749 if diff:
1750 self.ui.pushbuffer()
1750 self.ui.pushbuffer()
1751 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1751 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1752 match=matchfn, stat=False)
1752 match=matchfn, stat=False)
1753 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1753 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1754
1754
1755 self.ui.write("\n }")
1755 self.ui.write("\n }")
1756
1756
1757 class changeset_templater(changeset_printer):
1757 class changeset_templater(changeset_printer):
1758 '''format changeset information.'''
1758 '''format changeset information.'''
1759
1759
1760 # Arguments before "buffered" used to be positional. Consider not
1760 # Arguments before "buffered" used to be positional. Consider not
1761 # adding/removing arguments before "buffered" to not break callers.
1761 # adding/removing arguments before "buffered" to not break callers.
1762 def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
1762 def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
1763 buffered=False):
1763 buffered=False):
1764 diffopts = diffopts or {}
1764 diffopts = diffopts or {}
1765
1765
1766 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1766 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1767 self.t = formatter.loadtemplater(ui, tmplspec,
1767 self.t = formatter.loadtemplater(ui, tmplspec,
1768 cache=templatekw.defaulttempl)
1768 cache=templatekw.defaulttempl)
1769 self._counter = itertools.count()
1769 self._counter = itertools.count()
1770 self.cache = {}
1770 self.cache = {}
1771
1771
1772 self._tref = tmplspec.ref
1772 self._tref = tmplspec.ref
1773 self._parts = {'header': '', 'footer': '',
1773 self._parts = {'header': '', 'footer': '',
1774 tmplspec.ref: tmplspec.ref,
1774 tmplspec.ref: tmplspec.ref,
1775 'docheader': '', 'docfooter': '',
1775 'docheader': '', 'docfooter': '',
1776 'separator': ''}
1776 'separator': ''}
1777 if tmplspec.mapfile:
1777 if tmplspec.mapfile:
1778 # find correct templates for current mode, for backward
1778 # find correct templates for current mode, for backward
1779 # compatibility with 'log -v/-q/--debug' using a mapfile
1779 # compatibility with 'log -v/-q/--debug' using a mapfile
1780 tmplmodes = [
1780 tmplmodes = [
1781 (True, ''),
1781 (True, ''),
1782 (self.ui.verbose, '_verbose'),
1782 (self.ui.verbose, '_verbose'),
1783 (self.ui.quiet, '_quiet'),
1783 (self.ui.quiet, '_quiet'),
1784 (self.ui.debugflag, '_debug'),
1784 (self.ui.debugflag, '_debug'),
1785 ]
1785 ]
1786 for mode, postfix in tmplmodes:
1786 for mode, postfix in tmplmodes:
1787 for t in self._parts:
1787 for t in self._parts:
1788 cur = t + postfix
1788 cur = t + postfix
1789 if mode and cur in self.t:
1789 if mode and cur in self.t:
1790 self._parts[t] = cur
1790 self._parts[t] = cur
1791 else:
1791 else:
1792 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
1792 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
1793 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
1793 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
1794 self._parts.update(m)
1794 self._parts.update(m)
1795
1795
1796 if self._parts['docheader']:
1796 if self._parts['docheader']:
1797 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1797 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1798
1798
1799 def close(self):
1799 def close(self):
1800 if self._parts['docfooter']:
1800 if self._parts['docfooter']:
1801 if not self.footer:
1801 if not self.footer:
1802 self.footer = ""
1802 self.footer = ""
1803 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1803 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1804 return super(changeset_templater, self).close()
1804 return super(changeset_templater, self).close()
1805
1805
1806 def _show(self, ctx, copies, matchfn, props):
1806 def _show(self, ctx, copies, matchfn, props):
1807 '''show a single changeset or file revision'''
1807 '''show a single changeset or file revision'''
1808 props = props.copy()
1808 props = props.copy()
1809 props.update(templatekw.keywords)
1809 props.update(templatekw.keywords)
1810 props['templ'] = self.t
1810 props['templ'] = self.t
1811 props['ctx'] = ctx
1811 props['ctx'] = ctx
1812 props['repo'] = self.repo
1812 props['repo'] = self.repo
1813 props['ui'] = self.repo.ui
1813 props['ui'] = self.repo.ui
1814 props['index'] = index = next(self._counter)
1814 props['index'] = index = next(self._counter)
1815 props['revcache'] = {'copies': copies}
1815 props['revcache'] = {'copies': copies}
1816 props['cache'] = self.cache
1816 props['cache'] = self.cache
1817 props = pycompat.strkwargs(props)
1817 props = pycompat.strkwargs(props)
1818
1818
1819 # write separator, which wouldn't work well with the header part below
1819 # write separator, which wouldn't work well with the header part below
1820 # since there's inherently a conflict between header (across items) and
1820 # since there's inherently a conflict between header (across items) and
1821 # separator (per item)
1821 # separator (per item)
1822 if self._parts['separator'] and index > 0:
1822 if self._parts['separator'] and index > 0:
1823 self.ui.write(templater.stringify(self.t(self._parts['separator'])))
1823 self.ui.write(templater.stringify(self.t(self._parts['separator'])))
1824
1824
1825 # write header
1825 # write header
1826 if self._parts['header']:
1826 if self._parts['header']:
1827 h = templater.stringify(self.t(self._parts['header'], **props))
1827 h = templater.stringify(self.t(self._parts['header'], **props))
1828 if self.buffered:
1828 if self.buffered:
1829 self.header[ctx.rev()] = h
1829 self.header[ctx.rev()] = h
1830 else:
1830 else:
1831 if self.lastheader != h:
1831 if self.lastheader != h:
1832 self.lastheader = h
1832 self.lastheader = h
1833 self.ui.write(h)
1833 self.ui.write(h)
1834
1834
1835 # write changeset metadata, then patch if requested
1835 # write changeset metadata, then patch if requested
1836 key = self._parts[self._tref]
1836 key = self._parts[self._tref]
1837 self.ui.write(templater.stringify(self.t(key, **props)))
1837 self.ui.write(templater.stringify(self.t(key, **props)))
1838 self.showpatch(ctx, matchfn)
1838 self.showpatch(ctx, matchfn)
1839
1839
1840 if self._parts['footer']:
1840 if self._parts['footer']:
1841 if not self.footer:
1841 if not self.footer:
1842 self.footer = templater.stringify(
1842 self.footer = templater.stringify(
1843 self.t(self._parts['footer'], **props))
1843 self.t(self._parts['footer'], **props))
1844
1844
1845 def logtemplatespec(tmpl, mapfile):
1845 def logtemplatespec(tmpl, mapfile):
1846 if mapfile:
1846 if mapfile:
1847 return formatter.templatespec('changeset', tmpl, mapfile)
1847 return formatter.templatespec('changeset', tmpl, mapfile)
1848 else:
1848 else:
1849 return formatter.templatespec('', tmpl, None)
1849 return formatter.templatespec('', tmpl, None)
1850
1850
1851 def _lookuplogtemplate(ui, tmpl, style):
1851 def _lookuplogtemplate(ui, tmpl, style):
1852 """Find the template matching the given template spec or style
1852 """Find the template matching the given template spec or style
1853
1853
1854 See formatter.lookuptemplate() for details.
1854 See formatter.lookuptemplate() for details.
1855 """
1855 """
1856
1856
1857 # ui settings
1857 # ui settings
1858 if not tmpl and not style: # template are stronger than style
1858 if not tmpl and not style: # template are stronger than style
1859 tmpl = ui.config('ui', 'logtemplate')
1859 tmpl = ui.config('ui', 'logtemplate')
1860 if tmpl:
1860 if tmpl:
1861 return logtemplatespec(templater.unquotestring(tmpl), None)
1861 return logtemplatespec(templater.unquotestring(tmpl), None)
1862 else:
1862 else:
1863 style = util.expandpath(ui.config('ui', 'style'))
1863 style = util.expandpath(ui.config('ui', 'style'))
1864
1864
1865 if not tmpl and style:
1865 if not tmpl and style:
1866 mapfile = style
1866 mapfile = style
1867 if not os.path.split(mapfile)[0]:
1867 if not os.path.split(mapfile)[0]:
1868 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1868 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1869 or templater.templatepath(mapfile))
1869 or templater.templatepath(mapfile))
1870 if mapname:
1870 if mapname:
1871 mapfile = mapname
1871 mapfile = mapname
1872 return logtemplatespec(None, mapfile)
1872 return logtemplatespec(None, mapfile)
1873
1873
1874 if not tmpl:
1874 if not tmpl:
1875 return logtemplatespec(None, None)
1875 return logtemplatespec(None, None)
1876
1876
1877 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1877 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1878
1878
1879 def makelogtemplater(ui, repo, tmpl, buffered=False):
1879 def makelogtemplater(ui, repo, tmpl, buffered=False):
1880 """Create a changeset_templater from a literal template 'tmpl'"""
1880 """Create a changeset_templater from a literal template 'tmpl'"""
1881 spec = logtemplatespec(tmpl, None)
1881 spec = logtemplatespec(tmpl, None)
1882 return changeset_templater(ui, repo, spec, buffered=buffered)
1882 return changeset_templater(ui, repo, spec, buffered=buffered)
1883
1883
1884 def show_changeset(ui, repo, opts, buffered=False):
1884 def show_changeset(ui, repo, opts, buffered=False):
1885 """show one changeset using template or regular display.
1885 """show one changeset using template or regular display.
1886
1886
1887 Display format will be the first non-empty hit of:
1887 Display format will be the first non-empty hit of:
1888 1. option 'template'
1888 1. option 'template'
1889 2. option 'style'
1889 2. option 'style'
1890 3. [ui] setting 'logtemplate'
1890 3. [ui] setting 'logtemplate'
1891 4. [ui] setting 'style'
1891 4. [ui] setting 'style'
1892 If all of these values are either the unset or the empty string,
1892 If all of these values are either the unset or the empty string,
1893 regular display via changeset_printer() is done.
1893 regular display via changeset_printer() is done.
1894 """
1894 """
1895 # options
1895 # options
1896 matchfn = None
1896 matchfn = None
1897 if opts.get('patch') or opts.get('stat'):
1897 if opts.get('patch') or opts.get('stat'):
1898 matchfn = scmutil.matchall(repo)
1898 matchfn = scmutil.matchall(repo)
1899
1899
1900 if opts.get('template') == 'json':
1900 if opts.get('template') == 'json':
1901 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1901 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1902
1902
1903 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
1903 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
1904
1904
1905 if not spec.ref and not spec.tmpl and not spec.mapfile:
1905 if not spec.ref and not spec.tmpl and not spec.mapfile:
1906 return changeset_printer(ui, repo, matchfn, opts, buffered)
1906 return changeset_printer(ui, repo, matchfn, opts, buffered)
1907
1907
1908 return changeset_templater(ui, repo, spec, matchfn, opts, buffered)
1908 return changeset_templater(ui, repo, spec, matchfn, opts, buffered)
1909
1909
1910 def showmarker(fm, marker, index=None):
1910 def showmarker(fm, marker, index=None):
1911 """utility function to display obsolescence marker in a readable way
1911 """utility function to display obsolescence marker in a readable way
1912
1912
1913 To be used by debug function."""
1913 To be used by debug function."""
1914 if index is not None:
1914 if index is not None:
1915 fm.write('index', '%i ', index)
1915 fm.write('index', '%i ', index)
1916 fm.write('precnode', '%s ', hex(marker.precnode()))
1916 fm.write('precnode', '%s ', hex(marker.prednode()))
1917 succs = marker.succnodes()
1917 succs = marker.succnodes()
1918 fm.condwrite(succs, 'succnodes', '%s ',
1918 fm.condwrite(succs, 'succnodes', '%s ',
1919 fm.formatlist(map(hex, succs), name='node'))
1919 fm.formatlist(map(hex, succs), name='node'))
1920 fm.write('flag', '%X ', marker.flags())
1920 fm.write('flag', '%X ', marker.flags())
1921 parents = marker.parentnodes()
1921 parents = marker.parentnodes()
1922 if parents is not None:
1922 if parents is not None:
1923 fm.write('parentnodes', '{%s} ',
1923 fm.write('parentnodes', '{%s} ',
1924 fm.formatlist(map(hex, parents), name='node', sep=', '))
1924 fm.formatlist(map(hex, parents), name='node', sep=', '))
1925 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1925 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1926 meta = marker.metadata().copy()
1926 meta = marker.metadata().copy()
1927 meta.pop('date', None)
1927 meta.pop('date', None)
1928 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1928 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1929 fm.plain('\n')
1929 fm.plain('\n')
1930
1930
1931 def finddate(ui, repo, date):
1931 def finddate(ui, repo, date):
1932 """Find the tipmost changeset that matches the given date spec"""
1932 """Find the tipmost changeset that matches the given date spec"""
1933
1933
1934 df = util.matchdate(date)
1934 df = util.matchdate(date)
1935 m = scmutil.matchall(repo)
1935 m = scmutil.matchall(repo)
1936 results = {}
1936 results = {}
1937
1937
1938 def prep(ctx, fns):
1938 def prep(ctx, fns):
1939 d = ctx.date()
1939 d = ctx.date()
1940 if df(d[0]):
1940 if df(d[0]):
1941 results[ctx.rev()] = d
1941 results[ctx.rev()] = d
1942
1942
1943 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1943 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1944 rev = ctx.rev()
1944 rev = ctx.rev()
1945 if rev in results:
1945 if rev in results:
1946 ui.status(_("found revision %s from %s\n") %
1946 ui.status(_("found revision %s from %s\n") %
1947 (rev, util.datestr(results[rev])))
1947 (rev, util.datestr(results[rev])))
1948 return '%d' % rev
1948 return '%d' % rev
1949
1949
1950 raise error.Abort(_("revision matching date not found"))
1950 raise error.Abort(_("revision matching date not found"))
1951
1951
1952 def increasingwindows(windowsize=8, sizelimit=512):
1952 def increasingwindows(windowsize=8, sizelimit=512):
1953 while True:
1953 while True:
1954 yield windowsize
1954 yield windowsize
1955 if windowsize < sizelimit:
1955 if windowsize < sizelimit:
1956 windowsize *= 2
1956 windowsize *= 2
1957
1957
1958 class FileWalkError(Exception):
1958 class FileWalkError(Exception):
1959 pass
1959 pass
1960
1960
1961 def walkfilerevs(repo, match, follow, revs, fncache):
1961 def walkfilerevs(repo, match, follow, revs, fncache):
1962 '''Walks the file history for the matched files.
1962 '''Walks the file history for the matched files.
1963
1963
1964 Returns the changeset revs that are involved in the file history.
1964 Returns the changeset revs that are involved in the file history.
1965
1965
1966 Throws FileWalkError if the file history can't be walked using
1966 Throws FileWalkError if the file history can't be walked using
1967 filelogs alone.
1967 filelogs alone.
1968 '''
1968 '''
1969 wanted = set()
1969 wanted = set()
1970 copies = []
1970 copies = []
1971 minrev, maxrev = min(revs), max(revs)
1971 minrev, maxrev = min(revs), max(revs)
1972 def filerevgen(filelog, last):
1972 def filerevgen(filelog, last):
1973 """
1973 """
1974 Only files, no patterns. Check the history of each file.
1974 Only files, no patterns. Check the history of each file.
1975
1975
1976 Examines filelog entries within minrev, maxrev linkrev range
1976 Examines filelog entries within minrev, maxrev linkrev range
1977 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1977 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1978 tuples in backwards order
1978 tuples in backwards order
1979 """
1979 """
1980 cl_count = len(repo)
1980 cl_count = len(repo)
1981 revs = []
1981 revs = []
1982 for j in xrange(0, last + 1):
1982 for j in xrange(0, last + 1):
1983 linkrev = filelog.linkrev(j)
1983 linkrev = filelog.linkrev(j)
1984 if linkrev < minrev:
1984 if linkrev < minrev:
1985 continue
1985 continue
1986 # only yield rev for which we have the changelog, it can
1986 # only yield rev for which we have the changelog, it can
1987 # happen while doing "hg log" during a pull or commit
1987 # happen while doing "hg log" during a pull or commit
1988 if linkrev >= cl_count:
1988 if linkrev >= cl_count:
1989 break
1989 break
1990
1990
1991 parentlinkrevs = []
1991 parentlinkrevs = []
1992 for p in filelog.parentrevs(j):
1992 for p in filelog.parentrevs(j):
1993 if p != nullrev:
1993 if p != nullrev:
1994 parentlinkrevs.append(filelog.linkrev(p))
1994 parentlinkrevs.append(filelog.linkrev(p))
1995 n = filelog.node(j)
1995 n = filelog.node(j)
1996 revs.append((linkrev, parentlinkrevs,
1996 revs.append((linkrev, parentlinkrevs,
1997 follow and filelog.renamed(n)))
1997 follow and filelog.renamed(n)))
1998
1998
1999 return reversed(revs)
1999 return reversed(revs)
2000 def iterfiles():
2000 def iterfiles():
2001 pctx = repo['.']
2001 pctx = repo['.']
2002 for filename in match.files():
2002 for filename in match.files():
2003 if follow:
2003 if follow:
2004 if filename not in pctx:
2004 if filename not in pctx:
2005 raise error.Abort(_('cannot follow file not in parent '
2005 raise error.Abort(_('cannot follow file not in parent '
2006 'revision: "%s"') % filename)
2006 'revision: "%s"') % filename)
2007 yield filename, pctx[filename].filenode()
2007 yield filename, pctx[filename].filenode()
2008 else:
2008 else:
2009 yield filename, None
2009 yield filename, None
2010 for filename_node in copies:
2010 for filename_node in copies:
2011 yield filename_node
2011 yield filename_node
2012
2012
2013 for file_, node in iterfiles():
2013 for file_, node in iterfiles():
2014 filelog = repo.file(file_)
2014 filelog = repo.file(file_)
2015 if not len(filelog):
2015 if not len(filelog):
2016 if node is None:
2016 if node is None:
2017 # A zero count may be a directory or deleted file, so
2017 # A zero count may be a directory or deleted file, so
2018 # try to find matching entries on the slow path.
2018 # try to find matching entries on the slow path.
2019 if follow:
2019 if follow:
2020 raise error.Abort(
2020 raise error.Abort(
2021 _('cannot follow nonexistent file: "%s"') % file_)
2021 _('cannot follow nonexistent file: "%s"') % file_)
2022 raise FileWalkError("Cannot walk via filelog")
2022 raise FileWalkError("Cannot walk via filelog")
2023 else:
2023 else:
2024 continue
2024 continue
2025
2025
2026 if node is None:
2026 if node is None:
2027 last = len(filelog) - 1
2027 last = len(filelog) - 1
2028 else:
2028 else:
2029 last = filelog.rev(node)
2029 last = filelog.rev(node)
2030
2030
2031 # keep track of all ancestors of the file
2031 # keep track of all ancestors of the file
2032 ancestors = {filelog.linkrev(last)}
2032 ancestors = {filelog.linkrev(last)}
2033
2033
2034 # iterate from latest to oldest revision
2034 # iterate from latest to oldest revision
2035 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
2035 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
2036 if not follow:
2036 if not follow:
2037 if rev > maxrev:
2037 if rev > maxrev:
2038 continue
2038 continue
2039 else:
2039 else:
2040 # Note that last might not be the first interesting
2040 # Note that last might not be the first interesting
2041 # rev to us:
2041 # rev to us:
2042 # if the file has been changed after maxrev, we'll
2042 # if the file has been changed after maxrev, we'll
2043 # have linkrev(last) > maxrev, and we still need
2043 # have linkrev(last) > maxrev, and we still need
2044 # to explore the file graph
2044 # to explore the file graph
2045 if rev not in ancestors:
2045 if rev not in ancestors:
2046 continue
2046 continue
2047 # XXX insert 1327 fix here
2047 # XXX insert 1327 fix here
2048 if flparentlinkrevs:
2048 if flparentlinkrevs:
2049 ancestors.update(flparentlinkrevs)
2049 ancestors.update(flparentlinkrevs)
2050
2050
2051 fncache.setdefault(rev, []).append(file_)
2051 fncache.setdefault(rev, []).append(file_)
2052 wanted.add(rev)
2052 wanted.add(rev)
2053 if copied:
2053 if copied:
2054 copies.append(copied)
2054 copies.append(copied)
2055
2055
2056 return wanted
2056 return wanted
2057
2057
class _followfilter(object):
    """Incrementally decide whether revisions belong to the 'follow' set.

    The first revision fed to match() becomes the anchor (startrev).
    Subsequent revisions are accepted if they are descendants (when
    walking forward, rev > startrev) or ancestors (when walking
    backward) of the anchor.  match() must be called in a monotonic
    walk order for the root-tracking sets to stay consistent.
    """

    def __init__(self, repo, onlyfirst=False):
        # onlyfirst: follow only first parents (the --follow-first flavor)
        self.repo = repo
        self.startrev = nullrev  # nullrev means "anchor not yet set"
        self.roots = set()       # frontier of revs known to be in the set
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            # Parents of rev, restricted to the first one when
            # onlyfirst is set, and with nullrev filtered out otherwise.
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            # First call: anchor the filter on this revision.
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    # rev descends from a known member; it joins the frontier.
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                # rev is an awaited ancestor; replace it by its own parents.
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
2095
2095
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # Slow path is needed when patterns (not plain filenames) are given,
    # or when --removed forces a scan of every changeset's file list.
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        # Cache the matched filenames for the display phase.
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): 'wanted - [x]' relies on the left operand
                # supporting subtraction of a list; a plain builtin set
                # (fast path result) would raise TypeError here — confirm
                # which types reach this branch when --prune is combined
                # with file patterns.
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            # Gather up to windowsize wanted revisions from the stream.
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # Prepare contexts in forward (ascending) order...
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    # fncache miss: lazily filter the changeset's files.
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # ...then yield them in the window's original order.
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
2233
2233
def _makefollowlogfilematcher(repo, files, followfirst):
    """Return a rev -> matcher callable tracking renames of *files*.

    When displaying a revision with --patch --follow FILE, we have to
    know which file of the revision must be diffed. With --follow, we
    want the names of the ancestors of FILE in the revision.  Those
    names are gathered by replaying the graph traversal already done
    by the --follow revset and relating revs to file names (which is
    not "correct" but good enough), lazily on the first call.
    """
    # rev -> set of ancestor file paths, built on demand.
    namesbyrev = {}
    # One-element list so the closure below can flip the flag.
    populated = [False]
    parentctx = repo['.']

    def _fill():
        for name in files:
            fctx = parentctx[name]
            namesbyrev.setdefault(fctx.introrev(), set()).add(fctx.path())
            for anc in fctx.ancestors(followfirst=followfirst):
                namesbyrev.setdefault(anc.rev(), set()).add(anc.path())

    def filematcher(rev):
        if not populated[0]:
            # Lazy initialization on first use.
            populated[0] = True
            _fill()
        return scmutil.matchfiles(repo, namesbyrev.get(rev, []))

    return filematcher
2261
2261
2262 def _makenofollowlogfilematcher(repo, pats, opts):
2262 def _makenofollowlogfilematcher(repo, pats, opts):
2263 '''hook for extensions to override the filematcher for non-follow cases'''
2263 '''hook for extensions to override the filematcher for non-follow cases'''
2264 return None
2264 return None
2265
2265
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # Maps a log option to (revset template, combinator).  The
    # combinator (' or ' / ' and ') joins the per-value expressions
    # when the option holds a list; None marks single-valued options.
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    # Copy so the synthetic '_*' keys added below don't leak to callers.
    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    # Ascending revs mean we follow descendants instead of ancestors.
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                slowpath = False

    # Option names indexed by [followfirst] and, for the second pair,
    # [followdescendants][followfirst].
    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # Translate the accumulated options into one 'and'-joined revset.
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2417
2417
def _logrevs(repo, opts):
    """Return the smartset of revisions 'log' should consider.

    Honors an explicit --rev first; otherwise --follow walks back from
    the working directory parent, and the default is every revision,
    newest first.
    """
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    following = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        return scmutil.revrange(repo, opts['rev'])
    if following:
        if repo.dirstate.p1() == nullid:
            # Nothing checked out: there is nothing to follow.
            return smartset.baseset()
        return repo.revs('reverse(:.)')
    allrevs = smartset.spanset(repo)
    allrevs.reverse()
    return allrevs
2432
2432
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    maxcount = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if maxcount is not None:
        # Honor --limit by keeping only the first 'maxcount' revisions.
        revs = smartset.baseset(list(itertools.islice(revs, maxcount)))

    return revs, expr, filematcher
2463
2463
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    maxcount = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if maxcount is not None:
        # Honor --limit by keeping only the first 'maxcount' revisions.
        revs = smartset.baseset(list(itertools.islice(revs, maxcount)))

    return revs, expr, filematcher
2489
2489
def _graphnodeformatter(ui, displayer):
    """Return a (repo, ctx) -> str callable rendering a graph node label.

    Uses the 'ui.graphnodetemplate' config as a template when set;
    otherwise falls back to the stock {graphnode} keyword renderer.
    """
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode  # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    templ = formatter.maketemplater(ui, spec)
    cache = {}
    if isinstance(displayer, changeset_templater):
        cache = displayer.cache  # reuse cache of slow templates
    # Shared props dict mutated per call below; only the per-revision
    # keys (ctx/repo/ui/revcache) change between renders.
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache
    def formatnode(repo, ctx):
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}  # fresh per-revision keyword cache
        return templ.render(props)
    return formatnode
2510
2510
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    """Render the changesets yielded by *dag* as an ASCII graph.

    ``edgefn`` converts one DAG entry into drawable edge tuples;
    ``getrenamed`` (optional) reports copies/renames for a file at a
    revision; ``filematcher`` (optional) maps a rev to a match object
    restricting which files are detailed.
    """
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

    # experimental config: experimental.graphshorten
    state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, kind, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        # ctx.rev() is falsy for the null/working ctx; skip rename lookup then
        if getrenamed and ctx.rev():
            copies = []
            for fname in ctx.files():
                rename = getrenamed(fname, ctx.rev())
                if rename:
                    copies.append((fname, rename[0]))
        revmatchfn = filematcher(ctx.rev()) if filematcher is not None else None
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            # drop the empty string produced by a trailing newline
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(kind, char, lines, state, rev, parents)
        for etype, echar, elines, coldata in edges:
            graphmod.ascii(ui, state, etype, echar, elines, coldata)
    displayer.close()
2558
2558
def graphlog(ui, repo, pats, opts):
    """Run ``hg log --graph``.

    Parameters are identical to log command ones.
    """
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        # bound rename detection at the highest explicitly requested rev
        revspec = opts.get('rev')
        endrev = scmutil.revrange(repo, revspec).max() + 1 if revspec else None
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)
2575
2575
def checkunsupportedgraphflags(pats, opts):
    """Abort when an option incompatible with -G/--graph was given."""
    for op in ["newest_first"]:
        if opts.get(op):
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % op.replace("_", "-"))
2581
2581
def graphrevs(repo, nodes, opts):
    """Return a graph DAG walker over *nodes*, newest first.

    Honors the log --limit option. NOTE: reverses *nodes* in place,
    matching the original behavior callers may rely on.
    """
    limit = loglimit(opts)
    nodes.reverse()
    nodes = nodes if limit is None else nodes[:limit]
    return graphmod.nodes(repo, nodes)
2588
2588
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule files matched by *match* for addition to the dirstate.

    Recurses into subrepositories and returns the list of files that
    could not be added (bad matches plus rejected adds).
    """
    bad = []

    def join(f):
        return os.path.join(prefix, f)

    def badfn(f, msg):
        # record files the matcher rejected, then delegate to the
        # original bad-file callback
        bad.append(f)
        return match.bad(f, msg)

    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or (not explicitonly and f not in wctx
                     and repo.wvfs.lexists(f)):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            # with --subrepos, also pick up non-explicit files in subrepos
            subexplicit = not opts.get(r'subrepos')
            bad.extend(sub.add(ui, submatch, prefix, subexplicit, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2631
2631
def addwebdirpath(repo, serverpath, webconf):
    """Register *repo*, and recursively its subrepositories, under
    *serverpath* in the hgweb config mapping *webconf*."""
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    # every revision touching .hgsub may declare subrepositories
    for rev in repo.revs('filelog("path:.hgsub")'):
        ctx = repo[rev]
        for subpath in ctx.substate:
            ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2640
2640
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking the files matched by *match* without deleting them.

    Recurses into subrepositories. Returns ``(bad, forgot)``: files that
    could not be forgotten and files successfully forgotten.
    """
    bad = []
    forgot = []
    wctx = repo[None]

    def join(f):
        return os.path.join(prefix, f)

    def badfn(f, msg):
        # remember matcher failures, then delegate to the original callback
        bad.append(f)
        return match.bad(f, msg)

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            # only complain about explicitly named, untracked, non-dir files
            if f in repo.dirstate or repo.wvfs.isdir(f) or f in forgot:
                continue
            if repo.wvfs.exists(f):
                # Don't complain if the exact case match wasn't given.
                # But don't do this until after checking 'forgot', so
                # that subrepo files aren't normalized, and this op is
                # purely from data cached by the status walk above.
                if repo.dirstate.normalize(f) in repo.dirstate:
                    continue
                ui.warn(_('not removing %s: '
                          'file is already untracked\n')
                        % match.rel(f))
            bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2688
2688
def files(ui, ctx, m, fm, fmt, subrepos):
    """List the files of *ctx* matched by *m* through formatter *fm*.

    Returns 0 if at least one file was listed, 1 otherwise.
    """
    rev = ctx.rev()
    ret = 1
    dirstate = ctx.repo().dirstate

    for f in ctx.matches(m):
        # for the working copy (rev is None), hide files marked removed
        if rev is None and dirstate[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fctx = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fctx.size(), fctx.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        recurse = m.exact(subpath) or subrepos
        if recurse or any(submatch.files()):
            sub = ctx.sub(subpath)
            try:
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2718
2718
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    """Implement the file-removal logic behind ``hg remove``.

    Returns 0 on success, 1 if anything could not be removed. When
    *warnings* is None this is the outermost call and accumulated
    warnings are printed at the end; otherwise messages are appended to
    the caller-supplied list (used when recursing into subrepos).
    """
    def join(f):
        return os.path.join(prefix, f)

    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    # recurse into matched subrepositories first
    subs = sorted(wctx.substate)
    total = len(subs)
    for count, subpath in enumerate(subs, 1):
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                               % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)

    def insubrepo(f):
        # True when f lives inside a declared subrepository path
        for subpath in wctx.substate:
            if f.startswith(subpath + '/'):
                return True
        return False

    for count, f in enumerate(files, 1):
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo(f) or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
            # missing files will generate a warning elsewhere
            ret = 1
    ui.progress(_('deleting'), None)

    # pick the set of files to forget/unlink, warning about skipped ones
    if force:
        toremove = modified + deleted + clean + added
    elif after:
        toremove = deleted
        remaining = modified + added + clean
        total = len(remaining)
        for count, f in enumerate(remaining, 1):
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        toremove = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    toremove = sorted(toremove)
    total = len(toremove)
    for count, f in enumerate(toremove, 1):
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in toremove:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(toremove)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2836
2836
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Write out the contents of the files in *ctx* matched by *matcher*.

    Recurses into subrepositories. Returns 0 when at least one file was
    written, 1 otherwise.
    """
    err = 1

    def catfile(path):
        # open a per-file output when a filename template was given,
        # otherwise keep writing through the shared formatter
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
        with formatter.maybereopen(basefm, filename, opts) as fm:
            data = ctx[path].data()
            if opts.get('decode'):
                data = repo.wwritedata(path, data)
            fm.startitem()
            fm.write('data', '%s', data)
            fm.data(abspath=path, path=matcher.rel(path))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        onefile = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(onefile)[0]:
                catfile(onefile)
                return 0
        except KeyError:
            pass

    for abspath in ctx.walk(matcher):
        catfile(abspath)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)
            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path), **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2883
2883
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove') and scmutil.addremove(repo, matcher, "",
                                                   opts) != 0:
        raise error.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2900
2900
def samefile(f, ctx1, ctx2):
    """Report whether file *f* is identical in *ctx1* and *ctx2*.

    Identical means: present in both with equal content and flags, or
    absent from both. Present in only one context is not identical.
    """
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if not in1:
        # same only when the file is missing from both sides
        return not in2
    if not in2:
        return False
    fctx1 = ctx1.filectx(f)
    fctx2 = ctx2.filectx(f)
    # fctx.cmp() is True when contents differ
    return not fctx1.cmp(fctx2) and fctx1.flags() == fctx2.flags()
2912
2912
2913 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2913 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2914 # avoid cycle context -> subrepo -> cmdutil
2914 # avoid cycle context -> subrepo -> cmdutil
2915 from . import context
2915 from . import context
2916
2916
2917 # amend will reuse the existing user if not specified, but the obsolete
2917 # amend will reuse the existing user if not specified, but the obsolete
2918 # marker creation requires that the current user's name is specified.
2918 # marker creation requires that the current user's name is specified.
2919 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2919 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2920 ui.username() # raise exception if username not set
2920 ui.username() # raise exception if username not set
2921
2921
2922 ui.note(_('amending changeset %s\n') % old)
2922 ui.note(_('amending changeset %s\n') % old)
2923 base = old.p1()
2923 base = old.p1()
2924
2924
2925 newid = None
2925 newid = None
2926 with repo.wlock(), repo.lock(), repo.transaction('amend'):
2926 with repo.wlock(), repo.lock(), repo.transaction('amend'):
2927 # See if we got a message from -m or -l, if not, open the editor
2927 # See if we got a message from -m or -l, if not, open the editor
2928 # with the message of the changeset to amend
2928 # with the message of the changeset to amend
2929 message = logmessage(ui, opts)
2929 message = logmessage(ui, opts)
2930 # ensure logfile does not conflict with later enforcement of the
2930 # ensure logfile does not conflict with later enforcement of the
2931 # message. potential logfile content has been processed by
2931 # message. potential logfile content has been processed by
2932 # `logmessage` anyway.
2932 # `logmessage` anyway.
2933 opts.pop('logfile')
2933 opts.pop('logfile')
2934 # First, do a regular commit to record all changes in the working
2934 # First, do a regular commit to record all changes in the working
2935 # directory (if there are any)
2935 # directory (if there are any)
2936 ui.callhooks = False
2936 ui.callhooks = False
2937 activebookmark = repo._bookmarks.active
2937 activebookmark = repo._bookmarks.active
2938 try:
2938 try:
2939 repo._bookmarks.active = None
2939 repo._bookmarks.active = None
2940 opts['message'] = 'temporary amend commit for %s' % old
2940 opts['message'] = 'temporary amend commit for %s' % old
2941 node = commit(ui, repo, commitfunc, pats, opts)
2941 node = commit(ui, repo, commitfunc, pats, opts)
2942 finally:
2942 finally:
2943 repo._bookmarks.active = activebookmark
2943 repo._bookmarks.active = activebookmark
2944 ui.callhooks = True
2944 ui.callhooks = True
2945 ctx = repo[node]
2945 ctx = repo[node]
2946
2946
2947 # Participating changesets:
2947 # Participating changesets:
2948 #
2948 #
2949 # node/ctx o - new (intermediate) commit that contains changes
2949 # node/ctx o - new (intermediate) commit that contains changes
2950 # | from working dir to go into amending commit
2950 # | from working dir to go into amending commit
2951 # | (or a workingctx if there were no changes)
2951 # | (or a workingctx if there were no changes)
2952 # |
2952 # |
2953 # old o - changeset to amend
2953 # old o - changeset to amend
2954 # |
2954 # |
2955 # base o - parent of amending changeset
2955 # base o - parent of amending changeset
2956
2956
2957 # Update extra dict from amended commit (e.g. to preserve graft
2957 # Update extra dict from amended commit (e.g. to preserve graft
2958 # source)
2958 # source)
2959 extra.update(old.extra())
2959 extra.update(old.extra())
2960
2960
2961 # Also update it from the intermediate commit or from the wctx
2961 # Also update it from the intermediate commit or from the wctx
2962 extra.update(ctx.extra())
2962 extra.update(ctx.extra())
2963
2963
2964 if len(old.parents()) > 1:
2964 if len(old.parents()) > 1:
2965 # ctx.files() isn't reliable for merges, so fall back to the
2965 # ctx.files() isn't reliable for merges, so fall back to the
2966 # slower repo.status() method
2966 # slower repo.status() method
2967 files = set([fn for st in repo.status(base, old)[:3]
2967 files = set([fn for st in repo.status(base, old)[:3]
2968 for fn in st])
2968 for fn in st])
2969 else:
2969 else:
2970 files = set(old.files())
2970 files = set(old.files())
2971
2971
2972 # Second, we use either the commit we just did, or if there were no
2972 # Second, we use either the commit we just did, or if there were no
2973 # changes the parent of the working directory as the version of the
2973 # changes the parent of the working directory as the version of the
2974 # files in the final amend commit
2974 # files in the final amend commit
2975 if node:
2975 if node:
2976 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2976 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2977
2977
2978 user = ctx.user()
2978 user = ctx.user()
2979 date = ctx.date()
2979 date = ctx.date()
2980 # Recompute copies (avoid recording a -> b -> a)
2980 # Recompute copies (avoid recording a -> b -> a)
2981 copied = copies.pathcopies(base, ctx)
2981 copied = copies.pathcopies(base, ctx)
2982 if old.p2:
2982 if old.p2:
2983 copied.update(copies.pathcopies(old.p2(), ctx))
2983 copied.update(copies.pathcopies(old.p2(), ctx))
2984
2984
2985 # Prune files which were reverted by the updates: if old
2985 # Prune files which were reverted by the updates: if old
2986 # introduced file X and our intermediate commit, node,
2986 # introduced file X and our intermediate commit, node,
2987 # renamed that file, then those two files are the same and
2987 # renamed that file, then those two files are the same and
2988 # we can discard X from our list of files. Likewise if X
2988 # we can discard X from our list of files. Likewise if X
2989 # was deleted, it's no longer relevant
2989 # was deleted, it's no longer relevant
2990 files.update(ctx.files())
2990 files.update(ctx.files())
2991 files = [f for f in files if not samefile(f, ctx, base)]
2991 files = [f for f in files if not samefile(f, ctx, base)]
2992
2992
2993 def filectxfn(repo, ctx_, path):
2993 def filectxfn(repo, ctx_, path):
2994 try:
2994 try:
2995 fctx = ctx[path]
2995 fctx = ctx[path]
2996 flags = fctx.flags()
2996 flags = fctx.flags()
2997 mctx = context.memfilectx(repo,
2997 mctx = context.memfilectx(repo,
2998 fctx.path(), fctx.data(),
2998 fctx.path(), fctx.data(),
2999 islink='l' in flags,
2999 islink='l' in flags,
3000 isexec='x' in flags,
3000 isexec='x' in flags,
3001 copied=copied.get(path))
3001 copied=copied.get(path))
3002 return mctx
3002 return mctx
3003 except KeyError:
3003 except KeyError:
3004 return None
3004 return None
3005 else:
3005 else:
3006 ui.note(_('copying changeset %s to %s\n') % (old, base))
3006 ui.note(_('copying changeset %s to %s\n') % (old, base))
3007
3007
3008 # Use version of files as in the old cset
3008 # Use version of files as in the old cset
3009 def filectxfn(repo, ctx_, path):
3009 def filectxfn(repo, ctx_, path):
3010 try:
3010 try:
3011 return old.filectx(path)
3011 return old.filectx(path)
3012 except KeyError:
3012 except KeyError:
3013 return None
3013 return None
3014
3014
3015 user = opts.get('user') or old.user()
3015 user = opts.get('user') or old.user()
3016 date = opts.get('date') or old.date()
3016 date = opts.get('date') or old.date()
3017 editform = mergeeditform(old, 'commit.amend')
3017 editform = mergeeditform(old, 'commit.amend')
3018 editor = getcommiteditor(editform=editform,
3018 editor = getcommiteditor(editform=editform,
3019 **pycompat.strkwargs(opts))
3019 **pycompat.strkwargs(opts))
3020 if not message:
3020 if not message:
3021 editor = getcommiteditor(edit=True, editform=editform)
3021 editor = getcommiteditor(edit=True, editform=editform)
3022 message = old.description()
3022 message = old.description()
3023
3023
3024 pureextra = extra.copy()
3024 pureextra = extra.copy()
3025 extra['amend_source'] = old.hex()
3025 extra['amend_source'] = old.hex()
3026
3026
3027 new = context.memctx(repo,
3027 new = context.memctx(repo,
3028 parents=[base.node(), old.p2().node()],
3028 parents=[base.node(), old.p2().node()],
3029 text=message,
3029 text=message,
3030 files=files,
3030 files=files,
3031 filectxfn=filectxfn,
3031 filectxfn=filectxfn,
3032 user=user,
3032 user=user,
3033 date=date,
3033 date=date,
3034 extra=extra,
3034 extra=extra,
3035 editor=editor)
3035 editor=editor)
3036
3036
3037 newdesc = changelog.stripdesc(new.description())
3037 newdesc = changelog.stripdesc(new.description())
3038 if ((not node)
3038 if ((not node)
3039 and newdesc == old.description()
3039 and newdesc == old.description()
3040 and user == old.user()
3040 and user == old.user()
3041 and date == old.date()
3041 and date == old.date()
3042 and pureextra == old.extra()):
3042 and pureextra == old.extra()):
3043 # nothing changed. continuing here would create a new node
3043 # nothing changed. continuing here would create a new node
3044 # anyway because of the amend_source noise.
3044 # anyway because of the amend_source noise.
3045 #
3045 #
3046 # This not what we expect from amend.
3046 # This not what we expect from amend.
3047 return old.node()
3047 return old.node()
3048
3048
3049 ph = repo.ui.config('phases', 'new-commit', phases.draft)
3049 ph = repo.ui.config('phases', 'new-commit', phases.draft)
3050 try:
3050 try:
3051 if opts.get('secret'):
3051 if opts.get('secret'):
3052 commitphase = 'secret'
3052 commitphase = 'secret'
3053 else:
3053 else:
3054 commitphase = old.phase()
3054 commitphase = old.phase()
3055 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
3055 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
3056 newid = repo.commitctx(new)
3056 newid = repo.commitctx(new)
3057 finally:
3057 finally:
3058 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
3058 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
3059 if newid != old.node():
3059 if newid != old.node():
3060 # Reroute the working copy parent to the new changeset
3060 # Reroute the working copy parent to the new changeset
3061 repo.setparents(newid, nullid)
3061 repo.setparents(newid, nullid)
3062 mapping = {old.node(): (newid,)}
3062 mapping = {old.node(): (newid,)}
3063 if node:
3063 if node:
3064 mapping[node] = ()
3064 mapping[node] = ()
3065 scmutil.cleanupnodes(repo, mapping, 'amend')
3065 scmutil.cleanupnodes(repo, mapping, 'amend')
3066 return newid
3066 return newid
3067
3067
def commiteditor(repo, ctx, subs, editform=''):
    """Return the commit message for ctx, invoking an editor only if empty.

    If the changectx already carries a description, it is returned
    unchanged; otherwise the user's editor is launched via
    commitforceeditor with unchanged-message detection enabled.
    """
    description = ctx.description()
    if description:
        return description
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
3073
3073
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Unconditionally run the user's editor to obtain a commit message.

    The editor is seeded either with a rendered '[committemplate]'
    template (looked up by progressively less specific editform, e.g.
    'changeset.commit.amend' -> 'changeset.commit' -> 'changeset') or
    with the default text built by buildcommittext().

    finishdesc, if given, post-processes the edited text. Raises
    error.Abort on an empty message, or - when
    unchangedmessagedetection is set - when the user saved the template
    text verbatim.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # try the most specific 'changeset.<editform...>' template first,
    # dropping trailing components until one is configured
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        ref = '.'.join(forms)
        if repo.ui.config('committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)
    try:
        # make in-memory changes visible to external process
        tr = repo.currenttransaction()
        repo.dirstate.write(tr)
        pending = tr and tr.writepending() and repo.root

        editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                                  editform=editform, pending=pending,
                                  repopath=repo.path)
        text = editortext

        # strip away anything below this special string (used for editors that
        # want to display the diff)
        stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
        if stripbelow:
            text = text[:stripbelow.start()]

        text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    finally:
        # restore the working directory even if the editor (or the pending
        # transaction write) raised; previously an exception left the
        # process chdir'd into repo.root
        os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
3123
3123
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    """Render the '[committemplate]' template *ref* for ctx and return it.

    All '[committemplate]' config entries are unquoted and installed into
    the templater's cache so templates can reference each other by name.
    """
    ui = repo.ui
    spec = formatter.templatespec(ref, None, None)
    tmpl = changeset_templater(ui, repo, spec, None, {}, False)
    overrides = ((key, templater.unquotestring(value))
                 for key, value in repo.ui.configitems('committemplate'))
    tmpl.t.cache.update(overrides)

    if not extramsg:
        extramsg = ''  # ensure that extramsg is string

    ui.pushbuffer()
    tmpl.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
3137
3137
def hgprefix(msg):
    """Prefix every non-empty line of msg with "HG: ", dropping blank lines."""
    prefixed = ("HG: %s" % line for line in msg.split("\n") if line)
    return "\n".join(prefixed)
3140
3140
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default editor text for committing ctx.

    The result is the current description (if any) followed by "HG: "
    comment lines summarizing user, branch/bookmark/merge state, subrepos
    and the added/changed/removed files.
    """
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    lines = []
    add = lines.append
    if ctx.description():
        add(ctx.description())
    add("")
    add("")  # Empty line between message and comments.
    add(hgprefix(_("Enter commit message."
                   " Lines beginning with 'HG:' are removed.")))
    add(hgprefix(extramsg))
    add("HG: --")
    add(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        add(hgprefix(_("branch merge")))
    if ctx.branch():
        add(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        add(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    lines.extend(hgprefix(_("subrepo %s") % s) for s in subs)
    lines.extend(hgprefix(_("added %s") % f) for f in added)
    lines.extend(hgprefix(_("changed %s") % f) for f in modified)
    lines.extend(hgprefix(_("removed %s") % f) for f in removed)
    if not (added or modified or removed):
        add(hgprefix(_("no files changed")))
    add("")

    return "\n".join(lines)
3168
3168
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print user-facing status messages after a commit of *node*.

    Emits 'created new head' when the commit added a topological or
    branch head, 'reopening closed branch head' when it reopens a closed
    head, and (in verbose/debug mode) the committed changeset id.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    creatednewhead = (
        not opts.get('amend')
        and bheads
        and node not in bheads
        and not any(p.node() in bheads and p.branch() == branch
                    for p in parents))
    if creatednewhead:
        repo.ui.status(_('created new head\n'))
        # The message is never printed for initial roots. Otherwise, with
        # some initial branch heads already existing, whether it appears
        # depends on what the two parents were:
        #
        # parent kinds: N = null/absent, B = other named branch,
        # C = regular non-head changeset, H = head of current branch
        #
        # Par Msg Comment
        # N N y additional topo root
        #
        # B N y additional branch root
        # C N y additional topo head
        # H N n usual case
        #
        # B B y weird additional branch root
        # C B y branch merge
        # H B n merge with named branch
        #
        # C C y additional head from merge
        # C H n merge with a head
        #
        # H H n head merge: head count decreases

    if not opts.get('close_branch'):
        for p in parents:
            if p.closesbranch() and p.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % p)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3216
3216
def postcommitstatus(repo, pats, opts):
    """Return the working-directory status for files matching pats/opts."""
    wctx = repo[None]
    matcher = scmutil.match(wctx, pats, opts)
    return repo.status(match=matcher)
3219
3219
3220 def revert(ui, repo, ctx, parents, *pats, **opts):
3220 def revert(ui, repo, ctx, parents, *pats, **opts):
3221 parent, p2 = parents
3221 parent, p2 = parents
3222 node = ctx.node()
3222 node = ctx.node()
3223
3223
3224 mf = ctx.manifest()
3224 mf = ctx.manifest()
3225 if node == p2:
3225 if node == p2:
3226 parent = p2
3226 parent = p2
3227
3227
3228 # need all matching names in dirstate and manifest of target rev,
3228 # need all matching names in dirstate and manifest of target rev,
3229 # so have to walk both. do not print errors if files exist in one
3229 # so have to walk both. do not print errors if files exist in one
3230 # but not other. in both cases, filesets should be evaluated against
3230 # but not other. in both cases, filesets should be evaluated against
3231 # workingctx to get consistent result (issue4497). this means 'set:**'
3231 # workingctx to get consistent result (issue4497). this means 'set:**'
3232 # cannot be used to select missing files from target rev.
3232 # cannot be used to select missing files from target rev.
3233
3233
3234 # `names` is a mapping for all elements in working copy and target revision
3234 # `names` is a mapping for all elements in working copy and target revision
3235 # The mapping is in the form:
3235 # The mapping is in the form:
3236 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3236 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3237 names = {}
3237 names = {}
3238
3238
3239 with repo.wlock():
3239 with repo.wlock():
3240 ## filling of the `names` mapping
3240 ## filling of the `names` mapping
3241 # walk dirstate to fill `names`
3241 # walk dirstate to fill `names`
3242
3242
3243 interactive = opts.get('interactive', False)
3243 interactive = opts.get('interactive', False)
3244 wctx = repo[None]
3244 wctx = repo[None]
3245 m = scmutil.match(wctx, pats, opts)
3245 m = scmutil.match(wctx, pats, opts)
3246
3246
3247 # we'll need this later
3247 # we'll need this later
3248 targetsubs = sorted(s for s in wctx.substate if m(s))
3248 targetsubs = sorted(s for s in wctx.substate if m(s))
3249
3249
3250 if not m.always():
3250 if not m.always():
3251 matcher = matchmod.badmatch(m, lambda x, y: False)
3251 matcher = matchmod.badmatch(m, lambda x, y: False)
3252 for abs in wctx.walk(matcher):
3252 for abs in wctx.walk(matcher):
3253 names[abs] = m.rel(abs), m.exact(abs)
3253 names[abs] = m.rel(abs), m.exact(abs)
3254
3254
3255 # walk target manifest to fill `names`
3255 # walk target manifest to fill `names`
3256
3256
3257 def badfn(path, msg):
3257 def badfn(path, msg):
3258 if path in names:
3258 if path in names:
3259 return
3259 return
3260 if path in ctx.substate:
3260 if path in ctx.substate:
3261 return
3261 return
3262 path_ = path + '/'
3262 path_ = path + '/'
3263 for f in names:
3263 for f in names:
3264 if f.startswith(path_):
3264 if f.startswith(path_):
3265 return
3265 return
3266 ui.warn("%s: %s\n" % (m.rel(path), msg))
3266 ui.warn("%s: %s\n" % (m.rel(path), msg))
3267
3267
3268 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3268 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3269 if abs not in names:
3269 if abs not in names:
3270 names[abs] = m.rel(abs), m.exact(abs)
3270 names[abs] = m.rel(abs), m.exact(abs)
3271
3271
3272 # Find status of all file in `names`.
3272 # Find status of all file in `names`.
3273 m = scmutil.matchfiles(repo, names)
3273 m = scmutil.matchfiles(repo, names)
3274
3274
3275 changes = repo.status(node1=node, match=m,
3275 changes = repo.status(node1=node, match=m,
3276 unknown=True, ignored=True, clean=True)
3276 unknown=True, ignored=True, clean=True)
3277 else:
3277 else:
3278 changes = repo.status(node1=node, match=m)
3278 changes = repo.status(node1=node, match=m)
3279 for kind in changes:
3279 for kind in changes:
3280 for abs in kind:
3280 for abs in kind:
3281 names[abs] = m.rel(abs), m.exact(abs)
3281 names[abs] = m.rel(abs), m.exact(abs)
3282
3282
3283 m = scmutil.matchfiles(repo, names)
3283 m = scmutil.matchfiles(repo, names)
3284
3284
3285 modified = set(changes.modified)
3285 modified = set(changes.modified)
3286 added = set(changes.added)
3286 added = set(changes.added)
3287 removed = set(changes.removed)
3287 removed = set(changes.removed)
3288 _deleted = set(changes.deleted)
3288 _deleted = set(changes.deleted)
3289 unknown = set(changes.unknown)
3289 unknown = set(changes.unknown)
3290 unknown.update(changes.ignored)
3290 unknown.update(changes.ignored)
3291 clean = set(changes.clean)
3291 clean = set(changes.clean)
3292 modadded = set()
3292 modadded = set()
3293
3293
3294 # We need to account for the state of the file in the dirstate,
3294 # We need to account for the state of the file in the dirstate,
3295 # even when we revert against something else than parent. This will
3295 # even when we revert against something else than parent. This will
3296 # slightly alter the behavior of revert (doing back up or not, delete
3296 # slightly alter the behavior of revert (doing back up or not, delete
3297 # or just forget etc).
3297 # or just forget etc).
3298 if parent == node:
3298 if parent == node:
3299 dsmodified = modified
3299 dsmodified = modified
3300 dsadded = added
3300 dsadded = added
3301 dsremoved = removed
3301 dsremoved = removed
3302 # store all local modifications, useful later for rename detection
3302 # store all local modifications, useful later for rename detection
3303 localchanges = dsmodified | dsadded
3303 localchanges = dsmodified | dsadded
3304 modified, added, removed = set(), set(), set()
3304 modified, added, removed = set(), set(), set()
3305 else:
3305 else:
3306 changes = repo.status(node1=parent, match=m)
3306 changes = repo.status(node1=parent, match=m)
3307 dsmodified = set(changes.modified)
3307 dsmodified = set(changes.modified)
3308 dsadded = set(changes.added)
3308 dsadded = set(changes.added)
3309 dsremoved = set(changes.removed)
3309 dsremoved = set(changes.removed)
3310 # store all local modifications, useful later for rename detection
3310 # store all local modifications, useful later for rename detection
3311 localchanges = dsmodified | dsadded
3311 localchanges = dsmodified | dsadded
3312
3312
3313 # only take into account for removes between wc and target
3313 # only take into account for removes between wc and target
3314 clean |= dsremoved - removed
3314 clean |= dsremoved - removed
3315 dsremoved &= removed
3315 dsremoved &= removed
3316 # distinct between dirstate remove and other
3316 # distinct between dirstate remove and other
3317 removed -= dsremoved
3317 removed -= dsremoved
3318
3318
3319 modadded = added & dsmodified
3319 modadded = added & dsmodified
3320 added -= modadded
3320 added -= modadded
3321
3321
3322 # tell newly modified apart.
3322 # tell newly modified apart.
3323 dsmodified &= modified
3323 dsmodified &= modified
3324 dsmodified |= modified & dsadded # dirstate added may need backup
3324 dsmodified |= modified & dsadded # dirstate added may need backup
3325 modified -= dsmodified
3325 modified -= dsmodified
3326
3326
3327 # We need to wait for some post-processing to update this set
3327 # We need to wait for some post-processing to update this set
3328 # before making the distinction. The dirstate will be used for
3328 # before making the distinction. The dirstate will be used for
3329 # that purpose.
3329 # that purpose.
3330 dsadded = added
3330 dsadded = added
3331
3331
3332 # in case of merge, files that are actually added can be reported as
3332 # in case of merge, files that are actually added can be reported as
3333 # modified, we need to post process the result
3333 # modified, we need to post process the result
3334 if p2 != nullid:
3334 if p2 != nullid:
3335 mergeadd = set(dsmodified)
3335 mergeadd = set(dsmodified)
3336 for path in dsmodified:
3336 for path in dsmodified:
3337 if path in mf:
3337 if path in mf:
3338 mergeadd.remove(path)
3338 mergeadd.remove(path)
3339 dsadded |= mergeadd
3339 dsadded |= mergeadd
3340 dsmodified -= mergeadd
3340 dsmodified -= mergeadd
3341
3341
3342 # if f is a rename, update `names` to also revert the source
3342 # if f is a rename, update `names` to also revert the source
3343 cwd = repo.getcwd()
3343 cwd = repo.getcwd()
3344 for f in localchanges:
3344 for f in localchanges:
3345 src = repo.dirstate.copied(f)
3345 src = repo.dirstate.copied(f)
3346 # XXX should we check for rename down to target node?
3346 # XXX should we check for rename down to target node?
3347 if src and src not in names and repo.dirstate[src] == 'r':
3347 if src and src not in names and repo.dirstate[src] == 'r':
3348 dsremoved.add(src)
3348 dsremoved.add(src)
3349 names[src] = (repo.pathto(src, cwd), True)
3349 names[src] = (repo.pathto(src, cwd), True)
3350
3350
3351 # determine the exact nature of the deleted changesets
3351 # determine the exact nature of the deleted changesets
3352 deladded = set(_deleted)
3352 deladded = set(_deleted)
3353 for path in _deleted:
3353 for path in _deleted:
3354 if path in mf:
3354 if path in mf:
3355 deladded.remove(path)
3355 deladded.remove(path)
3356 deleted = _deleted - deladded
3356 deleted = _deleted - deladded
3357
3357
3358 # distinguish between file to forget and the other
3358 # distinguish between file to forget and the other
3359 added = set()
3359 added = set()
3360 for abs in dsadded:
3360 for abs in dsadded:
3361 if repo.dirstate[abs] != 'a':
3361 if repo.dirstate[abs] != 'a':
3362 added.add(abs)
3362 added.add(abs)
3363 dsadded -= added
3363 dsadded -= added
3364
3364
3365 for abs in deladded:
3365 for abs in deladded:
3366 if repo.dirstate[abs] == 'a':
3366 if repo.dirstate[abs] == 'a':
3367 dsadded.add(abs)
3367 dsadded.add(abs)
3368 deladded -= dsadded
3368 deladded -= dsadded
3369
3369
3370 # For files marked as removed, we check if an unknown file is present at
3370 # For files marked as removed, we check if an unknown file is present at
3371 # the same path. If a such file exists it may need to be backed up.
3371 # the same path. If a such file exists it may need to be backed up.
3372 # Making the distinction at this stage helps have simpler backup
3372 # Making the distinction at this stage helps have simpler backup
3373 # logic.
3373 # logic.
3374 removunk = set()
3374 removunk = set()
3375 for abs in removed:
3375 for abs in removed:
3376 target = repo.wjoin(abs)
3376 target = repo.wjoin(abs)
3377 if os.path.lexists(target):
3377 if os.path.lexists(target):
3378 removunk.add(abs)
3378 removunk.add(abs)
3379 removed -= removunk
3379 removed -= removunk
3380
3380
3381 dsremovunk = set()
3381 dsremovunk = set()
3382 for abs in dsremoved:
3382 for abs in dsremoved:
3383 target = repo.wjoin(abs)
3383 target = repo.wjoin(abs)
3384 if os.path.lexists(target):
3384 if os.path.lexists(target):
3385 dsremovunk.add(abs)
3385 dsremovunk.add(abs)
3386 dsremoved -= dsremovunk
3386 dsremoved -= dsremovunk
3387
3387
3388 # action to be actually performed by revert
3388 # action to be actually performed by revert
3389 # (<list of file>, message>) tuple
3389 # (<list of file>, message>) tuple
3390 actions = {'revert': ([], _('reverting %s\n')),
3390 actions = {'revert': ([], _('reverting %s\n')),
3391 'add': ([], _('adding %s\n')),
3391 'add': ([], _('adding %s\n')),
3392 'remove': ([], _('removing %s\n')),
3392 'remove': ([], _('removing %s\n')),
3393 'drop': ([], _('removing %s\n')),
3393 'drop': ([], _('removing %s\n')),
3394 'forget': ([], _('forgetting %s\n')),
3394 'forget': ([], _('forgetting %s\n')),
3395 'undelete': ([], _('undeleting %s\n')),
3395 'undelete': ([], _('undeleting %s\n')),
3396 'noop': (None, _('no changes needed to %s\n')),
3396 'noop': (None, _('no changes needed to %s\n')),
3397 'unknown': (None, _('file not managed: %s\n')),
3397 'unknown': (None, _('file not managed: %s\n')),
3398 }
3398 }
3399
3399
3400 # "constant" that convey the backup strategy.
3400 # "constant" that convey the backup strategy.
3401 # All set to `discard` if `no-backup` is set do avoid checking
3401 # All set to `discard` if `no-backup` is set do avoid checking
3402 # no_backup lower in the code.
3402 # no_backup lower in the code.
3403 # These values are ordered for comparison purposes
3403 # These values are ordered for comparison purposes
3404 backupinteractive = 3 # do backup if interactively modified
3404 backupinteractive = 3 # do backup if interactively modified
3405 backup = 2 # unconditionally do backup
3405 backup = 2 # unconditionally do backup
3406 check = 1 # check if the existing file differs from target
3406 check = 1 # check if the existing file differs from target
3407 discard = 0 # never do backup
3407 discard = 0 # never do backup
3408 if opts.get('no_backup'):
3408 if opts.get('no_backup'):
3409 backupinteractive = backup = check = discard
3409 backupinteractive = backup = check = discard
3410 if interactive:
3410 if interactive:
3411 dsmodifiedbackup = backupinteractive
3411 dsmodifiedbackup = backupinteractive
3412 else:
3412 else:
3413 dsmodifiedbackup = backup
3413 dsmodifiedbackup = backup
3414 tobackup = set()
3414 tobackup = set()
3415
3415
3416 backupanddel = actions['remove']
3416 backupanddel = actions['remove']
3417 if not opts.get('no_backup'):
3417 if not opts.get('no_backup'):
3418 backupanddel = actions['drop']
3418 backupanddel = actions['drop']
3419
3419
3420 disptable = (
3420 disptable = (
3421 # dispatch table:
3421 # dispatch table:
3422 # file state
3422 # file state
3423 # action
3423 # action
3424 # make backup
3424 # make backup
3425
3425
3426 ## Sets that results that will change file on disk
3426 ## Sets that results that will change file on disk
3427 # Modified compared to target, no local change
3427 # Modified compared to target, no local change
3428 (modified, actions['revert'], discard),
3428 (modified, actions['revert'], discard),
3429 # Modified compared to target, but local file is deleted
3429 # Modified compared to target, but local file is deleted
3430 (deleted, actions['revert'], discard),
3430 (deleted, actions['revert'], discard),
3431 # Modified compared to target, local change
3431 # Modified compared to target, local change
3432 (dsmodified, actions['revert'], dsmodifiedbackup),
3432 (dsmodified, actions['revert'], dsmodifiedbackup),
3433 # Added since target
3433 # Added since target
3434 (added, actions['remove'], discard),
3434 (added, actions['remove'], discard),
3435 # Added in working directory
3435 # Added in working directory
3436 (dsadded, actions['forget'], discard),
3436 (dsadded, actions['forget'], discard),
3437 # Added since target, have local modification
3437 # Added since target, have local modification
3438 (modadded, backupanddel, backup),
3438 (modadded, backupanddel, backup),
3439 # Added since target but file is missing in working directory
3439 # Added since target but file is missing in working directory
3440 (deladded, actions['drop'], discard),
3440 (deladded, actions['drop'], discard),
3441 # Removed since target, before working copy parent
3441 # Removed since target, before working copy parent
3442 (removed, actions['add'], discard),
3442 (removed, actions['add'], discard),
3443 # Same as `removed` but an unknown file exists at the same path
3443 # Same as `removed` but an unknown file exists at the same path
3444 (removunk, actions['add'], check),
3444 (removunk, actions['add'], check),
3445 # Removed since targe, marked as such in working copy parent
3445 # Removed since targe, marked as such in working copy parent
3446 (dsremoved, actions['undelete'], discard),
3446 (dsremoved, actions['undelete'], discard),
3447 # Same as `dsremoved` but an unknown file exists at the same path
3447 # Same as `dsremoved` but an unknown file exists at the same path
3448 (dsremovunk, actions['undelete'], check),
3448 (dsremovunk, actions['undelete'], check),
3449 ## the following sets does not result in any file changes
3449 ## the following sets does not result in any file changes
3450 # File with no modification
3450 # File with no modification
3451 (clean, actions['noop'], discard),
3451 (clean, actions['noop'], discard),
3452 # Existing file, not tracked anywhere
3452 # Existing file, not tracked anywhere
3453 (unknown, actions['unknown'], discard),
3453 (unknown, actions['unknown'], discard),
3454 )
3454 )
3455
3455
3456 for abs, (rel, exact) in sorted(names.items()):
3456 for abs, (rel, exact) in sorted(names.items()):
3457 # target file to be touch on disk (relative to cwd)
3457 # target file to be touch on disk (relative to cwd)
3458 target = repo.wjoin(abs)
3458 target = repo.wjoin(abs)
3459 # search the entry in the dispatch table.
3459 # search the entry in the dispatch table.
3460 # if the file is in any of these sets, it was touched in the working
3460 # if the file is in any of these sets, it was touched in the working
3461 # directory parent and we are sure it needs to be reverted.
3461 # directory parent and we are sure it needs to be reverted.
3462 for table, (xlist, msg), dobackup in disptable:
3462 for table, (xlist, msg), dobackup in disptable:
3463 if abs not in table:
3463 if abs not in table:
3464 continue
3464 continue
3465 if xlist is not None:
3465 if xlist is not None:
3466 xlist.append(abs)
3466 xlist.append(abs)
3467 if dobackup:
3467 if dobackup:
3468 # If in interactive mode, don't automatically create
3468 # If in interactive mode, don't automatically create
3469 # .orig files (issue4793)
3469 # .orig files (issue4793)
3470 if dobackup == backupinteractive:
3470 if dobackup == backupinteractive:
3471 tobackup.add(abs)
3471 tobackup.add(abs)
3472 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3472 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3473 bakname = scmutil.origpath(ui, repo, rel)
3473 bakname = scmutil.origpath(ui, repo, rel)
3474 ui.note(_('saving current version of %s as %s\n') %
3474 ui.note(_('saving current version of %s as %s\n') %
3475 (rel, bakname))
3475 (rel, bakname))
3476 if not opts.get('dry_run'):
3476 if not opts.get('dry_run'):
3477 if interactive:
3477 if interactive:
3478 util.copyfile(target, bakname)
3478 util.copyfile(target, bakname)
3479 else:
3479 else:
3480 util.rename(target, bakname)
3480 util.rename(target, bakname)
3481 if ui.verbose or not exact:
3481 if ui.verbose or not exact:
3482 if not isinstance(msg, basestring):
3482 if not isinstance(msg, basestring):
3483 msg = msg(abs)
3483 msg = msg(abs)
3484 ui.status(msg % rel)
3484 ui.status(msg % rel)
3485 elif exact:
3485 elif exact:
3486 ui.warn(msg % rel)
3486 ui.warn(msg % rel)
3487 break
3487 break
3488
3488
3489 if not opts.get('dry_run'):
3489 if not opts.get('dry_run'):
3490 needdata = ('revert', 'add', 'undelete')
3490 needdata = ('revert', 'add', 'undelete')
3491 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3491 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3492 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3492 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3493
3493
3494 if targetsubs:
3494 if targetsubs:
3495 # Revert the subrepos on the revert list
3495 # Revert the subrepos on the revert list
3496 for sub in targetsubs:
3496 for sub in targetsubs:
3497 try:
3497 try:
3498 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3498 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3499 except KeyError:
3499 except KeyError:
3500 raise error.Abort("subrepository '%s' does not exist in %s!"
3500 raise error.Abort("subrepository '%s' does not exist in %s!"
3501 % (sub, short(ctx.node())))
3501 % (sub, short(ctx.node())))
3502
3502
3503 def _revertprefetch(repo, ctx, *files):
3503 def _revertprefetch(repo, ctx, *files):
3504 """Let extension changing the storage layer prefetch content"""
3504 """Let extension changing the storage layer prefetch content"""
3505 pass
3505 pass
3506
3506
3507 def _performrevert(repo, parents, ctx, actions, interactive=False,
3507 def _performrevert(repo, parents, ctx, actions, interactive=False,
3508 tobackup=None):
3508 tobackup=None):
3509 """function that actually perform all the actions computed for revert
3509 """function that actually perform all the actions computed for revert
3510
3510
3511 This is an independent function to let extension to plug in and react to
3511 This is an independent function to let extension to plug in and react to
3512 the imminent revert.
3512 the imminent revert.
3513
3513
3514 Make sure you have the working directory locked when calling this function.
3514 Make sure you have the working directory locked when calling this function.
3515 """
3515 """
3516 parent, p2 = parents
3516 parent, p2 = parents
3517 node = ctx.node()
3517 node = ctx.node()
3518 excluded_files = []
3518 excluded_files = []
3519 matcher_opts = {"exclude": excluded_files}
3519 matcher_opts = {"exclude": excluded_files}
3520
3520
3521 def checkout(f):
3521 def checkout(f):
3522 fc = ctx[f]
3522 fc = ctx[f]
3523 repo.wwrite(f, fc.data(), fc.flags())
3523 repo.wwrite(f, fc.data(), fc.flags())
3524
3524
3525 def doremove(f):
3525 def doremove(f):
3526 try:
3526 try:
3527 repo.wvfs.unlinkpath(f)
3527 repo.wvfs.unlinkpath(f)
3528 except OSError:
3528 except OSError:
3529 pass
3529 pass
3530 repo.dirstate.remove(f)
3530 repo.dirstate.remove(f)
3531
3531
3532 audit_path = pathutil.pathauditor(repo.root)
3532 audit_path = pathutil.pathauditor(repo.root)
3533 for f in actions['forget'][0]:
3533 for f in actions['forget'][0]:
3534 if interactive:
3534 if interactive:
3535 choice = repo.ui.promptchoice(
3535 choice = repo.ui.promptchoice(
3536 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3536 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3537 if choice == 0:
3537 if choice == 0:
3538 repo.dirstate.drop(f)
3538 repo.dirstate.drop(f)
3539 else:
3539 else:
3540 excluded_files.append(repo.wjoin(f))
3540 excluded_files.append(repo.wjoin(f))
3541 else:
3541 else:
3542 repo.dirstate.drop(f)
3542 repo.dirstate.drop(f)
3543 for f in actions['remove'][0]:
3543 for f in actions['remove'][0]:
3544 audit_path(f)
3544 audit_path(f)
3545 if interactive:
3545 if interactive:
3546 choice = repo.ui.promptchoice(
3546 choice = repo.ui.promptchoice(
3547 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3547 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3548 if choice == 0:
3548 if choice == 0:
3549 doremove(f)
3549 doremove(f)
3550 else:
3550 else:
3551 excluded_files.append(repo.wjoin(f))
3551 excluded_files.append(repo.wjoin(f))
3552 else:
3552 else:
3553 doremove(f)
3553 doremove(f)
3554 for f in actions['drop'][0]:
3554 for f in actions['drop'][0]:
3555 audit_path(f)
3555 audit_path(f)
3556 repo.dirstate.remove(f)
3556 repo.dirstate.remove(f)
3557
3557
3558 normal = None
3558 normal = None
3559 if node == parent:
3559 if node == parent:
3560 # We're reverting to our parent. If possible, we'd like status
3560 # We're reverting to our parent. If possible, we'd like status
3561 # to report the file as clean. We have to use normallookup for
3561 # to report the file as clean. We have to use normallookup for
3562 # merges to avoid losing information about merged/dirty files.
3562 # merges to avoid losing information about merged/dirty files.
3563 if p2 != nullid:
3563 if p2 != nullid:
3564 normal = repo.dirstate.normallookup
3564 normal = repo.dirstate.normallookup
3565 else:
3565 else:
3566 normal = repo.dirstate.normal
3566 normal = repo.dirstate.normal
3567
3567
3568 newlyaddedandmodifiedfiles = set()
3568 newlyaddedandmodifiedfiles = set()
3569 if interactive:
3569 if interactive:
3570 # Prompt the user for changes to revert
3570 # Prompt the user for changes to revert
3571 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3571 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3572 m = scmutil.match(ctx, torevert, matcher_opts)
3572 m = scmutil.match(ctx, torevert, matcher_opts)
3573 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3573 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3574 diffopts.nodates = True
3574 diffopts.nodates = True
3575 diffopts.git = True
3575 diffopts.git = True
3576 operation = 'discard'
3576 operation = 'discard'
3577 reversehunks = True
3577 reversehunks = True
3578 if node != parent:
3578 if node != parent:
3579 operation = 'revert'
3579 operation = 'revert'
3580 reversehunks = repo.ui.configbool('experimental',
3580 reversehunks = repo.ui.configbool('experimental',
3581 'revertalternateinteractivemode')
3581 'revertalternateinteractivemode')
3582 if reversehunks:
3582 if reversehunks:
3583 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3583 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3584 else:
3584 else:
3585 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3585 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3586 originalchunks = patch.parsepatch(diff)
3586 originalchunks = patch.parsepatch(diff)
3587
3587
3588 try:
3588 try:
3589
3589
3590 chunks, opts = recordfilter(repo.ui, originalchunks,
3590 chunks, opts = recordfilter(repo.ui, originalchunks,
3591 operation=operation)
3591 operation=operation)
3592 if reversehunks:
3592 if reversehunks:
3593 chunks = patch.reversehunks(chunks)
3593 chunks = patch.reversehunks(chunks)
3594
3594
3595 except patch.PatchError as err:
3595 except patch.PatchError as err:
3596 raise error.Abort(_('error parsing patch: %s') % err)
3596 raise error.Abort(_('error parsing patch: %s') % err)
3597
3597
3598 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3598 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3599 if tobackup is None:
3599 if tobackup is None:
3600 tobackup = set()
3600 tobackup = set()
3601 # Apply changes
3601 # Apply changes
3602 fp = stringio()
3602 fp = stringio()
3603 for c in chunks:
3603 for c in chunks:
3604 # Create a backup file only if this hunk should be backed up
3604 # Create a backup file only if this hunk should be backed up
3605 if ishunk(c) and c.header.filename() in tobackup:
3605 if ishunk(c) and c.header.filename() in tobackup:
3606 abs = c.header.filename()
3606 abs = c.header.filename()
3607 target = repo.wjoin(abs)
3607 target = repo.wjoin(abs)
3608 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3608 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3609 util.copyfile(target, bakname)
3609 util.copyfile(target, bakname)
3610 tobackup.remove(abs)
3610 tobackup.remove(abs)
3611 c.write(fp)
3611 c.write(fp)
3612 dopatch = fp.tell()
3612 dopatch = fp.tell()
3613 fp.seek(0)
3613 fp.seek(0)
3614 if dopatch:
3614 if dopatch:
3615 try:
3615 try:
3616 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3616 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3617 except patch.PatchError as err:
3617 except patch.PatchError as err:
3618 raise error.Abort(str(err))
3618 raise error.Abort(str(err))
3619 del fp
3619 del fp
3620 else:
3620 else:
3621 for f in actions['revert'][0]:
3621 for f in actions['revert'][0]:
3622 checkout(f)
3622 checkout(f)
3623 if normal:
3623 if normal:
3624 normal(f)
3624 normal(f)
3625
3625
3626 for f in actions['add'][0]:
3626 for f in actions['add'][0]:
3627 # Don't checkout modified files, they are already created by the diff
3627 # Don't checkout modified files, they are already created by the diff
3628 if f not in newlyaddedandmodifiedfiles:
3628 if f not in newlyaddedandmodifiedfiles:
3629 checkout(f)
3629 checkout(f)
3630 repo.dirstate.add(f)
3630 repo.dirstate.add(f)
3631
3631
3632 normal = repo.dirstate.normallookup
3632 normal = repo.dirstate.normallookup
3633 if node == parent and p2 == nullid:
3633 if node == parent and p2 == nullid:
3634 normal = repo.dirstate.normal
3634 normal = repo.dirstate.normal
3635 for f in actions['undelete'][0]:
3635 for f in actions['undelete'][0]:
3636 checkout(f)
3636 checkout(f)
3637 normal(f)
3637 normal(f)
3638
3638
3639 copied = copies.pathcopies(repo[parent], ctx)
3639 copied = copies.pathcopies(repo[parent], ctx)
3640
3640
3641 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3641 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3642 if f in copied:
3642 if f in copied:
3643 repo.dirstate.copy(copied[f], f)
3643 repo.dirstate.copy(copied[f], f)
3644
3644
3645 class command(registrar.command):
3645 class command(registrar.command):
3646 def _doregister(self, func, name, *args, **kwargs):
3646 def _doregister(self, func, name, *args, **kwargs):
3647 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3647 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3648 return super(command, self)._doregister(func, name, *args, **kwargs)
3648 return super(command, self)._doregister(func, name, *args, **kwargs)
3649
3649
3650 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3650 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3651 # commands.outgoing. "missing" is "missing" of the result of
3651 # commands.outgoing. "missing" is "missing" of the result of
3652 # "findcommonoutgoing()"
3652 # "findcommonoutgoing()"
3653 outgoinghooks = util.hooks()
3653 outgoinghooks = util.hooks()
3654
3654
3655 # a list of (ui, repo) functions called by commands.summary
3655 # a list of (ui, repo) functions called by commands.summary
3656 summaryhooks = util.hooks()
3656 summaryhooks = util.hooks()
3657
3657
3658 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3658 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3659 #
3659 #
3660 # functions should return tuple of booleans below, if 'changes' is None:
3660 # functions should return tuple of booleans below, if 'changes' is None:
3661 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3661 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3662 #
3662 #
3663 # otherwise, 'changes' is a tuple of tuples below:
3663 # otherwise, 'changes' is a tuple of tuples below:
3664 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3664 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3665 # - (desturl, destbranch, destpeer, outgoing)
3665 # - (desturl, destbranch, destpeer, outgoing)
3666 summaryremotehooks = util.hooks()
3666 summaryremotehooks = util.hooks()
3667
3667
3668 # A list of state files kept by multistep operations like graft.
3668 # A list of state files kept by multistep operations like graft.
3669 # Since graft cannot be aborted, it is considered 'clearable' by update.
3669 # Since graft cannot be aborted, it is considered 'clearable' by update.
3670 # note: bisect is intentionally excluded
3670 # note: bisect is intentionally excluded
3671 # (state file, clearable, allowcommit, error, hint)
3671 # (state file, clearable, allowcommit, error, hint)
3672 unfinishedstates = [
3672 unfinishedstates = [
3673 ('graftstate', True, False, _('graft in progress'),
3673 ('graftstate', True, False, _('graft in progress'),
3674 _("use 'hg graft --continue' or 'hg update' to abort")),
3674 _("use 'hg graft --continue' or 'hg update' to abort")),
3675 ('updatestate', True, False, _('last update was interrupted'),
3675 ('updatestate', True, False, _('last update was interrupted'),
3676 _("use 'hg update' to get a consistent checkout"))
3676 _("use 'hg update' to get a consistent checkout"))
3677 ]
3677 ]
3678
3678
3679 def checkunfinished(repo, commit=False):
3679 def checkunfinished(repo, commit=False):
3680 '''Look for an unfinished multistep operation, like graft, and abort
3680 '''Look for an unfinished multistep operation, like graft, and abort
3681 if found. It's probably good to check this right before
3681 if found. It's probably good to check this right before
3682 bailifchanged().
3682 bailifchanged().
3683 '''
3683 '''
3684 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3684 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3685 if commit and allowcommit:
3685 if commit and allowcommit:
3686 continue
3686 continue
3687 if repo.vfs.exists(f):
3687 if repo.vfs.exists(f):
3688 raise error.Abort(msg, hint=hint)
3688 raise error.Abort(msg, hint=hint)
3689
3689
3690 def clearunfinished(repo):
3690 def clearunfinished(repo):
3691 '''Check for unfinished operations (as above), and clear the ones
3691 '''Check for unfinished operations (as above), and clear the ones
3692 that are clearable.
3692 that are clearable.
3693 '''
3693 '''
3694 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3694 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3695 if not clearable and repo.vfs.exists(f):
3695 if not clearable and repo.vfs.exists(f):
3696 raise error.Abort(msg, hint=hint)
3696 raise error.Abort(msg, hint=hint)
3697 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3697 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3698 if clearable and repo.vfs.exists(f):
3698 if clearable and repo.vfs.exists(f):
3699 util.unlink(repo.vfs.join(f))
3699 util.unlink(repo.vfs.join(f))
3700
3700
3701 afterresolvedstates = [
3701 afterresolvedstates = [
3702 ('graftstate',
3702 ('graftstate',
3703 _('hg graft --continue')),
3703 _('hg graft --continue')),
3704 ]
3704 ]
3705
3705
3706 def howtocontinue(repo):
3706 def howtocontinue(repo):
3707 '''Check for an unfinished operation and return the command to finish
3707 '''Check for an unfinished operation and return the command to finish
3708 it.
3708 it.
3709
3709
3710 afterresolvedstates tuples define a .hg/{file} and the corresponding
3710 afterresolvedstates tuples define a .hg/{file} and the corresponding
3711 command needed to finish it.
3711 command needed to finish it.
3712
3712
3713 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3713 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3714 a boolean.
3714 a boolean.
3715 '''
3715 '''
3716 contmsg = _("continue: %s")
3716 contmsg = _("continue: %s")
3717 for f, msg in afterresolvedstates:
3717 for f, msg in afterresolvedstates:
3718 if repo.vfs.exists(f):
3718 if repo.vfs.exists(f):
3719 return contmsg % msg, True
3719 return contmsg % msg, True
3720 if repo[None].dirty(missing=True, merge=False, branch=False):
3720 if repo[None].dirty(missing=True, merge=False, branch=False):
3721 return contmsg % _("hg commit"), False
3721 return contmsg % _("hg commit"), False
3722 return None, None
3722 return None, None
3723
3723
3724 def checkafterresolved(repo):
3724 def checkafterresolved(repo):
3725 '''Inform the user about the next action after completing hg resolve
3725 '''Inform the user about the next action after completing hg resolve
3726
3726
3727 If there's a matching afterresolvedstates, howtocontinue will yield
3727 If there's a matching afterresolvedstates, howtocontinue will yield
3728 repo.ui.warn as the reporter.
3728 repo.ui.warn as the reporter.
3729
3729
3730 Otherwise, it will yield repo.ui.note.
3730 Otherwise, it will yield repo.ui.note.
3731 '''
3731 '''
3732 msg, warning = howtocontinue(repo)
3732 msg, warning = howtocontinue(repo)
3733 if msg is not None:
3733 if msg is not None:
3734 if warning:
3734 if warning:
3735 repo.ui.warn("%s\n" % msg)
3735 repo.ui.warn("%s\n" % msg)
3736 else:
3736 else:
3737 repo.ui.note("%s\n" % msg)
3737 repo.ui.note("%s\n" % msg)
3738
3738
3739 def wrongtooltocontinue(repo, task):
3739 def wrongtooltocontinue(repo, task):
3740 '''Raise an abort suggesting how to properly continue if there is an
3740 '''Raise an abort suggesting how to properly continue if there is an
3741 active task.
3741 active task.
3742
3742
3743 Uses howtocontinue() to find the active task.
3743 Uses howtocontinue() to find the active task.
3744
3744
3745 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3745 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3746 a hint.
3746 a hint.
3747 '''
3747 '''
3748 after = howtocontinue(repo)
3748 after = howtocontinue(repo)
3749 hint = None
3749 hint = None
3750 if after[1]:
3750 if after[1]:
3751 hint = after[0]
3751 hint = after[0]
3752 raise error.Abort(_('no %s in progress') % task, hint=hint)
3752 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,537 +1,544 b''
1 # obsutil.py - utility functions for obsolescence
1 # obsutil.py - utility functions for obsolescence
2 #
2 #
3 # Copyright 2017 Boris Feld <boris.feld@octobus.net>
3 # Copyright 2017 Boris Feld <boris.feld@octobus.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from . import (
10 from . import (
11 phases,
11 phases,
12 util
12 )
13 )
13
14
14 class marker(object):
15 class marker(object):
15 """Wrap obsolete marker raw data"""
16 """Wrap obsolete marker raw data"""
16
17
17 def __init__(self, repo, data):
18 def __init__(self, repo, data):
18 # the repo argument will be used to create changectx in later version
19 # the repo argument will be used to create changectx in later version
19 self._repo = repo
20 self._repo = repo
20 self._data = data
21 self._data = data
21 self._decodedmeta = None
22 self._decodedmeta = None
22
23
23 def __hash__(self):
24 def __hash__(self):
24 return hash(self._data)
25 return hash(self._data)
25
26
26 def __eq__(self, other):
27 def __eq__(self, other):
27 if type(other) != type(self):
28 if type(other) != type(self):
28 return False
29 return False
29 return self._data == other._data
30 return self._data == other._data
30
31
31 def precnode(self):
32 def precnode(self):
32 """Precursor changeset node identifier"""
33 msg = ("'marker.precnode' is deprecated, "
34 "use 'marker.precnode'")
35 util.nouideprecwarn(msg, '4.4')
36 return self.prednode()
37
38 def prednode(self):
39 """Predecessor changeset node identifier"""
33 return self._data[0]
40 return self._data[0]
34
41
35 def succnodes(self):
42 def succnodes(self):
36 """List of successor changesets node identifiers"""
43 """List of successor changesets node identifiers"""
37 return self._data[1]
44 return self._data[1]
38
45
39 def parentnodes(self):
46 def parentnodes(self):
40 """Parents of the precursors (None if not recorded)"""
47 """Parents of the predecessors (None if not recorded)"""
41 return self._data[5]
48 return self._data[5]
42
49
43 def metadata(self):
50 def metadata(self):
44 """Decoded metadata dictionary"""
51 """Decoded metadata dictionary"""
45 return dict(self._data[3])
52 return dict(self._data[3])
46
53
47 def date(self):
54 def date(self):
48 """Creation date as (unixtime, offset)"""
55 """Creation date as (unixtime, offset)"""
49 return self._data[4]
56 return self._data[4]
50
57
51 def flags(self):
58 def flags(self):
52 """The flags field of the marker"""
59 """The flags field of the marker"""
53 return self._data[2]
60 return self._data[2]
54
61
55 def getmarkers(repo, nodes=None, exclusive=False):
62 def getmarkers(repo, nodes=None, exclusive=False):
56 """returns markers known in a repository
63 """returns markers known in a repository
57
64
58 If <nodes> is specified, only markers "relevant" to those nodes are are
65 If <nodes> is specified, only markers "relevant" to those nodes are are
59 returned"""
66 returned"""
60 if nodes is None:
67 if nodes is None:
61 rawmarkers = repo.obsstore
68 rawmarkers = repo.obsstore
62 elif exclusive:
69 elif exclusive:
63 rawmarkers = exclusivemarkers(repo, nodes)
70 rawmarkers = exclusivemarkers(repo, nodes)
64 else:
71 else:
65 rawmarkers = repo.obsstore.relevantmarkers(nodes)
72 rawmarkers = repo.obsstore.relevantmarkers(nodes)
66
73
67 for markerdata in rawmarkers:
74 for markerdata in rawmarkers:
68 yield marker(repo, markerdata)
75 yield marker(repo, markerdata)
69
76
70 def closestpredecessors(repo, nodeid):
77 def closestpredecessors(repo, nodeid):
71 """yield the list of next predecessors pointing on visible changectx nodes
78 """yield the list of next predecessors pointing on visible changectx nodes
72
79
73 This function respect the repoview filtering, filtered revision will be
80 This function respect the repoview filtering, filtered revision will be
74 considered missing.
81 considered missing.
75 """
82 """
76
83
77 precursors = repo.obsstore.precursors
84 precursors = repo.obsstore.precursors
78 stack = [nodeid]
85 stack = [nodeid]
79 seen = set(stack)
86 seen = set(stack)
80
87
81 while stack:
88 while stack:
82 current = stack.pop()
89 current = stack.pop()
83 currentpreccs = precursors.get(current, ())
90 currentpreccs = precursors.get(current, ())
84
91
85 for prec in currentpreccs:
92 for prec in currentpreccs:
86 precnodeid = prec[0]
93 precnodeid = prec[0]
87
94
88 # Basic cycle protection
95 # Basic cycle protection
89 if precnodeid in seen:
96 if precnodeid in seen:
90 continue
97 continue
91 seen.add(precnodeid)
98 seen.add(precnodeid)
92
99
93 if precnodeid in repo:
100 if precnodeid in repo:
94 yield precnodeid
101 yield precnodeid
95 else:
102 else:
96 stack.append(precnodeid)
103 stack.append(precnodeid)
97
104
98 def allprecursors(obsstore, nodes, ignoreflags=0):
105 def allprecursors(obsstore, nodes, ignoreflags=0):
99 """Yield node for every precursors of <nodes>.
106 """Yield node for every precursors of <nodes>.
100
107
101 Some precursors may be unknown locally.
108 Some precursors may be unknown locally.
102
109
103 This is a linear yield unsuited to detecting folded changesets. It includes
110 This is a linear yield unsuited to detecting folded changesets. It includes
104 initial nodes too."""
111 initial nodes too."""
105
112
106 remaining = set(nodes)
113 remaining = set(nodes)
107 seen = set(remaining)
114 seen = set(remaining)
108 while remaining:
115 while remaining:
109 current = remaining.pop()
116 current = remaining.pop()
110 yield current
117 yield current
111 for mark in obsstore.precursors.get(current, ()):
118 for mark in obsstore.precursors.get(current, ()):
112 # ignore marker flagged with specified flag
119 # ignore marker flagged with specified flag
113 if mark[2] & ignoreflags:
120 if mark[2] & ignoreflags:
114 continue
121 continue
115 suc = mark[0]
122 suc = mark[0]
116 if suc not in seen:
123 if suc not in seen:
117 seen.add(suc)
124 seen.add(suc)
118 remaining.add(suc)
125 remaining.add(suc)
119
126
120 def allsuccessors(obsstore, nodes, ignoreflags=0):
127 def allsuccessors(obsstore, nodes, ignoreflags=0):
121 """Yield node for every successor of <nodes>.
128 """Yield node for every successor of <nodes>.
122
129
123 Some successors may be unknown locally.
130 Some successors may be unknown locally.
124
131
125 This is a linear yield unsuited to detecting split changesets. It includes
132 This is a linear yield unsuited to detecting split changesets. It includes
126 initial nodes too."""
133 initial nodes too."""
127 remaining = set(nodes)
134 remaining = set(nodes)
128 seen = set(remaining)
135 seen = set(remaining)
129 while remaining:
136 while remaining:
130 current = remaining.pop()
137 current = remaining.pop()
131 yield current
138 yield current
132 for mark in obsstore.successors.get(current, ()):
139 for mark in obsstore.successors.get(current, ()):
133 # ignore marker flagged with specified flag
140 # ignore marker flagged with specified flag
134 if mark[2] & ignoreflags:
141 if mark[2] & ignoreflags:
135 continue
142 continue
136 for suc in mark[1]:
143 for suc in mark[1]:
137 if suc not in seen:
144 if suc not in seen:
138 seen.add(suc)
145 seen.add(suc)
139 remaining.add(suc)
146 remaining.add(suc)
140
147
141 def _filterprunes(markers):
148 def _filterprunes(markers):
142 """return a set with no prune markers"""
149 """return a set with no prune markers"""
143 return set(m for m in markers if m[1])
150 return set(m for m in markers if m[1])
144
151
145 def exclusivemarkers(repo, nodes):
152 def exclusivemarkers(repo, nodes):
146 """set of markers relevant to "nodes" but no other locally-known nodes
153 """set of markers relevant to "nodes" but no other locally-known nodes
147
154
148 This function compute the set of markers "exclusive" to a locally-known
155 This function compute the set of markers "exclusive" to a locally-known
149 node. This means we walk the markers starting from <nodes> until we reach a
156 node. This means we walk the markers starting from <nodes> until we reach a
150 locally-known precursors outside of <nodes>. Element of <nodes> with
157 locally-known precursors outside of <nodes>. Element of <nodes> with
151 locally-known successors outside of <nodes> are ignored (since their
158 locally-known successors outside of <nodes> are ignored (since their
152 precursors markers are also relevant to these successors).
159 precursors markers are also relevant to these successors).
153
160
154 For example:
161 For example:
155
162
156 # (A0 rewritten as A1)
163 # (A0 rewritten as A1)
157 #
164 #
158 # A0 <-1- A1 # Marker "1" is exclusive to A1
165 # A0 <-1- A1 # Marker "1" is exclusive to A1
159
166
160 or
167 or
161
168
162 # (A0 rewritten as AX; AX rewritten as A1; AX is unkown locally)
169 # (A0 rewritten as AX; AX rewritten as A1; AX is unkown locally)
163 #
170 #
164 # <-1- A0 <-2- AX <-3- A1 # Marker "2,3" are exclusive to A1
171 # <-1- A0 <-2- AX <-3- A1 # Marker "2,3" are exclusive to A1
165
172
166 or
173 or
167
174
168 # (A0 has unknown precursors, A0 rewritten as A1 and A2 (divergence))
175 # (A0 has unknown precursors, A0 rewritten as A1 and A2 (divergence))
169 #
176 #
170 # <-2- A1 # Marker "2" is exclusive to A0,A1
177 # <-2- A1 # Marker "2" is exclusive to A0,A1
171 # /
178 # /
172 # <-1- A0
179 # <-1- A0
173 # \
180 # \
174 # <-3- A2 # Marker "3" is exclusive to A0,A2
181 # <-3- A2 # Marker "3" is exclusive to A0,A2
175 #
182 #
176 # in addition:
183 # in addition:
177 #
184 #
178 # Markers "2,3" are exclusive to A1,A2
185 # Markers "2,3" are exclusive to A1,A2
179 # Markers "1,2,3" are exclusive to A0,A1,A2
186 # Markers "1,2,3" are exclusive to A0,A1,A2
180
187
181 See test/test-obsolete-bundle-strip.t for more examples.
188 See test/test-obsolete-bundle-strip.t for more examples.
182
189
183 An example usage is strip. When stripping a changeset, we also want to
190 An example usage is strip. When stripping a changeset, we also want to
184 strip the markers exclusive to this changeset. Otherwise we would have
191 strip the markers exclusive to this changeset. Otherwise we would have
185 "dangling"" obsolescence markers from its precursors: Obsolescence markers
192 "dangling"" obsolescence markers from its precursors: Obsolescence markers
186 marking a node as obsolete without any successors available locally.
193 marking a node as obsolete without any successors available locally.
187
194
188 As for relevant markers, the prune markers for children will be followed.
195 As for relevant markers, the prune markers for children will be followed.
189 Of course, they will only be followed if the pruned children is
196 Of course, they will only be followed if the pruned children is
190 locally-known. Since the prune markers are relevant to the pruned node.
197 locally-known. Since the prune markers are relevant to the pruned node.
191 However, while prune markers are considered relevant to the parent of the
198 However, while prune markers are considered relevant to the parent of the
192 pruned changesets, prune markers for locally-known changeset (with no
199 pruned changesets, prune markers for locally-known changeset (with no
193 successors) are considered exclusive to the pruned nodes. This allows
200 successors) are considered exclusive to the pruned nodes. This allows
194 to strip the prune markers (with the rest of the exclusive chain) alongside
201 to strip the prune markers (with the rest of the exclusive chain) alongside
195 the pruned changesets.
202 the pruned changesets.
196 """
203 """
197 # running on a filtered repository would be dangerous as markers could be
204 # running on a filtered repository would be dangerous as markers could be
198 # reported as exclusive when they are relevant for other filtered nodes.
205 # reported as exclusive when they are relevant for other filtered nodes.
199 unfi = repo.unfiltered()
206 unfi = repo.unfiltered()
200
207
201 # shortcut to various useful item
208 # shortcut to various useful item
202 nm = unfi.changelog.nodemap
209 nm = unfi.changelog.nodemap
203 precursorsmarkers = unfi.obsstore.precursors
210 precursorsmarkers = unfi.obsstore.precursors
204 successormarkers = unfi.obsstore.successors
211 successormarkers = unfi.obsstore.successors
205 childrenmarkers = unfi.obsstore.children
212 childrenmarkers = unfi.obsstore.children
206
213
207 # exclusive markers (return of the function)
214 # exclusive markers (return of the function)
208 exclmarkers = set()
215 exclmarkers = set()
209 # we need fast membership testing
216 # we need fast membership testing
210 nodes = set(nodes)
217 nodes = set(nodes)
211 # looking for head in the obshistory
218 # looking for head in the obshistory
212 #
219 #
213 # XXX we are ignoring all issues in regard with cycle for now.
220 # XXX we are ignoring all issues in regard with cycle for now.
214 stack = [n for n in nodes if not _filterprunes(successormarkers.get(n, ()))]
221 stack = [n for n in nodes if not _filterprunes(successormarkers.get(n, ()))]
215 stack.sort()
222 stack.sort()
216 # nodes already stacked
223 # nodes already stacked
217 seennodes = set(stack)
224 seennodes = set(stack)
218 while stack:
225 while stack:
219 current = stack.pop()
226 current = stack.pop()
220 # fetch precursors markers
227 # fetch precursors markers
221 markers = list(precursorsmarkers.get(current, ()))
228 markers = list(precursorsmarkers.get(current, ()))
222 # extend the list with prune markers
229 # extend the list with prune markers
223 for mark in successormarkers.get(current, ()):
230 for mark in successormarkers.get(current, ()):
224 if not mark[1]:
231 if not mark[1]:
225 markers.append(mark)
232 markers.append(mark)
226 # and markers from children (looking for prune)
233 # and markers from children (looking for prune)
227 for mark in childrenmarkers.get(current, ()):
234 for mark in childrenmarkers.get(current, ()):
228 if not mark[1]:
235 if not mark[1]:
229 markers.append(mark)
236 markers.append(mark)
230 # traverse the markers
237 # traverse the markers
231 for mark in markers:
238 for mark in markers:
232 if mark in exclmarkers:
239 if mark in exclmarkers:
233 # markers already selected
240 # markers already selected
234 continue
241 continue
235
242
236 # If the markers is about the current node, select it
243 # If the markers is about the current node, select it
237 #
244 #
238 # (this delay the addition of markers from children)
245 # (this delay the addition of markers from children)
239 if mark[1] or mark[0] == current:
246 if mark[1] or mark[0] == current:
240 exclmarkers.add(mark)
247 exclmarkers.add(mark)
241
248
242 # should we keep traversing through the precursors?
249 # should we keep traversing through the precursors?
243 prec = mark[0]
250 prec = mark[0]
244
251
245 # nodes in the stack or already processed
252 # nodes in the stack or already processed
246 if prec in seennodes:
253 if prec in seennodes:
247 continue
254 continue
248
255
249 # is this a locally known node ?
256 # is this a locally known node ?
250 known = prec in nm
257 known = prec in nm
251 # if locally-known and not in the <nodes> set the traversal
258 # if locally-known and not in the <nodes> set the traversal
252 # stop here.
259 # stop here.
253 if known and prec not in nodes:
260 if known and prec not in nodes:
254 continue
261 continue
255
262
256 # do not keep going if there are unselected markers pointing to this
263 # do not keep going if there are unselected markers pointing to this
257 # nodes. If we end up traversing these unselected markers later the
264 # nodes. If we end up traversing these unselected markers later the
258 # node will be taken care of at that point.
265 # node will be taken care of at that point.
259 precmarkers = _filterprunes(successormarkers.get(prec))
266 precmarkers = _filterprunes(successormarkers.get(prec))
260 if precmarkers.issubset(exclmarkers):
267 if precmarkers.issubset(exclmarkers):
261 seennodes.add(prec)
268 seennodes.add(prec)
262 stack.append(prec)
269 stack.append(prec)
263
270
264 return exclmarkers
271 return exclmarkers
265
272
def foreground(repo, nodes):
    """Return all nodes in the "foreground" of the given nodes.

    The foreground of a revision is anything reachable through the
    parent -> children or precursor -> successor relation. It is very
    similar to "descendants", but augmented with obsolescence
    information.

    Beware that obsolescence cycles may give surprising results in
    complex situations.
    """
    repo = repo.unfiltered()
    ctxs = set(repo.set('%ln::', nodes))
    if repo.obsstore:
        # Obsolescence data exists, so a fixed-point computation mixing
        # descendants and successors is required.
        # XXX will probably deserve an optimised revset.
        nodemap = repo.changelog.nodemap
        previous = -1
        # Iterate until the combined set of successors/descendants
        # stops growing.
        while previous != len(ctxs):
            previous = len(ctxs)
            reached = {c.node() for c in ctxs}
            mutable = [c.node() for c in ctxs if c.mutable()]
            reached.update(allsuccessors(repo.obsstore, mutable))
            # Only locally known nodes can seed the next revset query.
            local = (n for n in reached if n in nodemap)
            ctxs = set(repo.set('%ln::', local))
    return {c.node() for c in ctxs}
291
298
def getobsoleted(repo, tr):
    """Return the set of pre-existing revisions obsoleted by a transaction.

    A revision is reported when it is locally known, not public, was not
    itself added by the transaction, and every marker using it as a
    precursor was introduced by this transaction.
    """
    torev = repo.unfiltered().changelog.nodemap.get
    getphase = repo._phasecache.phase
    markersfor = repo.obsstore.successors.get
    public = phases.public
    newmarkers = tr.changes.get('obsmarkers')
    # Seed with revisions created by the transaction: those are new,
    # not "pre-existing", and must be skipped.
    processed = set(tr.changes.get('revs'))
    result = set()
    for marker in newmarkers:
        precursor = marker[0]
        rev = torev(precursor)
        # Skip nodes unknown locally and revisions already handled
        # (added by the transaction or seen via an earlier marker).
        if rev is None or rev in processed:
            continue
        processed.add(rev)
        # Public changesets can never become obsolete.
        if getphase(repo, rev) == public:
            continue
        # The node only became obsolete in this transaction if *all*
        # of its successor markers are among the newly added ones.
        if set(markersfor(precursor) or []).issubset(newmarkers):
            result.add(rev)
    return result
313
320
def successorssets(repo, initialnode, closest=False, cache=None):
    """Return set of all latest successors of initial nodes

    The successors set of a changeset A are the group of revisions that succeed
    A. It succeeds A as a consistent whole, each revision being only a partial
    replacement. By default, the successors set contains non-obsolete
    changesets only, walking the obsolescence graph until reaching a leaf. If
    'closest' is set to True, closest successors-sets are returned (the
    obsolescence walk stops on known changesets).

    This function returns the full list of successor sets which is why it
    returns a list of tuples and not just a single tuple. Each tuple is a valid
    successors set. Note that (A,) may be a valid successors set for changeset A
    (see below).

    In most cases, a changeset A will have a single element (e.g. the changeset
    A is replaced by A') in its successors set. Though, it is also common for a
    changeset A to have no elements in its successor set (e.g. the changeset
    has been pruned). Therefore, the returned list of successors sets will be
    [(A',)] or [], respectively.

    When a changeset A is split into A' and B', however, it will result in a
    successors set containing more than a single element, i.e. [(A',B')].
    Divergent changesets will result in multiple successors sets, i.e. [(A',),
    (A'')].

    If a changeset A is not obsolete, then it will conceptually have no
    successors set. To distinguish this from a pruned changeset, the successor
    set will contain itself only, i.e. [(A,)].

    Finally, final successors unknown locally are considered to be pruned
    (pruned: obsoleted without any successors). (Final: successors not affected
    by markers).

    The 'closest' mode respects the repoview filtering. For example, without
    filter it will stop at the first locally known changeset, with 'visible'
    filter it will stop on visible changesets.

    The optional `cache` parameter is a dictionary that may contain
    precomputed successors sets. It is meant to reuse the computation of a
    previous call to `successorssets` when multiple calls are made at the same
    time. The cache dictionary is updated in place. The caller is responsible
    for its life span. Code that makes multiple calls to `successorssets`
    *should* use this cache mechanism or risk a performance hit.

    Since results are different depending on the 'closest' mode, the same cache
    cannot be reused for both modes.
    """

    # mapping: precursor node -> markers that obsolete it
    succmarkers = repo.obsstore.successors

    # Stack of nodes we search successors sets for
    toproceed = [initialnode]
    # set version of above list for fast loop detection
    # element added to "toproceed" must be added here
    stackedset = set(toproceed)
    if cache is None:
        cache = {}

    # This while loop is the flattened version of a recursive search for
    # successors sets
    #
    # def successorssets(x):
    #     successors = directsuccessors(x)
    #     ss = [[]]
    #     for succ in directsuccessors(x):
    #         # product as in itertools cartesian product
    #         ss = product(ss, successorssets(succ))
    #     return ss
    #
    # But we can not use plain recursive calls here:
    # - that would blow the python call stack
    # - obsolescence markers may have cycles, we need to handle them.
    #
    # The `toproceed` list act as our call stack. Every node we search
    # successors set for are stacked there.
    #
    # The `stackedset` is set version of this stack used to check if a node is
    # already stacked. This check is used to detect cycles and prevent infinite
    # loop.
    #
    # successors set of all nodes are stored in the `cache` dictionary.
    #
    # After this while loop ends we use the cache to return the successors sets
    # for the node requested by the caller.
    while toproceed:
        # Every iteration tries to compute the successors sets of the topmost
        # node of the stack: CURRENT.
        #
        # There are four possible outcomes:
        #
        # 1) We already know the successors sets of CURRENT:
        #    -> mission accomplished, pop it from the stack.
        # 2) Stop the walk:
        #    default case: Node is not obsolete
        #    closest case: Node is known at this repo filter level
        #      -> the node is its own successors sets. Add it to the cache.
        # 3) We do not know successors set of direct successors of CURRENT:
        #    -> We add those successors to the stack.
        # 4) We know successors sets of all direct successors of CURRENT:
        #    -> We can compute CURRENT successors set and add it to the
        #       cache.
        #
        current = toproceed[-1]

        # case 2 condition is a bit hairy because of closest,
        # we compute it on its own
        case2condition = ((current not in succmarkers)
                          or (closest and current != initialnode
                              and current in repo))

        if current in cache:
            # case (1): We already know the successors sets
            stackedset.remove(toproceed.pop())
        elif case2condition:
            # case (2): end of walk.
            if current in repo:
                # We have a valid successors.
                cache[current] = [(current,)]
            else:
                # Final obsolete version is unknown locally.
                # Do not count that as a valid successors
                cache[current] = []
        else:
            # cases (3) and (4)
            #
            # We proceed in two phases. Phase 1 aims to distinguish case (3)
            # from case (4):
            #
            #     For each direct successors of CURRENT, we check whether its
            #     successors sets are known. If they are not, we stack the
            #     unknown node and proceed to the next iteration of the while
            #     loop. (case 3)
            #
            #     During this step, we may detect obsolescence cycles: a node
            #     with unknown successors sets but already in the call stack.
            #     In such a situation, we arbitrary set the successors sets of
            #     the node to nothing (node pruned) to break the cycle.
            #
            #     If no break was encountered we proceed to phase 2.
            #
            # Phase 2 computes successors sets of CURRENT (case 4); see details
            # in phase 2 itself.
            #
            # Note the two levels of iteration in each phase.
            # - The first one handles obsolescence markers using CURRENT as
            #   precursor (successors markers of CURRENT).
            #
            #   Having multiple entry here means divergence.
            #
            # - The second one handles successors defined in each marker.
            #
            #   Having none means pruned node, multiple successors means split,
            #   single successors are standard replacement.
            #
            for mark in sorted(succmarkers[current]):
                for suc in mark[1]:
                    if suc not in cache:
                        if suc in stackedset:
                            # cycle breaking
                            cache[suc] = []
                        else:
                            # case (3) If we have not computed successors sets
                            # of one of those successors we add it to the
                            # `toproceed` stack and stop all work for this
                            # iteration.
                            toproceed.append(suc)
                            stackedset.add(suc)
                            break
                else:
                    continue
                # NOTE: reached only via the inner `break` above --
                # propagate the "stop all work for this iteration" signal.
                break
            else:
                # case (4): we know all successors sets of all direct
                # successors
                #
                # Successors set contributed by each marker depends on the
                # successors sets of all its "successors" node.
                #
                # Each different marker is a divergence in the obsolescence
                # history. It contributes successors sets distinct from other
                # markers.
                #
                # Within a marker, a successor may have divergent successors
                # sets. In such a case, the marker will contribute multiple
                # divergent successors sets. If multiple successors have
                # divergent successors sets, a Cartesian product is used.
                #
                # At the end we post-process successors sets to remove
                # duplicated entry and successors set that are strict subset of
                # another one.
                succssets = []
                for mark in sorted(succmarkers[current]):
                    # successors sets contributed by this marker
                    markss = [[]]
                    for suc in mark[1]:
                        # cardinal product with previous successors
                        productresult = []
                        for prefix in markss:
                            for suffix in cache[suc]:
                                newss = list(prefix)
                                for part in suffix:
                                    # do not duplicated entry in successors set
                                    # first entry wins.
                                    if part not in newss:
                                        newss.append(part)
                                productresult.append(newss)
                        markss = productresult
                    succssets.extend(markss)
                # remove duplicated and subset
                seen = []
                final = []
                candidate = sorted(((set(s), s) for s in succssets if s),
                                   key=lambda x: len(x[1]), reverse=True)
                for setversion, listversion in candidate:
                    for seenset in seen:
                        if setversion.issubset(seenset):
                            break
                    else:
                        final.append(listversion)
                        seen.append(setversion)
                final.reverse() # put small successors set first
                cache[current] = final
    return cache[initialnode]
General Comments 0
You need to be logged in to leave comments. Login now