##// END OF EJS Templates
templater: add simple interface for unnamed template (API)...
Yuya Nishihara -
r32873:2ecce24d default
parent child Browse files
Show More
@@ -1,3591 +1,3591 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import itertools
11 import itertools
12 import os
12 import os
13 import re
13 import re
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23
23
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 changelog,
26 changelog,
27 copies,
27 copies,
28 crecord as crecordmod,
28 crecord as crecordmod,
29 encoding,
29 encoding,
30 error,
30 error,
31 formatter,
31 formatter,
32 graphmod,
32 graphmod,
33 lock as lockmod,
33 lock as lockmod,
34 match as matchmod,
34 match as matchmod,
35 obsolete,
35 obsolete,
36 patch,
36 patch,
37 pathutil,
37 pathutil,
38 phases,
38 phases,
39 pycompat,
39 pycompat,
40 registrar,
40 registrar,
41 repair,
41 repair,
42 revlog,
42 revlog,
43 revset,
43 revset,
44 scmutil,
44 scmutil,
45 smartset,
45 smartset,
46 templatekw,
46 templatekw,
47 templater,
47 templater,
48 util,
48 util,
49 vfs as vfsmod,
49 vfs as vfsmod,
50 )
50 )
# in-memory file factory (cStringIO/BytesIO wrapper) used to assemble patches
stringio = util.stringio

# templates of common command options

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"
163
163
def ishunk(x):
    """Return True when x is a record or crecord hunk instance."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
167
167
def newandmodified(chunks, originalchunks):
    """Return the set of filenames whose hunks introduce a new file and
    do not appear in originalchunks (i.e. were added during filtering)."""
    return set(c.header.filename() for c in chunks
               if ishunk(c) and c.header.isnewfile()
               and c not in originalchunks)
175
175
def parsealiases(cmd):
    """Split a command-table key into its list of aliases.

    A leading '^' marker (used to flag important commands) is dropped.
    """
    stripped = cmd.lstrip("^")
    return stripped.split("|")
178
178
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original (unwrapped) write method so callers can restore
    it when they are done.
    """
    originalwrite = ui.write

    def labeledwrite(*args, **kwargs):
        baselabel = kwargs.pop('label', '')
        # difflabel classifies each chunk of diff text; combine its label
        # with whatever label the caller supplied
        for chunk, sublabel in patch.difflabel(lambda: args):
            originalwrite(chunk, label=baselabel + sublabel)

    setattr(ui, 'write', labeledwrite)
    return originalwrite
191
191
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter *originalhunks*, via curses when requested.

    With *usecurses*, the curses chunk selector is used (wrapped by the
    test decorator when *testfile* is set); otherwise the plain-text
    prompting of patch.filterpatch is used.
    """
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        selector = crecordmod.testdecorator(testfile,
                                            crecordmod.testchunkselector)
    else:
        selector = crecordmod.chunkselector

    return crecordmod.filterpatch(ui, originalhunks, selector, operation)
204
204
def recordfilter(ui, originalhunks, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used to build ui messages indicating to the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    # test hook: a script file driving the curses selector non-interactively
    testfile = ui.config('experimental', 'crecordtest', None)
    # wrap ui.write so the selection UI emits colorized diff output; the
    # original write method must be restored even if filtering raises
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, operation)
    finally:
        ui.write = oldwrite
    return newchunks, newopts
221
221
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
            filterfn, *pats, **opts):
    """Interactively select changes and commit them via *commitfunc*.

    *cmdsuggest* names the command to suggest when the ui is not
    interactive; *backupall* forces backing up every changed file instead
    of only those touched by the selection; *filterfn* is the hunk
    filtering UI (e.g. recordfilter).
    """
    # imported here to avoid a module-level import cycle with merge
    from . import merge as mergemod
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            # collect explicitly-named directories and abort on bad files,
            # so checkcommitpatterns below can validate the patterns
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # git-style diff with function context and no dates: required so
        # the generated patch round-trips through parse/apply below
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # not every chunk type exposes files(); skip those
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # an existing backup dir from a previous run is fine
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            # assemble the selected hunks into a single patch buffer
            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        extra={"suffix": ".diff"},
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # newly-added files must be removed before reverting, or the
            # revert below would leave them behind as unknown files
            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                        False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup; leftover backups are harmless
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # the whole record operation must hold the working-dir lock
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
404
404
def findpossible(cmd, table, strict=False):
    """Map each matching command to (aliases, command table entry).

    Returns a (choice, allcmds) pair, where allcmds lists every alias
    examined. Debug commands (or their aliases) are reported only when no
    normal command matches. Without *strict*, unambiguous prefixes also
    match.
    """
    normal = {}
    debug = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        candidates = [cmd]
    else:
        candidates = table.keys()

    allcmds = []
    for key in candidates:
        aliases = parsealiases(key)
        allcmds.extend(aliases)
        if cmd in aliases:
            match = cmd
        elif strict:
            match = None
        else:
            # first alias that cmd is a prefix of, if any
            match = next((a for a in aliases if a.startswith(cmd)), None)
        if match is None:
            continue
        isdebug = aliases[0].startswith("debug") or match.startswith("debug")
        bucket = debug if isdebug else normal
        bucket[match] = (aliases, table[key])

    if not normal and debug:
        normal = debug

    return normal, allcmds
442
442
def findcmd(cmd, table, strict=True):
    """Look up *cmd* in *table* and return (aliases, command table entry).

    Raises AmbiguousCommand when several commands match, UnknownCommand
    when none does.
    """
    matches, allcmds = findpossible(cmd, table, strict)

    # an exact match always wins over prefix matches
    if cmd in matches:
        return matches[cmd]

    if len(matches) > 1:
        raise error.AmbiguousCommand(cmd, sorted(matches))

    for entry in matches.values():
        return entry

    raise error.UnknownCommand(cmd, allcmds)
458
458
def findrepo(p):
    """Walk up from directory *p* and return the closest ancestor that
    contains a '.hg' directory, or None if none exists."""
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # dirname() is a fixed point only at the root: no repo found
            return None
        p = parent

    return p
466
466
def bailifchanged(repo, merge=True, hint=None):
    """Enforce the precondition that the working directory is clean.

    Set *merge* to False to ignore a pending uncommitted merge (as
    'update --check' does). *hint* is the usual hint passed to the Abort
    exception. Subrepositories are checked recursively.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    # modified, added, removed, deleted — any nonempty list means dirty
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'), hint=hint)
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        wctx.sub(subpath).bailifchanged(hint=hint)
484
484
def logmessage(ui, opts):
    """Return the commit message given by the -m/--message or
    -l/--logfile option (None when neither is set)."""
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))

    if logfile and not message:
        try:
            if isstdiofilename(logfile):
                message = ui.fin.read()
            else:
                # normalize line endings to '\n'
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, inst.strerror))
    return message
503
503
def mergeeditform(ctxorbool, baseformname):
    """Return the appropriate editform name (referencing a committemplate).

    *ctxorbool* is either a ctx being committed, or a bool indicating
    whether a merge is being committed. '.merge' is appended to
    *baseformname* for merges, '.normal' otherwise.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        ismerge = len(ctxorbool.parents()) > 1
    suffix = ".merge" if ismerge else ".normal"
    return baseformname + suffix
520
520
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """Return the commit message editor appropriate for '--edit'.

    'finishdesc' is a function called with the edited commit message
    (= 'description' of the new changeset) just after editing, but before
    checking empty-ness; it returns the actual text to be stored into
    history, allowing the description to be changed before storing.

    'extramsg' is an extra message shown in the editor instead of the
    'Leave message empty to abort commit' line ('HG: ' prefix and EOL are
    added automatically).

    'editform' is a dot-separated list of names distinguishing the purpose
    of the commit text editing (it references a committemplate).

    'commitforceeditor' is returned regardless of 'edit' when one of
    'finishdesc' or 'extramsg' is given, because those are specific to
    usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forcededitor(r, c, s):
            return commitforceeditor(r, c, s, finishdesc=finishdesc,
                                     extramsg=extramsg, editform=editform)
        return forcededitor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
551
551
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    raw = opts.get('limit')
    # falsy values (missing option, empty string, 0) mean "no limit"
    if not raw:
        return None
    try:
        limit = int(raw)
    except ValueError:
        raise error.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_('limit must be positive'))
    return limit
565
565
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand '%'-escape sequences in the filename pattern 'pat'.

    Supported escapes (each registered only when its input is available):
      %%  a literal '%'
      %b  basename of the repository root
      %H  full changeset hash (requires node)
      %R  changeset revision number (requires node)
      %h  short changeset hash (requires node)
      %m  description, with non-word characters replaced by '_' (requires node)
      %r  zero-padded revision number, width 'revwidth' (requires node)
      %N  total number of patches (requires total)
      %n  zero-padded sequence number (requires seqno; padded to len(total)
          when total is also given)
      %s/%d/%p  basename/dirname/full path of 'pathname'

    Raises error.Abort on an escape that is not registered.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        # raw string: '\w' in a plain literal is an invalid escape in py3
        'm': lambda: re.sub(r'[^\w]', '_', str(desc))
    }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        # register the node-dependent escapes in a single conditional
        # (the original checked 'if node:' twice back to back)
        if node:
            expander.update(node_expander)
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # when both are known, pad the sequence number to the total's width
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
611
611
def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    # empty/None and the conventional '-' both mean a standard stream
    if pat and pat != '-':
        return False
    return True
615
615
616 class _unclosablefile(object):
616 class _unclosablefile(object):
617 def __init__(self, fp):
617 def __init__(self, fp):
618 self._fp = fp
618 self._fp = fp
619
619
620 def close(self):
620 def close(self):
621 pass
621 pass
622
622
623 def __iter__(self):
623 def __iter__(self):
624 return iter(self._fp)
624 return iter(self._fp)
625
625
626 def __getattr__(self, attr):
626 def __getattr__(self, attr):
627 return getattr(self._fp, attr)
627 return getattr(self._fp, attr)
628
628
629 def __enter__(self):
629 def __enter__(self):
630 return self
630 return self
631
631
632 def __exit__(self, exc_type, exc_value, exc_tb):
632 def __exit__(self, exc_type, exc_value, exc_tb):
633 pass
633 pass
634
634
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open a file object for the '%'-expanded filename pattern 'pat'.

    A pattern naming stdin/stdout ('-' or empty) yields an unclosable
    wrapper around the ui's input or output stream, chosen by whether
    'mode' is a write mode. Otherwise the pattern is expanded via
    makefilename() and opened with 'mode' (possibly overridden per-file
    through 'modemap'; a first 'wb' open flips the map entry to 'ab' so
    later writes to the same file append).
    """
    writable = mode not in ('r', 'rb')

    if isstdiofilename(pat):
        fp = repo.ui.fout if writable else repo.ui.fin
        return _unclosablefile(fp)

    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # remember that this file was created; subsequent opens append
            modemap[fn] = 'ab'
    return open(fn, mode)
653
653
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    # which revlog the caller asked for, from the debug command's options
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    # reject contradictory or incomplete option combinations up front
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        # resolve the requested revlog from the repository, if possible
        if cl:
            # unfiltered so hidden revisions are reachable for debugging
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir)
            # only use the dirlog when it actually has revisions
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            # an empty filelog means the file is not tracked; fall through
            if len(filelog):
                r = filelog
    if not r:
        # fall back to opening a raw revlog file straight from disk
        # (works outside any repository, e.g. for debugdata/debugindex)
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # strip the two-character extension ('.i'/'.d') and open the index
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
698
698
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) files matched by 'pats'.

    The last element of 'pats' is the destination; the rest are sources.
    Returns True if any individual copy failed, False otherwise.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}  # abstarget -> abssrc, used to detect colliding copies
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # expand one source pattern into (abs, rel, exact) tuples,
        # warning about (and skipping) unmanaged/removed files
        srcs = []
        if after:
            # --after: a removed source is fine, we only record the copy
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(wctx, [pat], opts, globbed=True)
        for abs in wctx.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                # only complain about files the user named explicitly
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # copy one file; returns True (a failure) only on an I/O error
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                # source and target differ only by case on a
                # case-insensitive filesystem
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                # refuse to clobber; suggest the flag combination that
                # would make the operation succeed
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    if after:
                        flags = '--after --force'
                    else:
                        flags = '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            # --after only records an already-performed copy/move
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            # actually touch the filesystem
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # case-only rename: go through a temp name so the
                    # filesystem registers the case change
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                repo.wvfs.unlinkpath(abssrc)
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many candidate targets already exist for
                    # this strip length; the better-scoring length wins
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    # the last pattern is the destination; everything before it is a source
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    # with --after, targets are inferred from what already exists on disk
    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            # copyfile returns a truthy value only on failure
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
944
944
## facility to let extension process additional data into an import patch
# list of identifier to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass an ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
965
965
966 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
966 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
967 """Utility function used by commands.import to import a single patch
967 """Utility function used by commands.import to import a single patch
968
968
969 This function is explicitly defined here to help the evolve extension to
969 This function is explicitly defined here to help the evolve extension to
970 wrap this part of the import logic.
970 wrap this part of the import logic.
971
971
972 The API is currently a bit ugly because it a simple code translation from
972 The API is currently a bit ugly because it a simple code translation from
973 the import command. Feel free to make it better.
973 the import command. Feel free to make it better.
974
974
975 :hunk: a patch (as a binary string)
975 :hunk: a patch (as a binary string)
976 :parents: nodes that will be parent of the created commit
976 :parents: nodes that will be parent of the created commit
977 :opts: the full dict of option passed to the import command
977 :opts: the full dict of option passed to the import command
978 :msgs: list to save commit message to.
978 :msgs: list to save commit message to.
979 (used in case we need to save it when failing)
979 (used in case we need to save it when failing)
980 :updatefunc: a function that update a repo to a given node
980 :updatefunc: a function that update a repo to a given node
981 updatefunc(<repo>, <node>)
981 updatefunc(<repo>, <node>)
982 """
982 """
983 # avoid cycle context -> subrepo -> cmdutil
983 # avoid cycle context -> subrepo -> cmdutil
984 from . import context
984 from . import context
985 extractdata = patch.extract(ui, hunk)
985 extractdata = patch.extract(ui, hunk)
986 tmpname = extractdata.get('filename')
986 tmpname = extractdata.get('filename')
987 message = extractdata.get('message')
987 message = extractdata.get('message')
988 user = opts.get('user') or extractdata.get('user')
988 user = opts.get('user') or extractdata.get('user')
989 date = opts.get('date') or extractdata.get('date')
989 date = opts.get('date') or extractdata.get('date')
990 branch = extractdata.get('branch')
990 branch = extractdata.get('branch')
991 nodeid = extractdata.get('nodeid')
991 nodeid = extractdata.get('nodeid')
992 p1 = extractdata.get('p1')
992 p1 = extractdata.get('p1')
993 p2 = extractdata.get('p2')
993 p2 = extractdata.get('p2')
994
994
995 nocommit = opts.get('no_commit')
995 nocommit = opts.get('no_commit')
996 importbranch = opts.get('import_branch')
996 importbranch = opts.get('import_branch')
997 update = not opts.get('bypass')
997 update = not opts.get('bypass')
998 strip = opts["strip"]
998 strip = opts["strip"]
999 prefix = opts["prefix"]
999 prefix = opts["prefix"]
1000 sim = float(opts.get('similarity') or 0)
1000 sim = float(opts.get('similarity') or 0)
1001 if not tmpname:
1001 if not tmpname:
1002 return (None, None, False)
1002 return (None, None, False)
1003
1003
1004 rejects = False
1004 rejects = False
1005
1005
1006 try:
1006 try:
1007 cmdline_message = logmessage(ui, opts)
1007 cmdline_message = logmessage(ui, opts)
1008 if cmdline_message:
1008 if cmdline_message:
1009 # pickup the cmdline msg
1009 # pickup the cmdline msg
1010 message = cmdline_message
1010 message = cmdline_message
1011 elif message:
1011 elif message:
1012 # pickup the patch msg
1012 # pickup the patch msg
1013 message = message.strip()
1013 message = message.strip()
1014 else:
1014 else:
1015 # launch the editor
1015 # launch the editor
1016 message = None
1016 message = None
1017 ui.debug('message:\n%s\n' % message)
1017 ui.debug('message:\n%s\n' % message)
1018
1018
1019 if len(parents) == 1:
1019 if len(parents) == 1:
1020 parents.append(repo[nullid])
1020 parents.append(repo[nullid])
1021 if opts.get('exact'):
1021 if opts.get('exact'):
1022 if not nodeid or not p1:
1022 if not nodeid or not p1:
1023 raise error.Abort(_('not a Mercurial patch'))
1023 raise error.Abort(_('not a Mercurial patch'))
1024 p1 = repo[p1]
1024 p1 = repo[p1]
1025 p2 = repo[p2 or nullid]
1025 p2 = repo[p2 or nullid]
1026 elif p2:
1026 elif p2:
1027 try:
1027 try:
1028 p1 = repo[p1]
1028 p1 = repo[p1]
1029 p2 = repo[p2]
1029 p2 = repo[p2]
1030 # Without any options, consider p2 only if the
1030 # Without any options, consider p2 only if the
1031 # patch is being applied on top of the recorded
1031 # patch is being applied on top of the recorded
1032 # first parent.
1032 # first parent.
1033 if p1 != parents[0]:
1033 if p1 != parents[0]:
1034 p1 = parents[0]
1034 p1 = parents[0]
1035 p2 = repo[nullid]
1035 p2 = repo[nullid]
1036 except error.RepoError:
1036 except error.RepoError:
1037 p1, p2 = parents
1037 p1, p2 = parents
1038 if p2.node() == nullid:
1038 if p2.node() == nullid:
1039 ui.warn(_("warning: import the patch as a normal revision\n"
1039 ui.warn(_("warning: import the patch as a normal revision\n"
1040 "(use --exact to import the patch as a merge)\n"))
1040 "(use --exact to import the patch as a merge)\n"))
1041 else:
1041 else:
1042 p1, p2 = parents
1042 p1, p2 = parents
1043
1043
1044 n = None
1044 n = None
1045 if update:
1045 if update:
1046 if p1 != parents[0]:
1046 if p1 != parents[0]:
1047 updatefunc(repo, p1.node())
1047 updatefunc(repo, p1.node())
1048 if p2 != parents[1]:
1048 if p2 != parents[1]:
1049 repo.setparents(p1.node(), p2.node())
1049 repo.setparents(p1.node(), p2.node())
1050
1050
1051 if opts.get('exact') or importbranch:
1051 if opts.get('exact') or importbranch:
1052 repo.dirstate.setbranch(branch or 'default')
1052 repo.dirstate.setbranch(branch or 'default')
1053
1053
1054 partial = opts.get('partial', False)
1054 partial = opts.get('partial', False)
1055 files = set()
1055 files = set()
1056 try:
1056 try:
1057 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1057 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1058 files=files, eolmode=None, similarity=sim / 100.0)
1058 files=files, eolmode=None, similarity=sim / 100.0)
1059 except patch.PatchError as e:
1059 except patch.PatchError as e:
1060 if not partial:
1060 if not partial:
1061 raise error.Abort(str(e))
1061 raise error.Abort(str(e))
1062 if partial:
1062 if partial:
1063 rejects = True
1063 rejects = True
1064
1064
1065 files = list(files)
1065 files = list(files)
1066 if nocommit:
1066 if nocommit:
1067 if message:
1067 if message:
1068 msgs.append(message)
1068 msgs.append(message)
1069 else:
1069 else:
1070 if opts.get('exact') or p2:
1070 if opts.get('exact') or p2:
1071 # If you got here, you either use --force and know what
1071 # If you got here, you either use --force and know what
1072 # you are doing or used --exact or a merge patch while
1072 # you are doing or used --exact or a merge patch while
1073 # being updated to its first parent.
1073 # being updated to its first parent.
1074 m = None
1074 m = None
1075 else:
1075 else:
1076 m = scmutil.matchfiles(repo, files or [])
1076 m = scmutil.matchfiles(repo, files or [])
1077 editform = mergeeditform(repo[None], 'import.normal')
1077 editform = mergeeditform(repo[None], 'import.normal')
1078 if opts.get('exact'):
1078 if opts.get('exact'):
1079 editor = None
1079 editor = None
1080 else:
1080 else:
1081 editor = getcommiteditor(editform=editform, **opts)
1081 editor = getcommiteditor(editform=editform, **opts)
1082 extra = {}
1082 extra = {}
1083 for idfunc in extrapreimport:
1083 for idfunc in extrapreimport:
1084 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1084 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1085 overrides = {}
1085 overrides = {}
1086 if partial:
1086 if partial:
1087 overrides[('ui', 'allowemptycommit')] = True
1087 overrides[('ui', 'allowemptycommit')] = True
1088 with repo.ui.configoverride(overrides, 'import'):
1088 with repo.ui.configoverride(overrides, 'import'):
1089 n = repo.commit(message, user,
1089 n = repo.commit(message, user,
1090 date, match=m,
1090 date, match=m,
1091 editor=editor, extra=extra)
1091 editor=editor, extra=extra)
1092 for idfunc in extrapostimport:
1092 for idfunc in extrapostimport:
1093 extrapostimportmap[idfunc](repo[n])
1093 extrapostimportmap[idfunc](repo[n])
1094 else:
1094 else:
1095 if opts.get('exact') or importbranch:
1095 if opts.get('exact') or importbranch:
1096 branch = branch or 'default'
1096 branch = branch or 'default'
1097 else:
1097 else:
1098 branch = p1.branch()
1098 branch = p1.branch()
1099 store = patch.filestore()
1099 store = patch.filestore()
1100 try:
1100 try:
1101 files = set()
1101 files = set()
1102 try:
1102 try:
1103 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1103 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1104 files, eolmode=None)
1104 files, eolmode=None)
1105 except patch.PatchError as e:
1105 except patch.PatchError as e:
1106 raise error.Abort(str(e))
1106 raise error.Abort(str(e))
1107 if opts.get('exact'):
1107 if opts.get('exact'):
1108 editor = None
1108 editor = None
1109 else:
1109 else:
1110 editor = getcommiteditor(editform='import.bypass')
1110 editor = getcommiteditor(editform='import.bypass')
1111 memctx = context.memctx(repo, (p1.node(), p2.node()),
1111 memctx = context.memctx(repo, (p1.node(), p2.node()),
1112 message,
1112 message,
1113 files=files,
1113 files=files,
1114 filectxfn=store,
1114 filectxfn=store,
1115 user=user,
1115 user=user,
1116 date=date,
1116 date=date,
1117 branch=branch,
1117 branch=branch,
1118 editor=editor)
1118 editor=editor)
1119 n = memctx.commit()
1119 n = memctx.commit()
1120 finally:
1120 finally:
1121 store.close()
1121 store.close()
1122 if opts.get('exact') and nocommit:
1122 if opts.get('exact') and nocommit:
1123 # --exact with --no-commit is still useful in that it does merge
1123 # --exact with --no-commit is still useful in that it does merge
1124 # and branch bits
1124 # and branch bits
1125 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1125 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1126 elif opts.get('exact') and hex(n) != nodeid:
1126 elif opts.get('exact') and hex(n) != nodeid:
1127 raise error.Abort(_('patch is damaged or loses information'))
1127 raise error.Abort(_('patch is damaged or loses information'))
1128 msg = _('applied to working directory')
1128 msg = _('applied to working directory')
1129 if n:
1129 if n:
1130 # i18n: refers to a short changeset id
1130 # i18n: refers to a short changeset id
1131 msg = _('created %s') % short(n)
1131 msg = _('created %s') % short(n)
1132 return (msg, n, rejects)
1132 return (msg, n, rejects)
1133 finally:
1133 finally:
1134 os.unlink(tmpname)
1134 os.unlink(tmpname)
1135
1135
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# the function has to return a string to be added to the header, or None
# to add nothing; it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1143
1143
def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
    """Emit one changeset as an "HG changeset patch" via the write() callback.

    The header block (user, date, node, parents, extension-provided lines) is
    written first, followed by the description and the diff against the
    selected parent.
    """
    ctxnode = scmutil.binnode(ctx)
    parentnodes = [p.node() for p in ctx.parents() if p]
    if switch_parent:
        # diff against p2 instead of p1 when requested
        parentnodes.reverse()
    prev = parentnodes[0] if parentnodes else nullid
    branchname = ctx.branch()

    write("# HG changeset patch\n")
    write("# User %s\n" % ctx.user())
    write("# Date %d %d\n" % ctx.date())
    write("# %s\n" % util.datestr(ctx.date()))
    # the default branch is implicit and never recorded in the header
    if branchname and branchname != 'default':
        write("# Branch %s\n" % branchname)
    write("# Node ID %s\n" % hex(ctxnode))
    write("# Parent %s\n" % hex(prev))
    if len(parentnodes) > 1:
        write("# Parent %s\n" % hex(parentnodes[1]))

    # extensions may register extra header lines; None means "emit nothing"
    for headerid in extraexport:
        header = extraexportmap[headerid](seqno, ctx)
        if header is not None:
            write('# %s\n' % header)

    write(ctx.description().rstrip())
    write("\n\n")

    for chunk, label in patch.diffui(repo, prev, ctxnode, match, opts=diffopts):
        write(chunk, label=label)
1176
1176
def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches

    Args:
      repo: The repository from which we're exporting revisions.
      revs: A list of revisions to export as revision numbers.
      fntemplate: An optional string to use for generating patch file names.
      fp: An optional file-like object to which patches should be written.
      switch_parent: If True, show diffs against second parent when not nullid.
                     Default is false, which always shows diff against p1.
      opts: diff options to use for generating the patch.
      match: If specified, only export changes to files matching this matcher.

    Returns:
      Nothing.

    Side Effect:
      "HG Changeset Patch" data is emitted to one of the following
      destinations:
        fp is specified: All revs are written to the specified
                         file-like object.
        fntemplate specified: Each rev is written to a unique file named using
                              the given template.
        Neither fp nor template specified: All revs written to repo.ui.write()
    '''
    if not revs:
        # nothing to export; also avoids max() raising ValueError on an
        # empty sequence below
        return

    total = len(revs)
    # width of the widest revision number, for %r padding in fntemplate
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    write = None
    dest = '<unnamed>'
    if fp:
        dest = getattr(fp, 'name', dest)
        def write(s, **kw):
            fp.write(s)
    elif not fntemplate:
        write = repo.ui.write

    for seqno, rev in enumerate(revs, 1):
        ctx = repo[rev]
        fo = None
        if not fp and fntemplate:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0]  # Commit always has a first line.
            fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
                             total=total, seqno=seqno, revwidth=revwidth,
                             mode='wb', modemap=filemode)
            dest = fo.name
            def write(s, **kw):
                fo.write(s)
        # '<...>' marks a pseudo-destination (e.g. '<unnamed>'); only real
        # file names are worth reporting
        if not dest.startswith('<'):
            repo.ui.note("%s\n" % dest)
        _exportsingle(
            repo, ctx, match, switch_parent, rev, seqno, write, opts)
        if fo is not None:
            fo.close()
1235
1235
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.'''
    # route output either to the ui or to the given file object
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    relroot = pathutil.canonpath(repo.root, repo.getcwd(), root) if root else ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        # warn about match patterns that fall outside the relative root
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat needs no context lines
        diffopts = diffopts.copy(context=0)
        width = ui.termwidth() if not ui.plain() else 80
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        difflines = util.iterlines(chunks)
        for chunk, label in patch.diffstatui(difflines, width=width):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1, ctx2 = repo[node1], repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1292
1292
1293 def _changesetlabels(ctx):
1293 def _changesetlabels(ctx):
1294 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1294 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1295 if ctx.obsolete():
1295 if ctx.obsolete():
1296 labels.append('changeset.obsolete')
1296 labels.append('changeset.obsolete')
1297 if ctx.troubled():
1297 if ctx.troubled():
1298 labels.append('changeset.troubled')
1298 labels.append('changeset.troubled')
1299 for trouble in ctx.troubles():
1299 for trouble in ctx.troubles():
1300 labels.append('trouble.%s' % trouble)
1300 labels.append('trouble.%s' % trouble)
1301 return ' '.join(labels)
1301 return ' '.join(labels)
1302
1302
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        # when buffered, show() collects output per revision in self.hunk
        # instead of writing it immediately (used by graph log rendering)
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        # per-revision header strings (populated by subclasses)
        self.header = {}
        # per-revision buffered output, keyed by rev (see show()/flush())
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, ctx):
        """Write any buffered header/hunk for ctx; return 1 if a hunk was
        written, 0 otherwise."""
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            # avoid repeating an identical header for consecutive revisions
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        # emit the accumulated footer, if a subclass produced one
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        """Show ctx, either immediately or into the per-rev buffer."""
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()
        # full hashes in debug mode, short ones otherwise
        if self.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short
        # as of now, wctx.node() and wctx.rev() return None, but we want to
        # show the same values as {node} and {rev} templatekw
        revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % revnode, label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset: %d:%s\n") % revnode,
                      label=_changesetlabels(ctx))

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch: %s\n") % branch,
                          label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase: %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent: %d:%s\n")
                          % (pctx.rev(), hexfunc(pctx.node())),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifestlog._revlog.rev(mnode),
                           hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user: %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date: %s\n") % date,
                      label='log.date')

        if ctx.troubled():
            # i18n: column positioning for "hg log"
            self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
                          label='log.trouble')

        if self.ui.debugflag:
            # modified/added/removed relative to the first parent
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies: %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra: %s=%s\n")
                              % (key, util.escapestr(value)),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # non-verbose: show only the first line of the description
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn)

    def showpatch(self, ctx, matchfn):
        """Write the diffstat and/or patch for ctx against its first parent,
        according to the configured diff options."""
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                # blank line separates diffstat from the patch proper
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write("\n")
1478
1478
1479 class jsonchangeset(changeset_printer):
1479 class jsonchangeset(changeset_printer):
1480 '''format changeset information.'''
1480 '''format changeset information.'''
1481
1481
    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # True until the first changeset is emitted; controls the JSON list
        # delimiters written by _show() and close()
        self._first = True
1486
1486
    def close(self):
        # Terminate the JSON output.  If at least one changeset was shown,
        # an opening "[\n {" was already written, so just close the list;
        # otherwise emit a complete empty list.
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")
1492
1492
1493 def _show(self, ctx, copies, matchfn, props):
1493 def _show(self, ctx, copies, matchfn, props):
1494 '''show a single changeset or file revision'''
1494 '''show a single changeset or file revision'''
1495 rev = ctx.rev()
1495 rev = ctx.rev()
1496 if rev is None:
1496 if rev is None:
1497 jrev = jnode = 'null'
1497 jrev = jnode = 'null'
1498 else:
1498 else:
1499 jrev = '%d' % rev
1499 jrev = '%d' % rev
1500 jnode = '"%s"' % hex(ctx.node())
1500 jnode = '"%s"' % hex(ctx.node())
1501 j = encoding.jsonescape
1501 j = encoding.jsonescape
1502
1502
1503 if self._first:
1503 if self._first:
1504 self.ui.write("[\n {")
1504 self.ui.write("[\n {")
1505 self._first = False
1505 self._first = False
1506 else:
1506 else:
1507 self.ui.write(",\n {")
1507 self.ui.write(",\n {")
1508
1508
1509 if self.ui.quiet:
1509 if self.ui.quiet:
1510 self.ui.write(('\n "rev": %s') % jrev)
1510 self.ui.write(('\n "rev": %s') % jrev)
1511 self.ui.write((',\n "node": %s') % jnode)
1511 self.ui.write((',\n "node": %s') % jnode)
1512 self.ui.write('\n }')
1512 self.ui.write('\n }')
1513 return
1513 return
1514
1514
1515 self.ui.write(('\n "rev": %s') % jrev)
1515 self.ui.write(('\n "rev": %s') % jrev)
1516 self.ui.write((',\n "node": %s') % jnode)
1516 self.ui.write((',\n "node": %s') % jnode)
1517 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1517 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1518 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1518 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1519 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1519 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1520 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1520 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1521 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1521 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1522
1522
1523 self.ui.write((',\n "bookmarks": [%s]') %
1523 self.ui.write((',\n "bookmarks": [%s]') %
1524 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1524 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1525 self.ui.write((',\n "tags": [%s]') %
1525 self.ui.write((',\n "tags": [%s]') %
1526 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1526 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1527 self.ui.write((',\n "parents": [%s]') %
1527 self.ui.write((',\n "parents": [%s]') %
1528 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1528 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1529
1529
1530 if self.ui.debugflag:
1530 if self.ui.debugflag:
1531 if rev is None:
1531 if rev is None:
1532 jmanifestnode = 'null'
1532 jmanifestnode = 'null'
1533 else:
1533 else:
1534 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1534 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1535 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1535 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1536
1536
1537 self.ui.write((',\n "extra": {%s}') %
1537 self.ui.write((',\n "extra": {%s}') %
1538 ", ".join('"%s": "%s"' % (j(k), j(v))
1538 ", ".join('"%s": "%s"' % (j(k), j(v))
1539 for k, v in ctx.extra().items()))
1539 for k, v in ctx.extra().items()))
1540
1540
1541 files = ctx.p1().status(ctx)
1541 files = ctx.p1().status(ctx)
1542 self.ui.write((',\n "modified": [%s]') %
1542 self.ui.write((',\n "modified": [%s]') %
1543 ", ".join('"%s"' % j(f) for f in files[0]))
1543 ", ".join('"%s"' % j(f) for f in files[0]))
1544 self.ui.write((',\n "added": [%s]') %
1544 self.ui.write((',\n "added": [%s]') %
1545 ", ".join('"%s"' % j(f) for f in files[1]))
1545 ", ".join('"%s"' % j(f) for f in files[1]))
1546 self.ui.write((',\n "removed": [%s]') %
1546 self.ui.write((',\n "removed": [%s]') %
1547 ", ".join('"%s"' % j(f) for f in files[2]))
1547 ", ".join('"%s"' % j(f) for f in files[2]))
1548
1548
1549 elif self.ui.verbose:
1549 elif self.ui.verbose:
1550 self.ui.write((',\n "files": [%s]') %
1550 self.ui.write((',\n "files": [%s]') %
1551 ", ".join('"%s"' % j(f) for f in ctx.files()))
1551 ", ".join('"%s"' % j(f) for f in ctx.files()))
1552
1552
1553 if copies:
1553 if copies:
1554 self.ui.write((',\n "copies": {%s}') %
1554 self.ui.write((',\n "copies": {%s}') %
1555 ", ".join('"%s": "%s"' % (j(k), j(v))
1555 ", ".join('"%s": "%s"' % (j(k), j(v))
1556 for k, v in copies))
1556 for k, v in copies))
1557
1557
1558 matchfn = self.matchfn
1558 matchfn = self.matchfn
1559 if matchfn:
1559 if matchfn:
1560 stat = self.diffopts.get('stat')
1560 stat = self.diffopts.get('stat')
1561 diff = self.diffopts.get('patch')
1561 diff = self.diffopts.get('patch')
1562 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1562 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1563 node, prev = ctx.node(), ctx.p1().node()
1563 node, prev = ctx.node(), ctx.p1().node()
1564 if stat:
1564 if stat:
1565 self.ui.pushbuffer()
1565 self.ui.pushbuffer()
1566 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1566 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1567 match=matchfn, stat=True)
1567 match=matchfn, stat=True)
1568 self.ui.write((',\n "diffstat": "%s"')
1568 self.ui.write((',\n "diffstat": "%s"')
1569 % j(self.ui.popbuffer()))
1569 % j(self.ui.popbuffer()))
1570 if diff:
1570 if diff:
1571 self.ui.pushbuffer()
1571 self.ui.pushbuffer()
1572 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1572 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1573 match=matchfn, stat=False)
1573 match=matchfn, stat=False)
1574 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1574 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1575
1575
1576 self.ui.write("\n }")
1576 self.ui.write("\n }")
1577
1577
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, tmplspec, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        # templater loaded from the given spec; defaulttempl pre-seeds the
        # template cache with the built-in keyword templates
        self.t = formatter.loadtemplater(ui, tmplspec,
                                         cache=templatekw.defaulttempl)
        # monotonically increasing counter feeding the 'index' property
        self._counter = itertools.count()
        # cache dict shared across all revisions via the 'cache' property
        self.cache = {}

        # find correct templates for current mode
        # (entries are applied in order, so a later matching mode such as
        # 'debug' overrides an earlier one such as 'verbose')
        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        self._tref = tmplspec.ref
        # map of logical part name -> name of the template actually rendered;
        # empty string means "part not defined by this template"
        self._parts = {'header': '', 'footer': '',
                       tmplspec.ref: tmplspec.ref,
                       'docheader': '', 'docfooter': ''}
        for mode, postfix in tmplmodes:
            for t in self._parts:
                cur = t
                if postfix:
                    cur += "_" + postfix
                if mode and cur in self.t:
                    self._parts[t] = cur

        # the document-level header is written once, up front
        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        # append the document-level footer (once) before delegating the
        # regular close/flush behavior to changeset_printer
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        # build the property dict handed to every template expansion
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['index'] = next(self._counter)
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache
        props = pycompat.strkwargs(props)

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                # only emit the header when it differs from the previous one
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts[self._tref]
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn)

        if self._parts['footer']:
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))
def logtemplatespec(tmpl, mapfile):
    """Build a formatter template spec of topic 'changeset'."""
    spec = formatter.templatespec('changeset', tmpl, mapfile)
    return spec
def _lookuplogtemplate(ui, tmpl, style):
    """Find the template matching the given template spec or style

    See formatter.lookuptemplate() for details.
    """
    # Neither given explicitly: fall back to the ui configuration.  An
    # explicit [ui] logtemplate wins over [ui] style.
    if not tmpl and not style:
        cfgtmpl = ui.config('ui', 'logtemplate')
        if cfgtmpl:
            return logtemplatespec(templater.unquotestring(cfgtmpl), None)
        style = util.expandpath(ui.config('ui', 'style', ''))

    # an explicit template string takes precedence over any style
    if tmpl:
        return formatter.lookuptemplate(ui, 'changeset', tmpl)

    if style:
        mapfile = style
        # a bare name is resolved against the bundled map-cmdline.<name>
        # styles before being tried as-is
        if not os.path.split(mapfile)[0]:
            found = (templater.templatepath('map-cmdline.' + mapfile)
                     or templater.templatepath(mapfile))
            if found:
                mapfile = found
        return logtemplatespec(None, mapfile)

    # nothing configured at all
    return logtemplatespec(None, None)
def makelogtemplater(ui, repo, tmpl, buffered=False):
    """Create a changeset_templater from a literal template 'tmpl'"""
    return changeset_templater(ui, repo, logtemplatespec(tmpl, None),
                               matchfn=None, diffopts={}, buffered=buffered)
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # a matcher is only needed when patches or diffstats will be rendered
    wantdiff = opts.get('patch') or opts.get('stat')
    match = scmutil.matchall(repo) if wantdiff else None

    # the literal template name 'json' selects the dedicated JSON printer
    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, match, opts, buffered)

    spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
    if spec.tmpl or spec.mapfile:
        return changeset_templater(ui, repo, spec, match, opts, buffered)
    # no template configured anywhere: plain display
    return changeset_printer(ui, repo, match, opts, buffered)
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    # optional leading index column (used when listing several markers)
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('precnode', '%s ', hex(marker.precnode()))
    succs = marker.succnodes()
    # condwrite only emits the field when succs is non-empty
    fm.condwrite(succs, 'succnodes', '%s ',
                 fm.formatlist(map(hex, succs), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parents), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # the date is rendered separately above, so drop it from the
    # metadata dict (copy first: metadata() may be shared)
    meta = marker.metadata().copy()
    meta.pop('date', None)
    fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
    fm.plain('\n')
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    datematch = util.matchdate(date)
    matchall = scmutil.matchall(repo)
    matched = {}  # rev -> date tuple of candidate changesets

    def record(ctx, fns):
        when = ctx.date()
        if datematch(when[0]):
            matched[ctx.rev()] = when

    # walkchangerevs visits revisions in display order (usually newest
    # first), so the first recorded hit we encounter is the tipmost one
    for ctx in walkchangerevs(repo, matchall, {'rev': None}, record):
        rev = ctx.rev()
        if rev in matched:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(matched[rev])))
            return '%d' % rev

    raise error.Abort(_("revision matching date not found"))
def increasingwindows(windowsize=8, sizelimit=512):
    """Generate revision-window sizes for windowed history walks.

    Yields windowsize, then doubles it after each yield while it is
    still below sizelimit; once it reaches (or exceeds) the limit the
    same size is yielded forever.
    """
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size *= 2
class FileWalkError(Exception):
    """Raised when file history cannot be walked via filelogs alone."""
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    # (filename, filenode) pairs discovered through renames; they are
    # appended while walking and consumed by iterfiles() below
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # yield (filename, filenode-or-None) for every matched file,
        # then for any rename sources accumulated in 'copies'
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = {filelog.linkrev(last)}

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
class _followfilter(object):
    """Incrementally decide whether revisions belong to a --follow walk.

    The first revision passed to match() becomes the starting point;
    later revisions are accepted when they connect to it through parent
    links (descendants when numbers grow, ancestors when they shrink).
    NOTE(review): the roots-set bookkeeping appears to assume match() is
    fed revisions moving monotonically away from the start -- confirm
    against callers before reusing elsewhere.
    """
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        # nullrev means "no start revision seen yet"
        self.startrev = nullrev
        self.roots = set()
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            # restrict to the first parent when following first parents only
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        # first revision seen becomes the anchor and always matches
        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # the slow path must scan every changeset's file list instead of
    # walking filelogs
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns.  Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    # each candidate rev is examined at most once; cache
                    # the matching filenames for the display phase
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): assumes 'wanted' supports '-' with a list
                # (smartset subtraction); verify the slowpath
                # lazywantedset case when --prune is combined with it
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            # gather phase: pull up to windowsize revs from the iterator
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # prepare phase: forward (ascending) order within the window
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # display phase: original (caller-desired) order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
def _makefollowlogfilematcher(repo, files, followfirst):
    # With --patch --follow FILE we must know, for each displayed
    # revision, which files of that revision to diff.  We approximate
    # this by replaying the --follow ancestry walk and recording, per
    # revision, the names the followed files had there (not strictly
    # "correct", but good enough).
    fcache = {}          # rev -> set of ancestor file names
    populated = [False]  # mutable cell so the closure can flip it
    pctx = repo['.']

    def _populate():
        for fn in files:
            fctx = pctx[fn]
            fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
            for ancctx in fctx.ancestors(followfirst=followfirst):
                fcache.setdefault(ancctx.rev(), set()).add(ancctx.path())

    def filematcher(rev):
        # fill the cache lazily, on first use
        if not populated[0]:
            populated[0] = True
            _populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher
2066 def _makenofollowlogfilematcher(repo, pats, opts):
2066 def _makenofollowlogfilematcher(repo, pats, opts):
2067 '''hook for extensions to override the filematcher for non-follow cases'''
2067 '''hook for extensions to override the filematcher for non-follow cases'''
2068 return None
2068 return None
2069
2069
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # Map each log option (or internal pseudo-option, prefixed with '_')
    # to a revset template and, for list-valued options, the operator
    # used to join the per-value expressions.
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)  # local copy: internal pseudo-options are added below
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # Translate the accumulated options into a single revset expression.
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2221
2221
def _logrevs(repo, opts):
    """Return the default smartset of revisions for log-like commands.

    Explicit --rev wins; otherwise --follow starts at the working
    directory parent, and plain log walks the whole repo newest-first.
    """
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    following = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        return scmutil.revrange(repo, opts['rev'])
    if not following:
        allrevs = smartset.spanset(repo)
        allrevs.reverse()
        return allrevs
    if repo.dirstate.p1() == nullid:
        # following from the null revision: nothing to show
        return smartset.baseset()
    return repo.revs('reverse(:.)')
2236
2236
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        # honor --limit without forcing full iteration of 'revs'
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher
2267
2267
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return smartset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        # honor --limit without forcing full iteration of 'revs'
        limitedrevs = []
        for idx, r in enumerate(revs):
            if limit <= idx:
                break
            limitedrevs.append(r)
        revs = smartset.baseset(limitedrevs)

    return revs, expr, filematcher
2293
2293
def _graphnodeformatter(ui, displayer):
    """Return a function (repo, ctx) -> str rendering the graph node symbol.

    Honors the ui.graphnodetemplate config; without it, falls back to the
    built-in {graphnode} keyword implementation.
    """
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode  # fast path for "{graphnode}"

    spec = templater.unquotestring(spec)
    templ = formatter.maketemplater(ui, spec)
    cache = {}
    if isinstance(displayer, changeset_templater):
        cache = displayer.cache  # reuse cache of slow templates
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache
    def formatnode(repo, ctx):
        # per-revision properties; revcache must be fresh for each ctx
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templ.render(props)
    return formatnode
2314
2314
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    """Render *dag* (an iterable of (rev, type, ctx, parents)) as an
    ASCII graph, delegating changeset text to *displayer* and edge
    computation to *edgefn*.

    getrenamed, if given, maps (filename, rev) to rename info used for
    --copies output. filematcher, if given, maps a rev to the matcher
    restricting which files are detailed for that revision.
    """
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

    # experimental config: experimental.graphshorten
    state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        # render into the displayer's buffer, then pop the text back out
        # so it can be merged with the graph edge drawing
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, state, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()
2362
2362
def graphlog(ui, repo, pats, opts):
    """Run ``log -G``: resolve revisions and render them as a graph."""
    # Parameters are identical to log command ones
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        # limit rename lookups to the highest requested revision, if any
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)
2379
2379
def checkunsupportedgraphflags(pats, opts):
    """Abort if any option incompatible with -G/--graph was supplied."""
    unsupported = ("newest_first",)
    for name in unsupported:
        if opts.get(name):
            flag = name.replace("_", "-")
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % flag)
2385
2385
def graphrevs(repo, nodes, opts):
    """Yield graph entries for *nodes*, newest first, honoring --limit.

    Note: *nodes* is reversed in place, so the caller's list is mutated.
    """
    maxcount = loglimit(opts)
    nodes.reverse()
    if maxcount is not None:
        nodes = nodes[:maxcount]
    return graphmod.nodes(repo, nodes)
2392
2392
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule files matched by *match* for addition (``hg add``).

    prefix is the subrepo path prefix used for nested invocations;
    explicitonly restricts additions to exactly-named files. Recurses
    into subrepositories and returns the list of rejected file names.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # collect names match itself rejects, while preserving its behavior
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                # report/abort on case-collisions before recording the add
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get(r'subrepos'):
                # --subrepos: add non-explicit matches inside subrepos too
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get(r'dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2435
2435
def addwebdirpath(repo, serverpath, webconf):
    """Register *repo* under *serverpath* in the hgweb config mapping,
    then recursively register every subrepository it ever declared."""
    webconf[serverpath] = repo.root
    repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))

    # any revision that touched .hgsub may declare subrepositories
    for rev in repo.revs('filelog("path:.hgsub")'):
        changectx = repo[rev]
        for subpath in changectx.substate:
            changectx.sub(subpath).addwebdirpath(serverpath, webconf)
2444
2444
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking files matched by *match* (``hg forget``).

    prefix is the subrepo path prefix for nested invocations;
    explicitonly restricts the operation to exactly-named files.
    Recurses into subrepositories and returns (bad, forgot): names that
    could not be forgotten and names actually forgotten.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # collect names match itself rejects, while preserving its behavior
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s.modified + s.added + s.deleted + s.clean)
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # warn about explicitly-named files that are not tracked at all
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2492
2492
def files(ui, ctx, m, fm, fmt, subrepos):
    """List the files in ctx matched by m through formatter fm.

    fmt is the format string used for each path. Recurses into a
    subrepository when subrepos is set, when the subrepo path is matched
    exactly, or when the narrowed matcher still names files inside it.
    Returns 0 if at least one file was listed, 1 otherwise.
    """
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        # in the working context (rev is None), skip files the dirstate
        # has marked as removed
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                # only recurse further when explicitly requested or named
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2522
2522
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    """Implementation of 'hg remove': schedule matched files for removal.

    after: only record files already deleted from disk (warn about ones
    that still exist); force: remove even modified/added/clean files;
    subrepos: recurse into subrepositories. If a ``warnings`` list is
    passed in, warning strings are accumulated there for the caller to
    emit; otherwise they are printed at the end. Returns 0 on success,
    1 if any file could not be removed.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    # warn here only when we own the warnings list; a caller-supplied
    # list means the caller will print them
    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                                % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        # f belongs to a subrepo if it sits under any subrepo path
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    ui.progress(_('deleting'), None)

    # select which files actually get removed, warning about the rest
    # (note: 'list' shadows the builtin; kept for historical reasons)
    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2640
2640
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    """Write out the contents of the files in ctx matched by matcher.

    Output goes through the formatter ``basefm``; when ``fntemplate`` is
    given, each file is written to a filename expanded from that template
    instead. Recurses into subrepositories. Returns 0 if at least one
    file was written, 1 otherwise.
    """
    err = 1

    def write(path):
        # emit one file, either into basefm or into a per-file formatter
        # opened on the template-expanded filename
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
        with formatter.maybereopen(basefm, filename, opts) as fm:
            data = ctx[path].data()
            if opts.get('decode'):
                data = repo.wwritedata(path, data)
            fm.startitem()
            fm.write('data', '%s', data)
            fm.data(abspath=path, path=matcher.rel(path))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                write(file)
                return 0
        except KeyError:
            # fast path failed; fall through to the full walk below
            pass

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path), **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2687
2687
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove') and scmutil.addremove(repo, matcher, "",
                                                   opts) != 0:
        raise error.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2704
2704
def samefile(f, ctx1, ctx2):
    """Report whether file f is identical (content and flags) in both contexts.

    A file absent from both manifests counts as identical; a file present
    in only one of them does not.
    """
    present1 = f in ctx1.manifest()
    present2 = f in ctx2.manifest()
    if not present1:
        # missing on both sides means "same"
        return not present2
    if not present2:
        return False
    a = ctx1.filectx(f)
    b = ctx2.filectx(f)
    return not a.cmp(b) and a.flags() == b.flags()
2716
2716
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Rewrite changeset ``old`` to fold in working-directory changes.

    First commits any outstanding working-directory changes as a
    temporary changeset, then recreates ``old`` on top of its first
    parent with the combined files, message and metadata. Depending on
    whether obsolete-marker creation is enabled, the replaced changesets
    are either obsoleted or stripped. Returns the node of the amended
    changeset (which is ``old``'s own node when nothing changed).
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                # Fold in copies relative to the second parent only when one
                # actually exists. (The previous code tested the bound method
                # 'old.p2' itself, which is always truthy, so this branch was
                # unconditionally taken.)
                if old.p2():
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())
                files = [f for f in files if not samefile(f, ctx, base)]

                def filectxfn(repo, ctx_, path):
                    # take file content from the intermediate commit;
                    # None means "file removed"
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

                user = opts.get('user') or old.user()
                date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                # no message given; reuse the old one and force an edit
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This not what we expect from amend.
                return old.node()

            # commit the new changeset with the phase of the old one,
            # restoring the configured default phase afterwards
            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
            #commit the whole amend process
            if createmarkers:
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs, operation='amend')
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, wlock)
    return newid
2899
2899
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's commit message, launching the editor only if it is empty."""
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2905
2905
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Run the user's editor to obtain a commit message for ctx.

    The editor buffer is seeded either from a '[committemplate]' config
    template (the most specific dotted ``editform`` entry wins) or from
    the plain buildcommittext() skeleton. ``finishdesc``, if given,
    post-processes the edited text. Aborts on an empty message and, when
    ``unchangedmessagedetection`` is set, when the user saved the
    template text unmodified.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # walk editform components from most to least specific, e.g.
    # changeset.commit.amend -> changeset.commit -> changeset
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            tmpl = templater.unquotestring(tmpl)
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    # drop the 'HG:' helper lines that seeded the buffer
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2956
2956
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the '[committemplate]' template ``tmpl`` for ctx and return it.

    Other '[committemplate]' config entries are made available to the
    template engine as named sub-templates.
    """
    ui = repo.ui
    spec = _lookuplogtemplate(ui, tmpl, None)
    t = changeset_templater(ui, repo, spec, None, {}, False)

    # expose every other committemplate entry as a named template
    for k, v in repo.ui.configitems('committemplate'):
        if k != 'changeset':
            t.t.cache[k] = v

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2972
2972
def hgprefix(msg):
    """Prefix every non-empty line of msg with 'HG: ', dropping blank lines."""
    prefixed = []
    for line in msg.split("\n"):
        if line:
            prefixed.append("HG: " + line)
    return "\n".join(prefixed)
2975
2975
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the plain-text skeleton shown in the commit message editor.

    Lists user, branch/merge/bookmark state, subrepos and the files that
    changed, each helper line prefixed with 'HG: ' via hgprefix().
    """
    modified = ctx.modified()
    added = ctx.added()
    removed = ctx.removed()

    lines = []
    if ctx.description():
        lines.append(ctx.description())
    lines.append("")
    lines.append("") # Empty line between message and comments.
    lines.append(hgprefix(_("Enter commit message."
                            " Lines beginning with 'HG:' are removed.")))
    lines.append(hgprefix(extramsg))
    lines.append("HG: --")
    lines.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        lines.append(hgprefix(_("branch merge")))
    if ctx.branch():
        lines.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        lines.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    for s in subs:
        lines.append(hgprefix(_("subrepo %s") % s))
    for f in added:
        lines.append(hgprefix(_("added %s") % f))
    for f in modified:
        lines.append(hgprefix(_("changed %s") % f))
    for f in removed:
        lines.append(hgprefix(_("removed %s") % f))
    if not (added or modified or removed):
        lines.append(hgprefix(_("no files changed")))
    lines.append("")

    return "\n".join(lines)
3003
3003
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print status messages appropriate to a freshly committed changeset.

    ``node`` is the new commit, ``branch`` its named branch, and ``bheads``
    the branch heads as they were before the commit. Emits "created new
    head" / "reopening closed branch head" notices, and a "committed
    changeset" line in verbose/debug mode.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    # new head: no amend, branch heads existed, the new node is not one of
    # them, and no parent was a head of this branch (see table below)
    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            # a parent that closed this branch is being reopened by the
            # new commit
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        # debug: full 40-char hash
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        # verbose: short form via the context's str()
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3051
3051
def postcommitstatus(repo, pats, opts):
    """Return the working-directory status for files matched by pats/opts.

    Intended to be called right after a commit to see what is left dirty.
    """
    wctx = repo[None]
    matcher = scmutil.match(wctx, pats, opts)
    return repo.status(match=matcher)
3054
3054
def revert(ui, repo, ctx, parents, *pats, **opts):
    """Restore files matched by pats/opts to their state in ``ctx``.

    ``parents`` is the (parent, p2) pair of the working directory. The
    function classifies every matched file into disjoint sets (modified,
    added, removed, ... relative to both the target and the dirstate),
    chooses an action and a backup strategy for each via a dispatch table,
    then delegates the actual filesystem/dirstate work to _performrevert.
    Matching subrepos are reverted recursively at the end.
    """
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        # reverting to the second parent: treat it as "the" parent below
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # suppress "not found" warnings for paths already matched,
                # subrepos, and directory prefixes of matched files
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If a such file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                   }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set do avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1  # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that results that will change file on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since targe, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets does not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touch on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))
3337
3337
3338 def _revertprefetch(repo, ctx, *files):
3338 def _revertprefetch(repo, ctx, *files):
3339 """Let extension changing the storage layer prefetch content"""
3339 """Let extension changing the storage layer prefetch content"""
3340 pass
3340 pass
3341
3341
def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.

    ``actions`` maps action names ('forget', 'remove', 'drop', 'revert',
    'add', 'undelete', ...) to (filelist, message) pairs as built by
    revert(). ``tobackup`` is the set of files to back up before applying
    interactive hunks.
    """
    parent, p2 = parents
    node = ctx.node()
    excluded_files = []
    # files the user declines to touch in interactive mode are excluded
    # from the matcher used for the interactive diff below
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        # write f's content (and flags) from the target ctx into the wdir
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        # unlink from disk (best effort) and mark removed in the dirstate
        try:
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root)
    # 'forget': stop tracking, keep the file on disk
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    # 'remove': stop tracking and delete from disk
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    # 'drop': mark removed in dirstate only (file already gone from disk)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'revert'
            reversehunks = repo.ui.configbool('experimental',
                'revertalternateinteractivemode',
                True)
        # the diff direction determines whether selected hunks must be
        # reversed before being applied to the working directory
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        # non-interactive: restore every 'revert' file wholesale
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    # restore copy/rename metadata for files we (re)materialized
    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3480
3480
3481 class command(registrar.command):
3481 class command(registrar.command):
3482 def _doregister(self, func, name, *args, **kwargs):
3482 def _doregister(self, func, name, *args, **kwargs):
3483 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3483 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3484 return super(command, self)._doregister(func, name, *args, **kwargs)
3484 return super(command, self)._doregister(func, name, *args, **kwargs)
3485
3485
3486 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3486 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3487 # commands.outgoing. "missing" is "missing" of the result of
3487 # commands.outgoing. "missing" is "missing" of the result of
3488 # "findcommonoutgoing()"
3488 # "findcommonoutgoing()"
3489 outgoinghooks = util.hooks()
3489 outgoinghooks = util.hooks()
3490
3490
3491 # a list of (ui, repo) functions called by commands.summary
3491 # a list of (ui, repo) functions called by commands.summary
3492 summaryhooks = util.hooks()
3492 summaryhooks = util.hooks()
3493
3493
3494 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3494 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3495 #
3495 #
3496 # functions should return tuple of booleans below, if 'changes' is None:
3496 # functions should return tuple of booleans below, if 'changes' is None:
3497 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3497 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3498 #
3498 #
3499 # otherwise, 'changes' is a tuple of tuples below:
3499 # otherwise, 'changes' is a tuple of tuples below:
3500 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3500 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3501 # - (desturl, destbranch, destpeer, outgoing)
3501 # - (desturl, destbranch, destpeer, outgoing)
3502 summaryremotehooks = util.hooks()
3502 summaryremotehooks = util.hooks()
3503
3503
3504 # A list of state files kept by multistep operations like graft.
3504 # A list of state files kept by multistep operations like graft.
3505 # Since graft cannot be aborted, it is considered 'clearable' by update.
3505 # Since graft cannot be aborted, it is considered 'clearable' by update.
3506 # note: bisect is intentionally excluded
3506 # note: bisect is intentionally excluded
3507 # (state file, clearable, allowcommit, error, hint)
3507 # (state file, clearable, allowcommit, error, hint)
3508 unfinishedstates = [
3508 unfinishedstates = [
3509 ('graftstate', True, False, _('graft in progress'),
3509 ('graftstate', True, False, _('graft in progress'),
3510 _("use 'hg graft --continue' or 'hg update' to abort")),
3510 _("use 'hg graft --continue' or 'hg update' to abort")),
3511 ('updatestate', True, False, _('last update was interrupted'),
3511 ('updatestate', True, False, _('last update was interrupted'),
3512 _("use 'hg update' to get a consistent checkout"))
3512 _("use 'hg update' to get a consistent checkout"))
3513 ]
3513 ]
3514
3514
3515 def checkunfinished(repo, commit=False):
3515 def checkunfinished(repo, commit=False):
3516 '''Look for an unfinished multistep operation, like graft, and abort
3516 '''Look for an unfinished multistep operation, like graft, and abort
3517 if found. It's probably good to check this right before
3517 if found. It's probably good to check this right before
3518 bailifchanged().
3518 bailifchanged().
3519 '''
3519 '''
3520 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3520 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3521 if commit and allowcommit:
3521 if commit and allowcommit:
3522 continue
3522 continue
3523 if repo.vfs.exists(f):
3523 if repo.vfs.exists(f):
3524 raise error.Abort(msg, hint=hint)
3524 raise error.Abort(msg, hint=hint)
3525
3525
3526 def clearunfinished(repo):
3526 def clearunfinished(repo):
3527 '''Check for unfinished operations (as above), and clear the ones
3527 '''Check for unfinished operations (as above), and clear the ones
3528 that are clearable.
3528 that are clearable.
3529 '''
3529 '''
3530 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3530 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3531 if not clearable and repo.vfs.exists(f):
3531 if not clearable and repo.vfs.exists(f):
3532 raise error.Abort(msg, hint=hint)
3532 raise error.Abort(msg, hint=hint)
3533 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3533 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3534 if clearable and repo.vfs.exists(f):
3534 if clearable and repo.vfs.exists(f):
3535 util.unlink(repo.vfs.join(f))
3535 util.unlink(repo.vfs.join(f))
3536
3536
3537 afterresolvedstates = [
3537 afterresolvedstates = [
3538 ('graftstate',
3538 ('graftstate',
3539 _('hg graft --continue')),
3539 _('hg graft --continue')),
3540 ]
3540 ]
3541
3541
3542 def howtocontinue(repo):
3542 def howtocontinue(repo):
3543 '''Check for an unfinished operation and return the command to finish
3543 '''Check for an unfinished operation and return the command to finish
3544 it.
3544 it.
3545
3545
3546 afterresolvedstates tuples define a .hg/{file} and the corresponding
3546 afterresolvedstates tuples define a .hg/{file} and the corresponding
3547 command needed to finish it.
3547 command needed to finish it.
3548
3548
3549 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3549 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3550 a boolean.
3550 a boolean.
3551 '''
3551 '''
3552 contmsg = _("continue: %s")
3552 contmsg = _("continue: %s")
3553 for f, msg in afterresolvedstates:
3553 for f, msg in afterresolvedstates:
3554 if repo.vfs.exists(f):
3554 if repo.vfs.exists(f):
3555 return contmsg % msg, True
3555 return contmsg % msg, True
3556 workingctx = repo[None]
3556 workingctx = repo[None]
3557 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3557 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3558 for s in workingctx.substate)
3558 for s in workingctx.substate)
3559 if dirty:
3559 if dirty:
3560 return contmsg % _("hg commit"), False
3560 return contmsg % _("hg commit"), False
3561 return None, None
3561 return None, None
3562
3562
3563 def checkafterresolved(repo):
3563 def checkafterresolved(repo):
3564 '''Inform the user about the next action after completing hg resolve
3564 '''Inform the user about the next action after completing hg resolve
3565
3565
3566 If there's a matching afterresolvedstates, howtocontinue will yield
3566 If there's a matching afterresolvedstates, howtocontinue will yield
3567 repo.ui.warn as the reporter.
3567 repo.ui.warn as the reporter.
3568
3568
3569 Otherwise, it will yield repo.ui.note.
3569 Otherwise, it will yield repo.ui.note.
3570 '''
3570 '''
3571 msg, warning = howtocontinue(repo)
3571 msg, warning = howtocontinue(repo)
3572 if msg is not None:
3572 if msg is not None:
3573 if warning:
3573 if warning:
3574 repo.ui.warn("%s\n" % msg)
3574 repo.ui.warn("%s\n" % msg)
3575 else:
3575 else:
3576 repo.ui.note("%s\n" % msg)
3576 repo.ui.note("%s\n" % msg)
3577
3577
3578 def wrongtooltocontinue(repo, task):
3578 def wrongtooltocontinue(repo, task):
3579 '''Raise an abort suggesting how to properly continue if there is an
3579 '''Raise an abort suggesting how to properly continue if there is an
3580 active task.
3580 active task.
3581
3581
3582 Uses howtocontinue() to find the active task.
3582 Uses howtocontinue() to find the active task.
3583
3583
3584 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3584 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3585 a hint.
3585 a hint.
3586 '''
3586 '''
3587 after = howtocontinue(repo)
3587 after = howtocontinue(repo)
3588 hint = None
3588 hint = None
3589 if after[1]:
3589 if after[1]:
3590 hint = after[0]
3590 hint = after[0]
3591 raise error.Abort(_('no %s in progress') % task, hint=hint)
3591 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,2204 +1,2204 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 bundle2,
31 bundle2,
32 changegroup,
32 changegroup,
33 cmdutil,
33 cmdutil,
34 color,
34 color,
35 context,
35 context,
36 dagparser,
36 dagparser,
37 dagutil,
37 dagutil,
38 encoding,
38 encoding,
39 error,
39 error,
40 exchange,
40 exchange,
41 extensions,
41 extensions,
42 filemerge,
42 filemerge,
43 fileset,
43 fileset,
44 formatter,
44 formatter,
45 hg,
45 hg,
46 localrepo,
46 localrepo,
47 lock as lockmod,
47 lock as lockmod,
48 merge as mergemod,
48 merge as mergemod,
49 obsolete,
49 obsolete,
50 phases,
50 phases,
51 policy,
51 policy,
52 pvec,
52 pvec,
53 pycompat,
53 pycompat,
54 registrar,
54 registrar,
55 repair,
55 repair,
56 revlog,
56 revlog,
57 revset,
57 revset,
58 revsetlang,
58 revsetlang,
59 scmutil,
59 scmutil,
60 setdiscovery,
60 setdiscovery,
61 simplemerge,
61 simplemerge,
62 smartset,
62 smartset,
63 sslutil,
63 sslutil,
64 streamclone,
64 streamclone,
65 templater,
65 templater,
66 treediscovery,
66 treediscovery,
67 upgrade,
67 upgrade,
68 util,
68 util,
69 vfs as vfsmod,
69 vfs as vfsmod,
70 )
70 )
71
71
72 release = lockmod.release
72 release = lockmod.release
73
73
74 command = registrar.command()
74 command = registrar.command()
75
75
76 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
76 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
77 def debugancestor(ui, repo, *args):
77 def debugancestor(ui, repo, *args):
78 """find the ancestor revision of two revisions in a given index"""
78 """find the ancestor revision of two revisions in a given index"""
79 if len(args) == 3:
79 if len(args) == 3:
80 index, rev1, rev2 = args
80 index, rev1, rev2 = args
81 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
81 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
82 lookup = r.lookup
82 lookup = r.lookup
83 elif len(args) == 2:
83 elif len(args) == 2:
84 if not repo:
84 if not repo:
85 raise error.Abort(_('there is no Mercurial repository here '
85 raise error.Abort(_('there is no Mercurial repository here '
86 '(.hg not found)'))
86 '(.hg not found)'))
87 rev1, rev2 = args
87 rev1, rev2 = args
88 r = repo.changelog
88 r = repo.changelog
89 lookup = repo.lookup
89 lookup = repo.lookup
90 else:
90 else:
91 raise error.Abort(_('either two or three arguments required'))
91 raise error.Abort(_('either two or three arguments required'))
92 a = r.ancestor(lookup(rev1), lookup(rev2))
92 a = r.ancestor(lookup(rev1), lookup(rev2))
93 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
93 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
94
94
95 @command('debugapplystreamclonebundle', [], 'FILE')
95 @command('debugapplystreamclonebundle', [], 'FILE')
96 def debugapplystreamclonebundle(ui, repo, fname):
96 def debugapplystreamclonebundle(ui, repo, fname):
97 """apply a stream clone bundle file"""
97 """apply a stream clone bundle file"""
98 f = hg.openpath(ui, fname)
98 f = hg.openpath(ui, fname)
99 gen = exchange.readbundle(ui, f, fname)
99 gen = exchange.readbundle(ui, f, fname)
100 gen.apply(repo)
100 gen.apply(repo)
101
101
102 @command('debugbuilddag',
102 @command('debugbuilddag',
103 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
103 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
104 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
104 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
105 ('n', 'new-file', None, _('add new file at each rev'))],
105 ('n', 'new-file', None, _('add new file at each rev'))],
106 _('[OPTION]... [TEXT]'))
106 _('[OPTION]... [TEXT]'))
107 def debugbuilddag(ui, repo, text=None,
107 def debugbuilddag(ui, repo, text=None,
108 mergeable_file=False,
108 mergeable_file=False,
109 overwritten_file=False,
109 overwritten_file=False,
110 new_file=False):
110 new_file=False):
111 """builds a repo with a given DAG from scratch in the current empty repo
111 """builds a repo with a given DAG from scratch in the current empty repo
112
112
113 The description of the DAG is read from stdin if not given on the
113 The description of the DAG is read from stdin if not given on the
114 command line.
114 command line.
115
115
116 Elements:
116 Elements:
117
117
118 - "+n" is a linear run of n nodes based on the current default parent
118 - "+n" is a linear run of n nodes based on the current default parent
119 - "." is a single node based on the current default parent
119 - "." is a single node based on the current default parent
120 - "$" resets the default parent to null (implied at the start);
120 - "$" resets the default parent to null (implied at the start);
121 otherwise the default parent is always the last node created
121 otherwise the default parent is always the last node created
122 - "<p" sets the default parent to the backref p
122 - "<p" sets the default parent to the backref p
123 - "*p" is a fork at parent p, which is a backref
123 - "*p" is a fork at parent p, which is a backref
124 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
124 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
125 - "/p2" is a merge of the preceding node and p2
125 - "/p2" is a merge of the preceding node and p2
126 - ":tag" defines a local tag for the preceding node
126 - ":tag" defines a local tag for the preceding node
127 - "@branch" sets the named branch for subsequent nodes
127 - "@branch" sets the named branch for subsequent nodes
128 - "#...\\n" is a comment up to the end of the line
128 - "#...\\n" is a comment up to the end of the line
129
129
130 Whitespace between the above elements is ignored.
130 Whitespace between the above elements is ignored.
131
131
132 A backref is either
132 A backref is either
133
133
134 - a number n, which references the node curr-n, where curr is the current
134 - a number n, which references the node curr-n, where curr is the current
135 node, or
135 node, or
136 - the name of a local tag you placed earlier using ":tag", or
136 - the name of a local tag you placed earlier using ":tag", or
137 - empty to denote the default parent.
137 - empty to denote the default parent.
138
138
139 All string valued-elements are either strictly alphanumeric, or must
139 All string valued-elements are either strictly alphanumeric, or must
140 be enclosed in double quotes ("..."), with "\\" as escape character.
140 be enclosed in double quotes ("..."), with "\\" as escape character.
141 """
141 """
142
142
143 if text is None:
143 if text is None:
144 ui.status(_("reading DAG from stdin\n"))
144 ui.status(_("reading DAG from stdin\n"))
145 text = ui.fin.read()
145 text = ui.fin.read()
146
146
147 cl = repo.changelog
147 cl = repo.changelog
148 if len(cl) > 0:
148 if len(cl) > 0:
149 raise error.Abort(_('repository is not empty'))
149 raise error.Abort(_('repository is not empty'))
150
150
151 # determine number of revs in DAG
151 # determine number of revs in DAG
152 total = 0
152 total = 0
153 for type, data in dagparser.parsedag(text):
153 for type, data in dagparser.parsedag(text):
154 if type == 'n':
154 if type == 'n':
155 total += 1
155 total += 1
156
156
157 if mergeable_file:
157 if mergeable_file:
158 linesperrev = 2
158 linesperrev = 2
159 # make a file with k lines per rev
159 # make a file with k lines per rev
160 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
160 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
161 initialmergedlines.append("")
161 initialmergedlines.append("")
162
162
163 tags = []
163 tags = []
164
164
165 wlock = lock = tr = None
165 wlock = lock = tr = None
166 try:
166 try:
167 wlock = repo.wlock()
167 wlock = repo.wlock()
168 lock = repo.lock()
168 lock = repo.lock()
169 tr = repo.transaction("builddag")
169 tr = repo.transaction("builddag")
170
170
171 at = -1
171 at = -1
172 atbranch = 'default'
172 atbranch = 'default'
173 nodeids = []
173 nodeids = []
174 id = 0
174 id = 0
175 ui.progress(_('building'), id, unit=_('revisions'), total=total)
175 ui.progress(_('building'), id, unit=_('revisions'), total=total)
176 for type, data in dagparser.parsedag(text):
176 for type, data in dagparser.parsedag(text):
177 if type == 'n':
177 if type == 'n':
178 ui.note(('node %s\n' % str(data)))
178 ui.note(('node %s\n' % str(data)))
179 id, ps = data
179 id, ps = data
180
180
181 files = []
181 files = []
182 fctxs = {}
182 fctxs = {}
183
183
184 p2 = None
184 p2 = None
185 if mergeable_file:
185 if mergeable_file:
186 fn = "mf"
186 fn = "mf"
187 p1 = repo[ps[0]]
187 p1 = repo[ps[0]]
188 if len(ps) > 1:
188 if len(ps) > 1:
189 p2 = repo[ps[1]]
189 p2 = repo[ps[1]]
190 pa = p1.ancestor(p2)
190 pa = p1.ancestor(p2)
191 base, local, other = [x[fn].data() for x in (pa, p1,
191 base, local, other = [x[fn].data() for x in (pa, p1,
192 p2)]
192 p2)]
193 m3 = simplemerge.Merge3Text(base, local, other)
193 m3 = simplemerge.Merge3Text(base, local, other)
194 ml = [l.strip() for l in m3.merge_lines()]
194 ml = [l.strip() for l in m3.merge_lines()]
195 ml.append("")
195 ml.append("")
196 elif at > 0:
196 elif at > 0:
197 ml = p1[fn].data().split("\n")
197 ml = p1[fn].data().split("\n")
198 else:
198 else:
199 ml = initialmergedlines
199 ml = initialmergedlines
200 ml[id * linesperrev] += " r%i" % id
200 ml[id * linesperrev] += " r%i" % id
201 mergedtext = "\n".join(ml)
201 mergedtext = "\n".join(ml)
202 files.append(fn)
202 files.append(fn)
203 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
203 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
204
204
205 if overwritten_file:
205 if overwritten_file:
206 fn = "of"
206 fn = "of"
207 files.append(fn)
207 files.append(fn)
208 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
208 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
209
209
210 if new_file:
210 if new_file:
211 fn = "nf%i" % id
211 fn = "nf%i" % id
212 files.append(fn)
212 files.append(fn)
213 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
214 if len(ps) > 1:
214 if len(ps) > 1:
215 if not p2:
215 if not p2:
216 p2 = repo[ps[1]]
216 p2 = repo[ps[1]]
217 for fn in p2:
217 for fn in p2:
218 if fn.startswith("nf"):
218 if fn.startswith("nf"):
219 files.append(fn)
219 files.append(fn)
220 fctxs[fn] = p2[fn]
220 fctxs[fn] = p2[fn]
221
221
222 def fctxfn(repo, cx, path):
222 def fctxfn(repo, cx, path):
223 return fctxs.get(path)
223 return fctxs.get(path)
224
224
225 if len(ps) == 0 or ps[0] < 0:
225 if len(ps) == 0 or ps[0] < 0:
226 pars = [None, None]
226 pars = [None, None]
227 elif len(ps) == 1:
227 elif len(ps) == 1:
228 pars = [nodeids[ps[0]], None]
228 pars = [nodeids[ps[0]], None]
229 else:
229 else:
230 pars = [nodeids[p] for p in ps]
230 pars = [nodeids[p] for p in ps]
231 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
231 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
232 date=(id, 0),
232 date=(id, 0),
233 user="debugbuilddag",
233 user="debugbuilddag",
234 extra={'branch': atbranch})
234 extra={'branch': atbranch})
235 nodeid = repo.commitctx(cx)
235 nodeid = repo.commitctx(cx)
236 nodeids.append(nodeid)
236 nodeids.append(nodeid)
237 at = id
237 at = id
238 elif type == 'l':
238 elif type == 'l':
239 id, name = data
239 id, name = data
240 ui.note(('tag %s\n' % name))
240 ui.note(('tag %s\n' % name))
241 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
241 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
242 elif type == 'a':
242 elif type == 'a':
243 ui.note(('branch %s\n' % data))
243 ui.note(('branch %s\n' % data))
244 atbranch = data
244 atbranch = data
245 ui.progress(_('building'), id, unit=_('revisions'), total=total)
245 ui.progress(_('building'), id, unit=_('revisions'), total=total)
246 tr.close()
246 tr.close()
247
247
248 if tags:
248 if tags:
249 repo.vfs.write("localtags", "".join(tags))
249 repo.vfs.write("localtags", "".join(tags))
250 finally:
250 finally:
251 ui.progress(_('building'), None)
251 ui.progress(_('building'), None)
252 release(tr, lock, wlock)
252 release(tr, lock, wlock)
253
253
254 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
254 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
255 indent_string = ' ' * indent
255 indent_string = ' ' * indent
256 if all:
256 if all:
257 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
257 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
258 % indent_string)
258 % indent_string)
259
259
260 def showchunks(named):
260 def showchunks(named):
261 ui.write("\n%s%s\n" % (indent_string, named))
261 ui.write("\n%s%s\n" % (indent_string, named))
262 chain = None
262 chain = None
263 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
263 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
264 node = chunkdata['node']
264 node = chunkdata['node']
265 p1 = chunkdata['p1']
265 p1 = chunkdata['p1']
266 p2 = chunkdata['p2']
266 p2 = chunkdata['p2']
267 cs = chunkdata['cs']
267 cs = chunkdata['cs']
268 deltabase = chunkdata['deltabase']
268 deltabase = chunkdata['deltabase']
269 delta = chunkdata['delta']
269 delta = chunkdata['delta']
270 ui.write("%s%s %s %s %s %s %s\n" %
270 ui.write("%s%s %s %s %s %s %s\n" %
271 (indent_string, hex(node), hex(p1), hex(p2),
271 (indent_string, hex(node), hex(p1), hex(p2),
272 hex(cs), hex(deltabase), len(delta)))
272 hex(cs), hex(deltabase), len(delta)))
273 chain = node
273 chain = node
274
274
275 chunkdata = gen.changelogheader()
275 chunkdata = gen.changelogheader()
276 showchunks("changelog")
276 showchunks("changelog")
277 chunkdata = gen.manifestheader()
277 chunkdata = gen.manifestheader()
278 showchunks("manifest")
278 showchunks("manifest")
279 for chunkdata in iter(gen.filelogheader, {}):
279 for chunkdata in iter(gen.filelogheader, {}):
280 fname = chunkdata['filename']
280 fname = chunkdata['filename']
281 showchunks(fname)
281 showchunks(fname)
282 else:
282 else:
283 if isinstance(gen, bundle2.unbundle20):
283 if isinstance(gen, bundle2.unbundle20):
284 raise error.Abort(_('use debugbundle2 for this file'))
284 raise error.Abort(_('use debugbundle2 for this file'))
285 chunkdata = gen.changelogheader()
285 chunkdata = gen.changelogheader()
286 chain = None
286 chain = None
287 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
287 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
288 node = chunkdata['node']
288 node = chunkdata['node']
289 ui.write("%s%s\n" % (indent_string, hex(node)))
289 ui.write("%s%s\n" % (indent_string, hex(node)))
290 chain = node
290 chain = node
291
291
292 def _debugobsmarkers(ui, data, all=None, indent=0, **opts):
292 def _debugobsmarkers(ui, data, all=None, indent=0, **opts):
293 """display version and markers contained in 'data'"""
293 """display version and markers contained in 'data'"""
294 indent_string = ' ' * indent
294 indent_string = ' ' * indent
295 try:
295 try:
296 version, markers = obsolete._readmarkers(data)
296 version, markers = obsolete._readmarkers(data)
297 except error.UnknownVersion as exc:
297 except error.UnknownVersion as exc:
298 msg = "%sunsupported version: %s (%d bytes)\n"
298 msg = "%sunsupported version: %s (%d bytes)\n"
299 msg %= indent_string, exc.version, len(data)
299 msg %= indent_string, exc.version, len(data)
300 ui.write(msg)
300 ui.write(msg)
301 else:
301 else:
302 msg = "%sversion: %s (%d bytes)\n"
302 msg = "%sversion: %s (%d bytes)\n"
303 msg %= indent_string, version, len(data)
303 msg %= indent_string, version, len(data)
304 ui.write(msg)
304 ui.write(msg)
305 fm = ui.formatter('debugobsolete', opts)
305 fm = ui.formatter('debugobsolete', opts)
306 for rawmarker in sorted(markers):
306 for rawmarker in sorted(markers):
307 m = obsolete.marker(None, rawmarker)
307 m = obsolete.marker(None, rawmarker)
308 fm.startitem()
308 fm.startitem()
309 fm.plain(indent_string)
309 fm.plain(indent_string)
310 cmdutil.showmarker(fm, m)
310 cmdutil.showmarker(fm, m)
311 fm.end()
311 fm.end()
312
312
313 def _debugbundle2(ui, gen, all=None, **opts):
313 def _debugbundle2(ui, gen, all=None, **opts):
314 """lists the contents of a bundle2"""
314 """lists the contents of a bundle2"""
315 if not isinstance(gen, bundle2.unbundle20):
315 if not isinstance(gen, bundle2.unbundle20):
316 raise error.Abort(_('not a bundle2 file'))
316 raise error.Abort(_('not a bundle2 file'))
317 ui.write(('Stream params: %s\n' % repr(gen.params)))
317 ui.write(('Stream params: %s\n' % repr(gen.params)))
318 parttypes = opts.get('part_type', [])
318 parttypes = opts.get('part_type', [])
319 for part in gen.iterparts():
319 for part in gen.iterparts():
320 if parttypes and part.type not in parttypes:
320 if parttypes and part.type not in parttypes:
321 continue
321 continue
322 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
322 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
323 if part.type == 'changegroup':
323 if part.type == 'changegroup':
324 version = part.params.get('version', '01')
324 version = part.params.get('version', '01')
325 cg = changegroup.getunbundler(version, part, 'UN')
325 cg = changegroup.getunbundler(version, part, 'UN')
326 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
326 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
327 if part.type == 'obsmarkers':
327 if part.type == 'obsmarkers':
328 _debugobsmarkers(ui, part.read(), all=all, indent=4, **opts)
328 _debugobsmarkers(ui, part.read(), all=all, indent=4, **opts)
329
329
330 @command('debugbundle',
330 @command('debugbundle',
331 [('a', 'all', None, _('show all details')),
331 [('a', 'all', None, _('show all details')),
332 ('', 'part-type', [], _('show only the named part type')),
332 ('', 'part-type', [], _('show only the named part type')),
333 ('', 'spec', None, _('print the bundlespec of the bundle'))],
333 ('', 'spec', None, _('print the bundlespec of the bundle'))],
334 _('FILE'),
334 _('FILE'),
335 norepo=True)
335 norepo=True)
336 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
336 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
337 """lists the contents of a bundle"""
337 """lists the contents of a bundle"""
338 with hg.openpath(ui, bundlepath) as f:
338 with hg.openpath(ui, bundlepath) as f:
339 if spec:
339 if spec:
340 spec = exchange.getbundlespec(ui, f)
340 spec = exchange.getbundlespec(ui, f)
341 ui.write('%s\n' % spec)
341 ui.write('%s\n' % spec)
342 return
342 return
343
343
344 gen = exchange.readbundle(ui, f, bundlepath)
344 gen = exchange.readbundle(ui, f, bundlepath)
345 if isinstance(gen, bundle2.unbundle20):
345 if isinstance(gen, bundle2.unbundle20):
346 return _debugbundle2(ui, gen, all=all, **opts)
346 return _debugbundle2(ui, gen, all=all, **opts)
347 _debugchangegroup(ui, gen, all=all, **opts)
347 _debugchangegroup(ui, gen, all=all, **opts)
348
348
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        # 'n'ormal and 'r'emoved entries must be tracked in the first parent
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # 'a'dded files must not already be tracked
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # 'm'erged files must come from at least one of the two parents
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # reverse check: everything in the first parent's manifest must have a
    # dirstate entry in a sane state
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # NOTE: do not bind the message to a local named 'error' -- the
        # original code shadowed the 'error' module, so 'error.Abort'
        # became an attribute lookup on a string and raised AttributeError
        # instead of aborting cleanly.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
376
376
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    # always report which color mode is in effect before dispatching
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get('style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
387
387
def _debugdisplaycolor(ui):
    """list every color label valid for the current color mode"""
    # work on a copy so the caller's ui keeps its real style table
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # in terminfo mode, user-defined 'color.*' and 'terminfo.*' config
        # entries are also usable labels; register them under their short name
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        # write each name styled with itself so the output shows the color
        ui.write(('%s\n') % colorname, label=label)
405
405
def _debugdisplaystyle(ui):
    """list every configured style label together with its effects"""
    ui.write(_('available style:\n'))
    # compute the widest label so the effect lists line up in one column
    width = max(len(s) for s in ui._styles)
    for label in sorted(ui._styles):
        effects = ui._styles[label]
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')
417
417
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    # report the repo requirements a consumer must support to apply the bundle
    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
435
435
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog file: emit its DAG, labeling any revisions given
        # on the command line as rN
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # generate dagparser event stream: 'n' = node, 'l' = label
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map each revision to the tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            # 'a' events annotate branch changes when --branches is set
            b = "default"
            for r in cl:
                if branches:
                    # changelog entry field 5 is the extra dict; presumably
                    # 'branch' is always present there -- inherited behavior
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
498
498
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    # with -c/-m/--dir the revlog is implied, so the single positional
    # argument is the revision, not a file name
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rlog = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        node = rlog.lookup(rev)
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
    ui.write(rlog.revision(node, raw=True))
513
513
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended widens the set of accepted input formats
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    if range:
        # optionally test the parsed timestamp against a date range spec
        matchfn = util.matchdate(range)
        ui.write(("match: %s\n") % matchfn(parsed[0]))
529
529
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain
    """
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA

    def revinfo(rev):
        # compute per-revision delta statistics from the raw index entry;
        # e[1] = compressed size, e[2] = uncompressed size, e[3] = delta base,
        # e[5]/e[6] = parent revisions
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # with generaldelta the base may be any earlier revision;
            # classify which one was chosen
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta, deltas are always against the previous
            # revision unless this revision starts a new chain
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        # sum the compressed sizes along the whole delta chain
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio\n')

    # chain ids are assigned in order of first appearance of each chain base
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = r.start(chainbase)
        revstart = r.start(rev)
        # bytes spanned on disk from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        # bytes inside that span that belong to unrelated revisions
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length one: this revision is its own base
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)

    fm.end()
630
630
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get('nodates')
    datesort = opts.get('datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # each dirstate entry is a tuple (state, mode, size, mtime)
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            # mtime of -1 means the timestamp was never recorded
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            # symlink bit set in the recorded file mode
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    # also dump recorded copy sources
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
661
661
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + cmdutil.remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # run one discovery round against the peer and report the
        # resulting common set
        if opts.get('old'):
            if localheads:
                raise error.Abort('cannot use localheads with old style '
                                  'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                # prune the common set down to heads of its connected
                # ancestor set, matching what new-style discovery returns
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    serverlogs = opts.get('serverlog')
    if serverlogs:
        # replay discovery rounds recorded in server log files;
        # log lines are ';'-separated, field 1 is the operation code
        for filename in serverlogs:
            with open(filename, 'r') as logfile:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
727
727
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # plain one-name-per-line output
            fm.write('name', '%s\n', extname)
        else:
            # default mode annotates each name with its tested-with status
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                # show the most recent version the extension declares
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                 _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
            fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
772
772
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    # with -v, also dump the parse tree for debugging the parser itself
    if ui.verbose:
        ui.note(fileset.prettyformat(fileset.parse(expr)), "\n")

    for filename in ctx.getfileset(expr):
        ui.write("%s\n" % filename)
785
785
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(value):
        # render a boolean probe result as the literal 'yes'/'no'
        return 'yes' if value else 'no'

    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # probing case sensitivity requires creating a real file; report
    # '(unknown)' when the path is not writable
    casesensitive = '(unknown)'
    try:
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
800
800
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args['common'] = [bin(s) for s in common]
    if head:
        args['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args['bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    # map the user-facing compression name to the on-disk bundle format id
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
834
834
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # No file arguments: dump the combined ignore matcher itself.
        ui.write("%s\n" % repr(ignore))
        return

    for f in files:
        normed = util.normpath(f)
        matched = None
        ruleinfo = None
        if normed != '.':
            if ignore(normed):
                # The file itself matches an ignore pattern.
                matched = normed
                ruleinfo = repo.dirstate._ignorefileandline(normed)
            else:
                # Not directly ignored; check whether a containing
                # directory is (walking ancestors, nearest first).
                for parent in util.finddirs(normed):
                    if ignore(parent):
                        matched = parent
                        ruleinfo = repo.dirstate._ignorefileandline(parent)
                        break
        if not matched:
            ui.write(_("%s is not ignored\n") % f)
            continue
        if matched == normed:
            ui.write(_("%s is ignored\n") % f)
        else:
            ui.write(_("%s is ignored because of "
                       "containing folder %s\n")
                     % (f, matched))
        ignorefile, lineno, line = ruleinfo
        ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                 % (ignorefile, lineno, line))
875
875
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # Under generaldelta the "base" column shows the delta parent instead
    # of the chain base.
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    basehdr = ' delta' if generaldelta else ' base'
    shortfn = hex if ui.debugflag else short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for rev in r:
        idlen = len(shortfn(r.node(rev)))
        break

    if format == 0:
        ui.write((" rev offset length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write((" rev flag offset length"
                 " size " + basehdr + " link p1 p2"
                 " %s\n") % "nodeid".rjust(idlen))

    for rev in r:
        node = r.node(rev)
        base = r.deltaparent(rev) if generaldelta else r.chainbase(rev)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # tolerate index entries with unresolvable parents
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                rev, r.start(rev), r.length(rev), base, r.linkrev(rev),
                shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(rev)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                rev, r.flags(rev), r.start(rev), r.length(rev), r.rawsize(rev),
                base, r.linkrev(rev), pr[0], pr[1], shortfn(node)))
931
931
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in r:
        # Emit one edge per (existing) parent; a null second parent
        # means the revision is not a merge.
        parents = r.parents(r.node(rev))
        ui.write("\t%d -> %d\n" % (r.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(parents[1]), rev))
    ui.write("}\n")
945
945
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Runs a series of sanity checks (encoding, Python executable/version,
    security protocols, compiled modules, compression engines, templates,
    editor, username) and reports each through a formatter, so templated
    output (-T) works too.

    Returns 0 on success.
    '''

    def writetemp(contents):
        # Write contents to a fresh temp file and return its path.
        # NOTE(review): appears unused in this version of the function --
        # confirm before removing.
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    # count of hard failures; warnings below deliberately do not bump this
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        encoding.fromlocal("test")
    except error.Abort as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    # TLS protocol support plus SNI availability, folded into one set
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version ("x.y.z+extra" is split into release and custom-build parts)
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # C extensions are expected to be importable under these policies
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = inst
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    # NOTE: 'p' doubles as the "templates usable" flag below -- it is
    # cleared whenever the default map file is missing or fails to load.
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = inst
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    # only the first shell word of the editor setting is looked up in PATH
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        # a missing default 'vi' is only warned about, not counted
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1112
1112
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # One boolean per requested id, in argument order.
    flags = peer.known([bin(s) for s in ids])
    digits = ["1" if f else "0" for f in flags]
    ui.write("%s\n" % "".join(digits))
1125
1125
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Kept only so ancient shell-completion scripts keep working; simply
    # forwards to the modern implementation.
    debugnamecomplete(ui, repo, *args)
1130
1130
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink('lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink('wlock')
    # BUG FIX: the second condition previously re-tested 'force_lock', so
    # running with only -W/--force-wlock removed the wlock but then fell
    # through to the reporting code below instead of returning.
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        """Report the state of one lock; return 1 if held, 0 if free."""
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired (and immediately released) the lock: it is free
            l.release()
        else:
            # held by someone else -- describe the holder if we can
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
                # lock file vanished between the attempts: treat as free

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1202
1202
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # Render the null hash as the literal string 'null' for readability.
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # Pretty-print the v1 or v2 record list (closed over from below).
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge-driver record: driver name and its state,
                # NUL-separated
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file merge records; fields are NUL-separated. v1
                # lacks the "other node" field, hence the version branch.
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file-extras record: filename followed by alternating
                # key/value pairs, all NUL-separated
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # labels record: local, other and (optionally) base labels
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                # unknown record type: dump it raw with NULs made visible
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types sort first, in 'order'; anything else sorts
        # after, by record payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
    if ui.verbose:
        printrecords(2)
1301
1301
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    allnames = set()
    # The 'branches' namespace would include closed branches; historically
    # only open branch names were completed, so collect those separately.
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            allnames.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            allnames.add(tag)
    # An empty prefix matches every known name.
    prefixes = args or ['']
    matches = set(n for prefix in prefixes for n in allnames
                  if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1321
1321
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        # Turn a full hex changeset id into its binary form, aborting on
        # anything that is not exactly a full-width node identifier.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # deletion mode: --delete takes marker indices (as shown by --index)
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        # deleting markers rewrites the obsstore; refuse to do that while
        # another transaction is open
        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker obsoleting 'precursor' in favor of
        # the given successors (possibly none, i.e. pruning)
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    # parent data can only come from a changeset we have
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsolete.getmarkers(repo, nodes=nodes,
                                               exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsolete.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            # indices are positions in the full marker list, so we must
            # iterate over everything even when only displaying a subset
            markerstoiter = obsolete.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1435
1435
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # anything outside the repository cannot be completed
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate paths always use '/'; translate on other platforms
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        for fname, entry in dirstate.iteritems():
            if not fname.startswith(spec) or entry[0] not in acceptable:
                continue
            if fixpaths:
                fname = fname.replace('/', pycompat.ossep)
            if fullpaths:
                files.add(fname)
                continue
            # without --full, stop at the next path separator
            sep = fname.find(pycompat.ossep, speclen)
            if sep >= 0:
                dirs.add(fname[:sep])
            else:
                files.add(fname)
        return files, dirs

    # build the set of acceptable dirstate states from the options
    states = []
    if opts['normal']:
        states.append('nm')
    if opts['added']:
        states.append('a')
    if opts['removed']:
        states.append('r')
    acceptable = ''.join(states)
    cwd = repo.getcwd()

    files, dirs = set(), set()
    for spec in (specs or ['.']):
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1500
1500
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    # apply --tool via a config override so that the normal selection
    # machinery (filemerge._picktool) sees it just like a real config
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # suppress _picktool's own chatter unless --debug was given
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1578
1578
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        keys = target.listkeys(namespace)
        for name, value in sorted(keys.iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(name),
                                   util.escapestr(value)))
        return
    # update mode: compare-and-set a single key
    key, old, new = keyinfo
    result = target.pushkey(namespace, key, old, new)
    ui.status(str(result) + '\n')
    return not result
1599
1599
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the parent vectors of two revisions

    Computes the pvec of each revision and reports their depths, delta,
    hamming distance, distance and relation ('=', '>', '<' or '|').
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        # The four comparisons above are expected to be exhaustive for
        # pvecs, but previously 'rel' was left unbound (NameError) if none
        # matched; keep the report from crashing in that case.
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1620
1620
@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                                  'the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None tells rebuild() to reset everything; --minimal narrows it
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # files only the manifest knows about...
            manifestonly = inmanifest - indirstate
            # ...plus dirstate-only files that are not marked as added
            dsnotadded = set(f for f in indirstate - inmanifest
                             if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1658
1658
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # all of the actual work is delegated to the repair module
    repair.rebuildfncache(ui, repo)
1663
1663
@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        relpath = matcher.rel(path)
        # the filelog records the (source path, source filenode) of a copy
        renamed = fctx.filelog().renamed(fctx.filenode())
        if not renamed:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, renamed[0], hex(renamed[1])))
1680
1680
1681 @command('debugrevlog', cmdutil.debugrevlogopts +
1681 @command('debugrevlog', cmdutil.debugrevlogopts +
1682 [('d', 'dump', False, _('dump index data'))],
1682 [('d', 'dump', False, _('dump index data'))],
1683 _('-c|-m|FILE'),
1683 _('-c|-m|FILE'),
1684 optionalrepo=True)
1684 optionalrepo=True)
1685 def debugrevlog(ui, repo, file_=None, **opts):
1685 def debugrevlog(ui, repo, file_=None, **opts):
1686 """show data and statistics about a revlog"""
1686 """show data and statistics about a revlog"""
1687 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1687 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1688
1688
1689 if opts.get("dump"):
1689 if opts.get("dump"):
1690 numrevs = len(r)
1690 numrevs = len(r)
1691 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1691 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1692 " rawsize totalsize compression heads chainlen\n"))
1692 " rawsize totalsize compression heads chainlen\n"))
1693 ts = 0
1693 ts = 0
1694 heads = set()
1694 heads = set()
1695
1695
1696 for rev in xrange(numrevs):
1696 for rev in xrange(numrevs):
1697 dbase = r.deltaparent(rev)
1697 dbase = r.deltaparent(rev)
1698 if dbase == -1:
1698 if dbase == -1:
1699 dbase = rev
1699 dbase = rev
1700 cbase = r.chainbase(rev)
1700 cbase = r.chainbase(rev)
1701 clen = r.chainlen(rev)
1701 clen = r.chainlen(rev)
1702 p1, p2 = r.parentrevs(rev)
1702 p1, p2 = r.parentrevs(rev)
1703 rs = r.rawsize(rev)
1703 rs = r.rawsize(rev)
1704 ts = ts + rs
1704 ts = ts + rs
1705 heads -= set(r.parentrevs(rev))
1705 heads -= set(r.parentrevs(rev))
1706 heads.add(rev)
1706 heads.add(rev)
1707 try:
1707 try:
1708 compression = ts / r.end(rev)
1708 compression = ts / r.end(rev)
1709 except ZeroDivisionError:
1709 except ZeroDivisionError:
1710 compression = 0
1710 compression = 0
1711 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1711 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1712 "%11d %5d %8d\n" %
1712 "%11d %5d %8d\n" %
1713 (rev, p1, p2, r.start(rev), r.end(rev),
1713 (rev, p1, p2, r.start(rev), r.end(rev),
1714 r.start(dbase), r.start(cbase),
1714 r.start(dbase), r.start(cbase),
1715 r.start(p1), r.start(p2),
1715 r.start(p1), r.start(p2),
1716 rs, ts, compression, len(heads), clen))
1716 rs, ts, compression, len(heads), clen))
1717 return 0
1717 return 0
1718
1718
1719 v = r.version
1719 v = r.version
1720 format = v & 0xFFFF
1720 format = v & 0xFFFF
1721 flags = []
1721 flags = []
1722 gdelta = False
1722 gdelta = False
1723 if v & revlog.FLAG_INLINE_DATA:
1723 if v & revlog.FLAG_INLINE_DATA:
1724 flags.append('inline')
1724 flags.append('inline')
1725 if v & revlog.FLAG_GENERALDELTA:
1725 if v & revlog.FLAG_GENERALDELTA:
1726 gdelta = True
1726 gdelta = True
1727 flags.append('generaldelta')
1727 flags.append('generaldelta')
1728 if not flags:
1728 if not flags:
1729 flags = ['(none)']
1729 flags = ['(none)']
1730
1730
1731 nummerges = 0
1731 nummerges = 0
1732 numfull = 0
1732 numfull = 0
1733 numprev = 0
1733 numprev = 0
1734 nump1 = 0
1734 nump1 = 0
1735 nump2 = 0
1735 nump2 = 0
1736 numother = 0
1736 numother = 0
1737 nump1prev = 0
1737 nump1prev = 0
1738 nump2prev = 0
1738 nump2prev = 0
1739 chainlengths = []
1739 chainlengths = []
1740
1740
1741 datasize = [None, 0, 0]
1741 datasize = [None, 0, 0]
1742 fullsize = [None, 0, 0]
1742 fullsize = [None, 0, 0]
1743 deltasize = [None, 0, 0]
1743 deltasize = [None, 0, 0]
1744 chunktypecounts = {}
1744 chunktypecounts = {}
1745 chunktypesizes = {}
1745 chunktypesizes = {}
1746
1746
1747 def addsize(size, l):
1747 def addsize(size, l):
1748 if l[0] is None or size < l[0]:
1748 if l[0] is None or size < l[0]:
1749 l[0] = size
1749 l[0] = size
1750 if size > l[1]:
1750 if size > l[1]:
1751 l[1] = size
1751 l[1] = size
1752 l[2] += size
1752 l[2] += size
1753
1753
1754 numrevs = len(r)
1754 numrevs = len(r)
1755 for rev in xrange(numrevs):
1755 for rev in xrange(numrevs):
1756 p1, p2 = r.parentrevs(rev)
1756 p1, p2 = r.parentrevs(rev)
1757 delta = r.deltaparent(rev)
1757 delta = r.deltaparent(rev)
1758 if format > 0:
1758 if format > 0:
1759 addsize(r.rawsize(rev), datasize)
1759 addsize(r.rawsize(rev), datasize)
1760 if p2 != nullrev:
1760 if p2 != nullrev:
1761 nummerges += 1
1761 nummerges += 1
1762 size = r.length(rev)
1762 size = r.length(rev)
1763 if delta == nullrev:
1763 if delta == nullrev:
1764 chainlengths.append(0)
1764 chainlengths.append(0)
1765 numfull += 1
1765 numfull += 1
1766 addsize(size, fullsize)
1766 addsize(size, fullsize)
1767 else:
1767 else:
1768 chainlengths.append(chainlengths[delta] + 1)
1768 chainlengths.append(chainlengths[delta] + 1)
1769 addsize(size, deltasize)
1769 addsize(size, deltasize)
1770 if delta == rev - 1:
1770 if delta == rev - 1:
1771 numprev += 1
1771 numprev += 1
1772 if delta == p1:
1772 if delta == p1:
1773 nump1prev += 1
1773 nump1prev += 1
1774 elif delta == p2:
1774 elif delta == p2:
1775 nump2prev += 1
1775 nump2prev += 1
1776 elif delta == p1:
1776 elif delta == p1:
1777 nump1 += 1
1777 nump1 += 1
1778 elif delta == p2:
1778 elif delta == p2:
1779 nump2 += 1
1779 nump2 += 1
1780 elif delta != nullrev:
1780 elif delta != nullrev:
1781 numother += 1
1781 numother += 1
1782
1782
1783 # Obtain data on the raw chunks in the revlog.
1783 # Obtain data on the raw chunks in the revlog.
1784 segment = r._getsegmentforrevs(rev, rev)[1]
1784 segment = r._getsegmentforrevs(rev, rev)[1]
1785 if segment:
1785 if segment:
1786 chunktype = segment[0]
1786 chunktype = segment[0]
1787 else:
1787 else:
1788 chunktype = 'empty'
1788 chunktype = 'empty'
1789
1789
1790 if chunktype not in chunktypecounts:
1790 if chunktype not in chunktypecounts:
1791 chunktypecounts[chunktype] = 0
1791 chunktypecounts[chunktype] = 0
1792 chunktypesizes[chunktype] = 0
1792 chunktypesizes[chunktype] = 0
1793
1793
1794 chunktypecounts[chunktype] += 1
1794 chunktypecounts[chunktype] += 1
1795 chunktypesizes[chunktype] += size
1795 chunktypesizes[chunktype] += size
1796
1796
1797 # Adjust size min value for empty cases
1797 # Adjust size min value for empty cases
1798 for size in (datasize, fullsize, deltasize):
1798 for size in (datasize, fullsize, deltasize):
1799 if size[0] is None:
1799 if size[0] is None:
1800 size[0] = 0
1800 size[0] = 0
1801
1801
1802 numdeltas = numrevs - numfull
1802 numdeltas = numrevs - numfull
1803 numoprev = numprev - nump1prev - nump2prev
1803 numoprev = numprev - nump1prev - nump2prev
1804 totalrawsize = datasize[2]
1804 totalrawsize = datasize[2]
1805 datasize[2] /= numrevs
1805 datasize[2] /= numrevs
1806 fulltotal = fullsize[2]
1806 fulltotal = fullsize[2]
1807 fullsize[2] /= numfull
1807 fullsize[2] /= numfull
1808 deltatotal = deltasize[2]
1808 deltatotal = deltasize[2]
1809 if numrevs - numfull > 0:
1809 if numrevs - numfull > 0:
1810 deltasize[2] /= numrevs - numfull
1810 deltasize[2] /= numrevs - numfull
1811 totalsize = fulltotal + deltatotal
1811 totalsize = fulltotal + deltatotal
1812 avgchainlen = sum(chainlengths) / numrevs
1812 avgchainlen = sum(chainlengths) / numrevs
1813 maxchainlen = max(chainlengths)
1813 maxchainlen = max(chainlengths)
1814 compratio = 1
1814 compratio = 1
1815 if totalsize:
1815 if totalsize:
1816 compratio = totalrawsize / totalsize
1816 compratio = totalrawsize / totalsize
1817
1817
1818 basedfmtstr = '%%%dd\n'
1818 basedfmtstr = '%%%dd\n'
1819 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1819 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1820
1820
1821 def dfmtstr(max):
1821 def dfmtstr(max):
1822 return basedfmtstr % len(str(max))
1822 return basedfmtstr % len(str(max))
1823 def pcfmtstr(max, padding=0):
1823 def pcfmtstr(max, padding=0):
1824 return basepcfmtstr % (len(str(max)), ' ' * padding)
1824 return basepcfmtstr % (len(str(max)), ' ' * padding)
1825
1825
1826 def pcfmt(value, total):
1826 def pcfmt(value, total):
1827 if total:
1827 if total:
1828 return (value, 100 * float(value) / total)
1828 return (value, 100 * float(value) / total)
1829 else:
1829 else:
1830 return value, 100.0
1830 return value, 100.0
1831
1831
1832 ui.write(('format : %d\n') % format)
1832 ui.write(('format : %d\n') % format)
1833 ui.write(('flags : %s\n') % ', '.join(flags))
1833 ui.write(('flags : %s\n') % ', '.join(flags))
1834
1834
1835 ui.write('\n')
1835 ui.write('\n')
1836 fmt = pcfmtstr(totalsize)
1836 fmt = pcfmtstr(totalsize)
1837 fmt2 = dfmtstr(totalsize)
1837 fmt2 = dfmtstr(totalsize)
1838 ui.write(('revisions : ') + fmt2 % numrevs)
1838 ui.write(('revisions : ') + fmt2 % numrevs)
1839 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1839 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1840 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1840 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1841 ui.write(('revisions : ') + fmt2 % numrevs)
1841 ui.write(('revisions : ') + fmt2 % numrevs)
1842 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1842 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1843 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1843 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1844 ui.write(('revision size : ') + fmt2 % totalsize)
1844 ui.write(('revision size : ') + fmt2 % totalsize)
1845 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1845 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1846 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1846 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1847
1847
1848 def fmtchunktype(chunktype):
1848 def fmtchunktype(chunktype):
1849 if chunktype == 'empty':
1849 if chunktype == 'empty':
1850 return ' %s : ' % chunktype
1850 return ' %s : ' % chunktype
1851 elif chunktype in string.ascii_letters:
1851 elif chunktype in string.ascii_letters:
1852 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1852 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1853 else:
1853 else:
1854 return ' 0x%s : ' % hex(chunktype)
1854 return ' 0x%s : ' % hex(chunktype)
1855
1855
1856 ui.write('\n')
1856 ui.write('\n')
1857 ui.write(('chunks : ') + fmt2 % numrevs)
1857 ui.write(('chunks : ') + fmt2 % numrevs)
1858 for chunktype in sorted(chunktypecounts):
1858 for chunktype in sorted(chunktypecounts):
1859 ui.write(fmtchunktype(chunktype))
1859 ui.write(fmtchunktype(chunktype))
1860 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1860 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1861 ui.write(('chunks size : ') + fmt2 % totalsize)
1861 ui.write(('chunks size : ') + fmt2 % totalsize)
1862 for chunktype in sorted(chunktypecounts):
1862 for chunktype in sorted(chunktypecounts):
1863 ui.write(fmtchunktype(chunktype))
1863 ui.write(fmtchunktype(chunktype))
1864 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1864 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1865
1865
1866 ui.write('\n')
1866 ui.write('\n')
1867 fmt = dfmtstr(max(avgchainlen, compratio))
1867 fmt = dfmtstr(max(avgchainlen, compratio))
1868 ui.write(('avg chain length : ') + fmt % avgchainlen)
1868 ui.write(('avg chain length : ') + fmt % avgchainlen)
1869 ui.write(('max chain length : ') + fmt % maxchainlen)
1869 ui.write(('max chain length : ') + fmt % maxchainlen)
1870 ui.write(('compression ratio : ') + fmt % compratio)
1870 ui.write(('compression ratio : ') + fmt % compratio)
1871
1871
1872 if format > 0:
1872 if format > 0:
1873 ui.write('\n')
1873 ui.write('\n')
1874 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1874 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1875 % tuple(datasize))
1875 % tuple(datasize))
1876 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1876 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1877 % tuple(fullsize))
1877 % tuple(fullsize))
1878 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1878 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1879 % tuple(deltasize))
1879 % tuple(deltasize))
1880
1880
1881 if numdeltas > 0:
1881 if numdeltas > 0:
1882 ui.write('\n')
1882 ui.write('\n')
1883 fmt = pcfmtstr(numdeltas)
1883 fmt = pcfmtstr(numdeltas)
1884 fmt2 = pcfmtstr(numdeltas, 4)
1884 fmt2 = pcfmtstr(numdeltas, 4)
1885 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1885 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1886 if numprev > 0:
1886 if numprev > 0:
1887 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1887 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1888 numprev))
1888 numprev))
1889 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1889 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1890 numprev))
1890 numprev))
1891 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1891 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1892 numprev))
1892 numprev))
1893 if gdelta:
1893 if gdelta:
1894 ui.write(('deltas against p1 : ')
1894 ui.write(('deltas against p1 : ')
1895 + fmt % pcfmt(nump1, numdeltas))
1895 + fmt % pcfmt(nump1, numdeltas))
1896 ui.write(('deltas against p2 : ')
1896 ui.write(('deltas against p2 : ')
1897 + fmt % pcfmt(nump2, numdeltas))
1897 + fmt % pcfmt(nump2, numdeltas))
1898 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1898 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1899 numdeltas))
1899 numdeltas))
1900
1900
1901 @command('debugrevspec',
1901 @command('debugrevspec',
1902 [('', 'optimize', None,
1902 [('', 'optimize', None,
1903 _('print parsed tree after optimizing (DEPRECATED)')),
1903 _('print parsed tree after optimizing (DEPRECATED)')),
1904 ('', 'show-revs', True, _('print list of result revisions (default)')),
1904 ('', 'show-revs', True, _('print list of result revisions (default)')),
1905 ('s', 'show-set', None, _('print internal representation of result set')),
1905 ('s', 'show-set', None, _('print internal representation of result set')),
1906 ('p', 'show-stage', [],
1906 ('p', 'show-stage', [],
1907 _('print parsed tree at the given stage'), _('NAME')),
1907 _('print parsed tree at the given stage'), _('NAME')),
1908 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1908 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1909 ('', 'verify-optimized', False, _('verify optimized result')),
1909 ('', 'verify-optimized', False, _('verify optimized result')),
1910 ],
1910 ],
1911 ('REVSPEC'))
1911 ('REVSPEC'))
1912 def debugrevspec(ui, repo, expr, **opts):
1912 def debugrevspec(ui, repo, expr, **opts):
1913 """parse and apply a revision specification
1913 """parse and apply a revision specification
1914
1914
1915 Use -p/--show-stage option to print the parsed tree at the given stages.
1915 Use -p/--show-stage option to print the parsed tree at the given stages.
1916 Use -p all to print tree at every stage.
1916 Use -p all to print tree at every stage.
1917
1917
1918 Use --no-show-revs option with -s or -p to print only the set
1918 Use --no-show-revs option with -s or -p to print only the set
1919 representation or the parsed tree respectively.
1919 representation or the parsed tree respectively.
1920
1920
1921 Use --verify-optimized to compare the optimized result with the unoptimized
1921 Use --verify-optimized to compare the optimized result with the unoptimized
1922 one. Returns 1 if the optimized result differs.
1922 one. Returns 1 if the optimized result differs.
1923 """
1923 """
1924 stages = [
1924 stages = [
1925 ('parsed', lambda tree: tree),
1925 ('parsed', lambda tree: tree),
1926 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1926 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1927 ('concatenated', revsetlang.foldconcat),
1927 ('concatenated', revsetlang.foldconcat),
1928 ('analyzed', revsetlang.analyze),
1928 ('analyzed', revsetlang.analyze),
1929 ('optimized', revsetlang.optimize),
1929 ('optimized', revsetlang.optimize),
1930 ]
1930 ]
1931 if opts['no_optimized']:
1931 if opts['no_optimized']:
1932 stages = stages[:-1]
1932 stages = stages[:-1]
1933 if opts['verify_optimized'] and opts['no_optimized']:
1933 if opts['verify_optimized'] and opts['no_optimized']:
1934 raise error.Abort(_('cannot use --verify-optimized with '
1934 raise error.Abort(_('cannot use --verify-optimized with '
1935 '--no-optimized'))
1935 '--no-optimized'))
1936 stagenames = set(n for n, f in stages)
1936 stagenames = set(n for n, f in stages)
1937
1937
1938 showalways = set()
1938 showalways = set()
1939 showchanged = set()
1939 showchanged = set()
1940 if ui.verbose and not opts['show_stage']:
1940 if ui.verbose and not opts['show_stage']:
1941 # show parsed tree by --verbose (deprecated)
1941 # show parsed tree by --verbose (deprecated)
1942 showalways.add('parsed')
1942 showalways.add('parsed')
1943 showchanged.update(['expanded', 'concatenated'])
1943 showchanged.update(['expanded', 'concatenated'])
1944 if opts['optimize']:
1944 if opts['optimize']:
1945 showalways.add('optimized')
1945 showalways.add('optimized')
1946 if opts['show_stage'] and opts['optimize']:
1946 if opts['show_stage'] and opts['optimize']:
1947 raise error.Abort(_('cannot use --optimize with --show-stage'))
1947 raise error.Abort(_('cannot use --optimize with --show-stage'))
1948 if opts['show_stage'] == ['all']:
1948 if opts['show_stage'] == ['all']:
1949 showalways.update(stagenames)
1949 showalways.update(stagenames)
1950 else:
1950 else:
1951 for n in opts['show_stage']:
1951 for n in opts['show_stage']:
1952 if n not in stagenames:
1952 if n not in stagenames:
1953 raise error.Abort(_('invalid stage name: %s') % n)
1953 raise error.Abort(_('invalid stage name: %s') % n)
1954 showalways.update(opts['show_stage'])
1954 showalways.update(opts['show_stage'])
1955
1955
1956 treebystage = {}
1956 treebystage = {}
1957 printedtree = None
1957 printedtree = None
1958 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1958 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1959 for n, f in stages:
1959 for n, f in stages:
1960 treebystage[n] = tree = f(tree)
1960 treebystage[n] = tree = f(tree)
1961 if n in showalways or (n in showchanged and tree != printedtree):
1961 if n in showalways or (n in showchanged and tree != printedtree):
1962 if opts['show_stage'] or n != 'parsed':
1962 if opts['show_stage'] or n != 'parsed':
1963 ui.write(("* %s:\n") % n)
1963 ui.write(("* %s:\n") % n)
1964 ui.write(revsetlang.prettyformat(tree), "\n")
1964 ui.write(revsetlang.prettyformat(tree), "\n")
1965 printedtree = tree
1965 printedtree = tree
1966
1966
1967 if opts['verify_optimized']:
1967 if opts['verify_optimized']:
1968 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1968 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1969 brevs = revset.makematcher(treebystage['optimized'])(repo)
1969 brevs = revset.makematcher(treebystage['optimized'])(repo)
1970 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1970 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1971 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1971 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1972 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1972 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1973 arevs = list(arevs)
1973 arevs = list(arevs)
1974 brevs = list(brevs)
1974 brevs = list(brevs)
1975 if arevs == brevs:
1975 if arevs == brevs:
1976 return 0
1976 return 0
1977 ui.write(('--- analyzed\n'), label='diff.file_a')
1977 ui.write(('--- analyzed\n'), label='diff.file_a')
1978 ui.write(('+++ optimized\n'), label='diff.file_b')
1978 ui.write(('+++ optimized\n'), label='diff.file_b')
1979 sm = difflib.SequenceMatcher(None, arevs, brevs)
1979 sm = difflib.SequenceMatcher(None, arevs, brevs)
1980 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1980 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1981 if tag in ('delete', 'replace'):
1981 if tag in ('delete', 'replace'):
1982 for c in arevs[alo:ahi]:
1982 for c in arevs[alo:ahi]:
1983 ui.write('-%s\n' % c, label='diff.deleted')
1983 ui.write('-%s\n' % c, label='diff.deleted')
1984 if tag in ('insert', 'replace'):
1984 if tag in ('insert', 'replace'):
1985 for c in brevs[blo:bhi]:
1985 for c in brevs[blo:bhi]:
1986 ui.write('+%s\n' % c, label='diff.inserted')
1986 ui.write('+%s\n' % c, label='diff.inserted')
1987 if tag == 'equal':
1987 if tag == 'equal':
1988 for c in arevs[alo:ahi]:
1988 for c in arevs[alo:ahi]:
1989 ui.write(' %s\n' % c)
1989 ui.write(' %s\n' % c)
1990 return 1
1990 return 1
1991
1991
1992 func = revset.makematcher(tree)
1992 func = revset.makematcher(tree)
1993 revs = func(repo)
1993 revs = func(repo)
1994 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1994 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1995 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
1995 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
1996 if not opts['show_revs']:
1996 if not opts['show_revs']:
1997 return
1997 return
1998 for c in revs:
1998 for c in revs:
1999 ui.write("%s\n" % c)
1999 ui.write("%s\n" % c)
2000
2000
2001 @command('debugsetparents', [], _('REV1 [REV2]'))
2001 @command('debugsetparents', [], _('REV1 [REV2]'))
2002 def debugsetparents(ui, repo, rev1, rev2=None):
2002 def debugsetparents(ui, repo, rev1, rev2=None):
2003 """manually set the parents of the current working directory
2003 """manually set the parents of the current working directory
2004
2004
2005 This is useful for writing repository conversion tools, but should
2005 This is useful for writing repository conversion tools, but should
2006 be used with care. For example, neither the working directory nor the
2006 be used with care. For example, neither the working directory nor the
2007 dirstate is updated, so file status may be incorrect after running this
2007 dirstate is updated, so file status may be incorrect after running this
2008 command.
2008 command.
2009
2009
2010 Returns 0 on success.
2010 Returns 0 on success.
2011 """
2011 """
2012
2012
2013 r1 = scmutil.revsingle(repo, rev1).node()
2013 r1 = scmutil.revsingle(repo, rev1).node()
2014 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2014 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2015
2015
2016 with repo.wlock():
2016 with repo.wlock():
2017 repo.setparents(r1, r2)
2017 repo.setparents(r1, r2)
2018
2018
2019 @command('debugsub',
2019 @command('debugsub',
2020 [('r', 'rev', '',
2020 [('r', 'rev', '',
2021 _('revision to check'), _('REV'))],
2021 _('revision to check'), _('REV'))],
2022 _('[-r REV] [REV]'))
2022 _('[-r REV] [REV]'))
2023 def debugsub(ui, repo, rev=None):
2023 def debugsub(ui, repo, rev=None):
2024 ctx = scmutil.revsingle(repo, rev, None)
2024 ctx = scmutil.revsingle(repo, rev, None)
2025 for k, v in sorted(ctx.substate.items()):
2025 for k, v in sorted(ctx.substate.items()):
2026 ui.write(('path %s\n') % k)
2026 ui.write(('path %s\n') % k)
2027 ui.write((' source %s\n') % v[0])
2027 ui.write((' source %s\n') % v[0])
2028 ui.write((' revision %s\n') % v[1])
2028 ui.write((' revision %s\n') % v[1])
2029
2029
2030 @command('debugsuccessorssets',
2030 @command('debugsuccessorssets',
2031 [],
2031 [],
2032 _('[REV]'))
2032 _('[REV]'))
2033 def debugsuccessorssets(ui, repo, *revs):
2033 def debugsuccessorssets(ui, repo, *revs):
2034 """show set of successors for revision
2034 """show set of successors for revision
2035
2035
2036 A successors set of changeset A is a consistent group of revisions that
2036 A successors set of changeset A is a consistent group of revisions that
2037 succeed A. It contains non-obsolete changesets only.
2037 succeed A. It contains non-obsolete changesets only.
2038
2038
2039 In most cases a changeset A has a single successors set containing a single
2039 In most cases a changeset A has a single successors set containing a single
2040 successor (changeset A replaced by A').
2040 successor (changeset A replaced by A').
2041
2041
2042 A changeset that is made obsolete with no successors are called "pruned".
2042 A changeset that is made obsolete with no successors are called "pruned".
2043 Such changesets have no successors sets at all.
2043 Such changesets have no successors sets at all.
2044
2044
2045 A changeset that has been "split" will have a successors set containing
2045 A changeset that has been "split" will have a successors set containing
2046 more than one successor.
2046 more than one successor.
2047
2047
2048 A changeset that has been rewritten in multiple different ways is called
2048 A changeset that has been rewritten in multiple different ways is called
2049 "divergent". Such changesets have multiple successor sets (each of which
2049 "divergent". Such changesets have multiple successor sets (each of which
2050 may also be split, i.e. have multiple successors).
2050 may also be split, i.e. have multiple successors).
2051
2051
2052 Results are displayed as follows::
2052 Results are displayed as follows::
2053
2053
2054 <rev1>
2054 <rev1>
2055 <successors-1A>
2055 <successors-1A>
2056 <rev2>
2056 <rev2>
2057 <successors-2A>
2057 <successors-2A>
2058 <successors-2B1> <successors-2B2> <successors-2B3>
2058 <successors-2B1> <successors-2B2> <successors-2B3>
2059
2059
2060 Here rev2 has two possible (i.e. divergent) successors sets. The first
2060 Here rev2 has two possible (i.e. divergent) successors sets. The first
2061 holds one element, whereas the second holds three (i.e. the changeset has
2061 holds one element, whereas the second holds three (i.e. the changeset has
2062 been split).
2062 been split).
2063 """
2063 """
2064 # passed to successorssets caching computation from one call to another
2064 # passed to successorssets caching computation from one call to another
2065 cache = {}
2065 cache = {}
2066 ctx2str = str
2066 ctx2str = str
2067 node2str = short
2067 node2str = short
2068 if ui.debug():
2068 if ui.debug():
2069 def ctx2str(ctx):
2069 def ctx2str(ctx):
2070 return ctx.hex()
2070 return ctx.hex()
2071 node2str = hex
2071 node2str = hex
2072 for rev in scmutil.revrange(repo, revs):
2072 for rev in scmutil.revrange(repo, revs):
2073 ctx = repo[rev]
2073 ctx = repo[rev]
2074 ui.write('%s\n'% ctx2str(ctx))
2074 ui.write('%s\n'% ctx2str(ctx))
2075 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2075 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2076 if succsset:
2076 if succsset:
2077 ui.write(' ')
2077 ui.write(' ')
2078 ui.write(node2str(succsset[0]))
2078 ui.write(node2str(succsset[0]))
2079 for node in succsset[1:]:
2079 for node in succsset[1:]:
2080 ui.write(' ')
2080 ui.write(' ')
2081 ui.write(node2str(node))
2081 ui.write(node2str(node))
2082 ui.write('\n')
2082 ui.write('\n')
2083
2083
2084 @command('debugtemplate',
2084 @command('debugtemplate',
2085 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2085 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2086 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2086 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2087 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2087 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2088 optionalrepo=True)
2088 optionalrepo=True)
2089 def debugtemplate(ui, repo, tmpl, **opts):
2089 def debugtemplate(ui, repo, tmpl, **opts):
2090 """parse and apply a template
2090 """parse and apply a template
2091
2091
2092 If -r/--rev is given, the template is processed as a log template and
2092 If -r/--rev is given, the template is processed as a log template and
2093 applied to the given changesets. Otherwise, it is processed as a generic
2093 applied to the given changesets. Otherwise, it is processed as a generic
2094 template.
2094 template.
2095
2095
2096 Use --verbose to print the parsed tree.
2096 Use --verbose to print the parsed tree.
2097 """
2097 """
2098 revs = None
2098 revs = None
2099 if opts['rev']:
2099 if opts['rev']:
2100 if repo is None:
2100 if repo is None:
2101 raise error.RepoError(_('there is no Mercurial repository here '
2101 raise error.RepoError(_('there is no Mercurial repository here '
2102 '(.hg not found)'))
2102 '(.hg not found)'))
2103 revs = scmutil.revrange(repo, opts['rev'])
2103 revs = scmutil.revrange(repo, opts['rev'])
2104
2104
2105 props = {}
2105 props = {}
2106 for d in opts['define']:
2106 for d in opts['define']:
2107 try:
2107 try:
2108 k, v = (e.strip() for e in d.split('=', 1))
2108 k, v = (e.strip() for e in d.split('=', 1))
2109 if not k or k == 'ui':
2109 if not k or k == 'ui':
2110 raise ValueError
2110 raise ValueError
2111 props[k] = v
2111 props[k] = v
2112 except ValueError:
2112 except ValueError:
2113 raise error.Abort(_('malformed keyword definition: %s') % d)
2113 raise error.Abort(_('malformed keyword definition: %s') % d)
2114
2114
2115 if ui.verbose:
2115 if ui.verbose:
2116 aliases = ui.configitems('templatealias')
2116 aliases = ui.configitems('templatealias')
2117 tree = templater.parse(tmpl)
2117 tree = templater.parse(tmpl)
2118 ui.note(templater.prettyformat(tree), '\n')
2118 ui.note(templater.prettyformat(tree), '\n')
2119 newtree = templater.expandaliases(tree, aliases)
2119 newtree = templater.expandaliases(tree, aliases)
2120 if newtree != tree:
2120 if newtree != tree:
2121 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2121 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2122
2122
2123 if revs is None:
2123 if revs is None:
2124 k = 'debugtemplate'
2124 t = formatter.maketemplater(ui, tmpl)
2125 t = formatter.maketemplater(ui, k, tmpl)
2125 props['ui'] = ui
2126 ui.write(templater.stringify(t(k, ui=ui, **props)))
2126 ui.write(t.render(props))
2127 else:
2127 else:
2128 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2128 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2129 for r in revs:
2129 for r in revs:
2130 displayer.show(repo[r], **props)
2130 displayer.show(repo[r], **props)
2131 displayer.close()
2131 displayer.close()
2132
2132
2133 @command('debugupdatecaches', [])
2133 @command('debugupdatecaches', [])
2134 def debugupdatecaches(ui, repo, *pats, **opts):
2134 def debugupdatecaches(ui, repo, *pats, **opts):
2135 """warm all known caches in the repository"""
2135 """warm all known caches in the repository"""
2136 with repo.wlock():
2136 with repo.wlock():
2137 with repo.lock():
2137 with repo.lock():
2138 repo.updatecaches()
2138 repo.updatecaches()
2139
2139
2140 @command('debugupgraderepo', [
2140 @command('debugupgraderepo', [
2141 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2141 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2142 ('', 'run', False, _('performs an upgrade')),
2142 ('', 'run', False, _('performs an upgrade')),
2143 ])
2143 ])
2144 def debugupgraderepo(ui, repo, run=False, optimize=None):
2144 def debugupgraderepo(ui, repo, run=False, optimize=None):
2145 """upgrade a repository to use different features
2145 """upgrade a repository to use different features
2146
2146
2147 If no arguments are specified, the repository is evaluated for upgrade
2147 If no arguments are specified, the repository is evaluated for upgrade
2148 and a list of problems and potential optimizations is printed.
2148 and a list of problems and potential optimizations is printed.
2149
2149
2150 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2150 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2151 can be influenced via additional arguments. More details will be provided
2151 can be influenced via additional arguments. More details will be provided
2152 by the command output when run without ``--run``.
2152 by the command output when run without ``--run``.
2153
2153
2154 During the upgrade, the repository will be locked and no writes will be
2154 During the upgrade, the repository will be locked and no writes will be
2155 allowed.
2155 allowed.
2156
2156
2157 At the end of the upgrade, the repository may not be readable while new
2157 At the end of the upgrade, the repository may not be readable while new
2158 repository data is swapped in. This window will be as long as it takes to
2158 repository data is swapped in. This window will be as long as it takes to
2159 rename some directories inside the ``.hg`` directory. On most machines, this
2159 rename some directories inside the ``.hg`` directory. On most machines, this
2160 should complete almost instantaneously and the chances of a consumer being
2160 should complete almost instantaneously and the chances of a consumer being
2161 unable to access the repository should be low.
2161 unable to access the repository should be low.
2162 """
2162 """
2163 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2163 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2164
2164
2165 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2165 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2166 inferrepo=True)
2166 inferrepo=True)
2167 def debugwalk(ui, repo, *pats, **opts):
2167 def debugwalk(ui, repo, *pats, **opts):
2168 """show how files match on given patterns"""
2168 """show how files match on given patterns"""
2169 m = scmutil.match(repo[None], pats, opts)
2169 m = scmutil.match(repo[None], pats, opts)
2170 ui.write(('matcher: %r\n' % m))
2170 ui.write(('matcher: %r\n' % m))
2171 items = list(repo[None].walk(m))
2171 items = list(repo[None].walk(m))
2172 if not items:
2172 if not items:
2173 return
2173 return
2174 f = lambda fn: fn
2174 f = lambda fn: fn
2175 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2175 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2176 f = lambda fn: util.normpath(fn)
2176 f = lambda fn: util.normpath(fn)
2177 fmt = 'f %%-%ds %%-%ds %%s' % (
2177 fmt = 'f %%-%ds %%-%ds %%s' % (
2178 max([len(abs) for abs in items]),
2178 max([len(abs) for abs in items]),
2179 max([len(m.rel(abs)) for abs in items]))
2179 max([len(m.rel(abs)) for abs in items]))
2180 for abs in items:
2180 for abs in items:
2181 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2181 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2182 ui.write("%s\n" % line.rstrip())
2182 ui.write("%s\n" % line.rstrip())
2183
2183
2184 @command('debugwireargs',
2184 @command('debugwireargs',
2185 [('', 'three', '', 'three'),
2185 [('', 'three', '', 'three'),
2186 ('', 'four', '', 'four'),
2186 ('', 'four', '', 'four'),
2187 ('', 'five', '', 'five'),
2187 ('', 'five', '', 'five'),
2188 ] + cmdutil.remoteopts,
2188 ] + cmdutil.remoteopts,
2189 _('REPO [OPTIONS]... [ONE [TWO]]'),
2189 _('REPO [OPTIONS]... [ONE [TWO]]'),
2190 norepo=True)
2190 norepo=True)
2191 def debugwireargs(ui, repopath, *vals, **opts):
2191 def debugwireargs(ui, repopath, *vals, **opts):
2192 repo = hg.peer(ui, opts, repopath)
2192 repo = hg.peer(ui, opts, repopath)
2193 for opt in cmdutil.remoteopts:
2193 for opt in cmdutil.remoteopts:
2194 del opts[opt[1]]
2194 del opts[opt[1]]
2195 args = {}
2195 args = {}
2196 for k, v in opts.iteritems():
2196 for k, v in opts.iteritems():
2197 if v:
2197 if v:
2198 args[k] = v
2198 args[k] = v
2199 # run twice to check that we don't mess up the stream for the next command
2199 # run twice to check that we don't mess up the stream for the next command
2200 res1 = repo.debugwireargs(*vals, **args)
2200 res1 = repo.debugwireargs(*vals, **args)
2201 res2 = repo.debugwireargs(*vals, **args)
2201 res2 = repo.debugwireargs(*vals, **args)
2202 ui.write("%s\n" % res1)
2202 ui.write("%s\n" % res1)
2203 if res1 != res2:
2203 if res1 != res2:
2204 ui.warn("%s\n" % res2)
2204 ui.warn("%s\n" % res2)
@@ -1,744 +1,744 b''
1 # filemerge.py - file-level merge handling for Mercurial
1 # filemerge.py - file-level merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import filecmp
10 import filecmp
11 import os
11 import os
12 import re
12 import re
13 import tempfile
13 import tempfile
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import nullid, short
16 from .node import nullid, short
17
17
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 formatter,
21 formatter,
22 match,
22 match,
23 pycompat,
23 pycompat,
24 scmutil,
24 scmutil,
25 simplemerge,
25 simplemerge,
26 tagmerge,
26 tagmerge,
27 templatekw,
27 templatekw,
28 templater,
28 templater,
29 util,
29 util,
30 )
30 )
31
31
32 def _toolstr(ui, tool, part, default=""):
32 def _toolstr(ui, tool, part, default=""):
33 return ui.config("merge-tools", tool + "." + part, default)
33 return ui.config("merge-tools", tool + "." + part, default)
34
34
35 def _toolbool(ui, tool, part, default=False):
35 def _toolbool(ui, tool, part, default=False):
36 return ui.configbool("merge-tools", tool + "." + part, default)
36 return ui.configbool("merge-tools", tool + "." + part, default)
37
37
38 def _toollist(ui, tool, part, default=None):
38 def _toollist(ui, tool, part, default=None):
39 if default is None:
39 if default is None:
40 default = []
40 default = []
41 return ui.configlist("merge-tools", tool + "." + part, default)
41 return ui.configlist("merge-tools", tool + "." + part, default)
42
42
43 internals = {}
43 internals = {}
44 # Merge tools to document.
44 # Merge tools to document.
45 internalsdoc = {}
45 internalsdoc = {}
46
46
47 # internal tool merge types
47 # internal tool merge types
48 nomerge = None
48 nomerge = None
49 mergeonly = 'mergeonly' # just the full merge, no premerge
49 mergeonly = 'mergeonly' # just the full merge, no premerge
50 fullmerge = 'fullmerge' # both premerge and merge
50 fullmerge = 'fullmerge' # both premerge and merge
51
51
52 _localchangedotherdeletedmsg = _(
52 _localchangedotherdeletedmsg = _(
53 "local%(l)s changed %(fd)s which other%(o)s deleted\n"
53 "local%(l)s changed %(fd)s which other%(o)s deleted\n"
54 "use (c)hanged version, (d)elete, or leave (u)nresolved?"
54 "use (c)hanged version, (d)elete, or leave (u)nresolved?"
55 "$$ &Changed $$ &Delete $$ &Unresolved")
55 "$$ &Changed $$ &Delete $$ &Unresolved")
56
56
57 _otherchangedlocaldeletedmsg = _(
57 _otherchangedlocaldeletedmsg = _(
58 "other%(o)s changed %(fd)s which local%(l)s deleted\n"
58 "other%(o)s changed %(fd)s which local%(l)s deleted\n"
59 "use (c)hanged version, leave (d)eleted, or "
59 "use (c)hanged version, leave (d)eleted, or "
60 "leave (u)nresolved?"
60 "leave (u)nresolved?"
61 "$$ &Changed $$ &Deleted $$ &Unresolved")
61 "$$ &Changed $$ &Deleted $$ &Unresolved")
62
62
63 class absentfilectx(object):
63 class absentfilectx(object):
64 """Represents a file that's ostensibly in a context but is actually not
64 """Represents a file that's ostensibly in a context but is actually not
65 present in it.
65 present in it.
66
66
67 This is here because it's very specific to the filemerge code for now --
67 This is here because it's very specific to the filemerge code for now --
68 other code is likely going to break with the values this returns."""
68 other code is likely going to break with the values this returns."""
69 def __init__(self, ctx, f):
69 def __init__(self, ctx, f):
70 self._ctx = ctx
70 self._ctx = ctx
71 self._f = f
71 self._f = f
72
72
73 def path(self):
73 def path(self):
74 return self._f
74 return self._f
75
75
76 def size(self):
76 def size(self):
77 return None
77 return None
78
78
79 def data(self):
79 def data(self):
80 return None
80 return None
81
81
82 def filenode(self):
82 def filenode(self):
83 return nullid
83 return nullid
84
84
85 _customcmp = True
85 _customcmp = True
86 def cmp(self, fctx):
86 def cmp(self, fctx):
87 """compare with other file context
87 """compare with other file context
88
88
89 returns True if different from fctx.
89 returns True if different from fctx.
90 """
90 """
91 return not (fctx.isabsent() and
91 return not (fctx.isabsent() and
92 fctx.ctx() == self.ctx() and
92 fctx.ctx() == self.ctx() and
93 fctx.path() == self.path())
93 fctx.path() == self.path())
94
94
95 def flags(self):
95 def flags(self):
96 return ''
96 return ''
97
97
98 def changectx(self):
98 def changectx(self):
99 return self._ctx
99 return self._ctx
100
100
101 def isbinary(self):
101 def isbinary(self):
102 return False
102 return False
103
103
104 def isabsent(self):
104 def isabsent(self):
105 return True
105 return True
106
106
107 def internaltool(name, mergetype, onfailure=None, precheck=None):
107 def internaltool(name, mergetype, onfailure=None, precheck=None):
108 '''return a decorator for populating internal merge tool table'''
108 '''return a decorator for populating internal merge tool table'''
109 def decorator(func):
109 def decorator(func):
110 fullname = ':' + name
110 fullname = ':' + name
111 func.__doc__ = (pycompat.sysstr("``%s``\n" % fullname)
111 func.__doc__ = (pycompat.sysstr("``%s``\n" % fullname)
112 + func.__doc__.strip())
112 + func.__doc__.strip())
113 internals[fullname] = func
113 internals[fullname] = func
114 internals['internal:' + name] = func
114 internals['internal:' + name] = func
115 internalsdoc[fullname] = func
115 internalsdoc[fullname] = func
116 func.mergetype = mergetype
116 func.mergetype = mergetype
117 func.onfailure = onfailure
117 func.onfailure = onfailure
118 func.precheck = precheck
118 func.precheck = precheck
119 return func
119 return func
120 return decorator
120 return decorator
121
121
122 def _findtool(ui, tool):
122 def _findtool(ui, tool):
123 if tool in internals:
123 if tool in internals:
124 return tool
124 return tool
125 return findexternaltool(ui, tool)
125 return findexternaltool(ui, tool)
126
126
127 def findexternaltool(ui, tool):
127 def findexternaltool(ui, tool):
128 for kn in ("regkey", "regkeyalt"):
128 for kn in ("regkey", "regkeyalt"):
129 k = _toolstr(ui, tool, kn)
129 k = _toolstr(ui, tool, kn)
130 if not k:
130 if not k:
131 continue
131 continue
132 p = util.lookupreg(k, _toolstr(ui, tool, "regname"))
132 p = util.lookupreg(k, _toolstr(ui, tool, "regname"))
133 if p:
133 if p:
134 p = util.findexe(p + _toolstr(ui, tool, "regappend"))
134 p = util.findexe(p + _toolstr(ui, tool, "regappend"))
135 if p:
135 if p:
136 return p
136 return p
137 exe = _toolstr(ui, tool, "executable", tool)
137 exe = _toolstr(ui, tool, "executable", tool)
138 return util.findexe(util.expandpath(exe))
138 return util.findexe(util.expandpath(exe))
139
139
140 def _picktool(repo, ui, path, binary, symlink, changedelete):
140 def _picktool(repo, ui, path, binary, symlink, changedelete):
141 def supportscd(tool):
141 def supportscd(tool):
142 return tool in internals and internals[tool].mergetype == nomerge
142 return tool in internals and internals[tool].mergetype == nomerge
143
143
144 def check(tool, pat, symlink, binary, changedelete):
144 def check(tool, pat, symlink, binary, changedelete):
145 tmsg = tool
145 tmsg = tool
146 if pat:
146 if pat:
147 tmsg = _("%s (for pattern %s)") % (tool, pat)
147 tmsg = _("%s (for pattern %s)") % (tool, pat)
148 if not _findtool(ui, tool):
148 if not _findtool(ui, tool):
149 if pat: # explicitly requested tool deserves a warning
149 if pat: # explicitly requested tool deserves a warning
150 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
150 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
151 else: # configured but non-existing tools are more silent
151 else: # configured but non-existing tools are more silent
152 ui.note(_("couldn't find merge tool %s\n") % tmsg)
152 ui.note(_("couldn't find merge tool %s\n") % tmsg)
153 elif symlink and not _toolbool(ui, tool, "symlink"):
153 elif symlink and not _toolbool(ui, tool, "symlink"):
154 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
154 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
155 elif binary and not _toolbool(ui, tool, "binary"):
155 elif binary and not _toolbool(ui, tool, "binary"):
156 ui.warn(_("tool %s can't handle binary\n") % tmsg)
156 ui.warn(_("tool %s can't handle binary\n") % tmsg)
157 elif changedelete and not supportscd(tool):
157 elif changedelete and not supportscd(tool):
158 # the nomerge tools are the only tools that support change/delete
158 # the nomerge tools are the only tools that support change/delete
159 # conflicts
159 # conflicts
160 pass
160 pass
161 elif not util.gui() and _toolbool(ui, tool, "gui"):
161 elif not util.gui() and _toolbool(ui, tool, "gui"):
162 ui.warn(_("tool %s requires a GUI\n") % tmsg)
162 ui.warn(_("tool %s requires a GUI\n") % tmsg)
163 else:
163 else:
164 return True
164 return True
165 return False
165 return False
166
166
167 # internal config: ui.forcemerge
167 # internal config: ui.forcemerge
168 # forcemerge comes from command line arguments, highest priority
168 # forcemerge comes from command line arguments, highest priority
169 force = ui.config('ui', 'forcemerge')
169 force = ui.config('ui', 'forcemerge')
170 if force:
170 if force:
171 toolpath = _findtool(ui, force)
171 toolpath = _findtool(ui, force)
172 if changedelete and not supportscd(toolpath):
172 if changedelete and not supportscd(toolpath):
173 return ":prompt", None
173 return ":prompt", None
174 else:
174 else:
175 if toolpath:
175 if toolpath:
176 return (force, util.shellquote(toolpath))
176 return (force, util.shellquote(toolpath))
177 else:
177 else:
178 # mimic HGMERGE if given tool not found
178 # mimic HGMERGE if given tool not found
179 return (force, force)
179 return (force, force)
180
180
181 # HGMERGE takes next precedence
181 # HGMERGE takes next precedence
182 hgmerge = encoding.environ.get("HGMERGE")
182 hgmerge = encoding.environ.get("HGMERGE")
183 if hgmerge:
183 if hgmerge:
184 if changedelete and not supportscd(hgmerge):
184 if changedelete and not supportscd(hgmerge):
185 return ":prompt", None
185 return ":prompt", None
186 else:
186 else:
187 return (hgmerge, hgmerge)
187 return (hgmerge, hgmerge)
188
188
189 # then patterns
189 # then patterns
190 for pat, tool in ui.configitems("merge-patterns"):
190 for pat, tool in ui.configitems("merge-patterns"):
191 mf = match.match(repo.root, '', [pat])
191 mf = match.match(repo.root, '', [pat])
192 if mf(path) and check(tool, pat, symlink, False, changedelete):
192 if mf(path) and check(tool, pat, symlink, False, changedelete):
193 toolpath = _findtool(ui, tool)
193 toolpath = _findtool(ui, tool)
194 return (tool, util.shellquote(toolpath))
194 return (tool, util.shellquote(toolpath))
195
195
196 # then merge tools
196 # then merge tools
197 tools = {}
197 tools = {}
198 disabled = set()
198 disabled = set()
199 for k, v in ui.configitems("merge-tools"):
199 for k, v in ui.configitems("merge-tools"):
200 t = k.split('.')[0]
200 t = k.split('.')[0]
201 if t not in tools:
201 if t not in tools:
202 tools[t] = int(_toolstr(ui, t, "priority", "0"))
202 tools[t] = int(_toolstr(ui, t, "priority", "0"))
203 if _toolbool(ui, t, "disabled", False):
203 if _toolbool(ui, t, "disabled", False):
204 disabled.add(t)
204 disabled.add(t)
205 names = tools.keys()
205 names = tools.keys()
206 tools = sorted([(-p, tool) for tool, p in tools.items()
206 tools = sorted([(-p, tool) for tool, p in tools.items()
207 if tool not in disabled])
207 if tool not in disabled])
208 uimerge = ui.config("ui", "merge")
208 uimerge = ui.config("ui", "merge")
209 if uimerge:
209 if uimerge:
210 # external tools defined in uimerge won't be able to handle
210 # external tools defined in uimerge won't be able to handle
211 # change/delete conflicts
211 # change/delete conflicts
212 if uimerge not in names and not changedelete:
212 if uimerge not in names and not changedelete:
213 return (uimerge, uimerge)
213 return (uimerge, uimerge)
214 tools.insert(0, (None, uimerge)) # highest priority
214 tools.insert(0, (None, uimerge)) # highest priority
215 tools.append((None, "hgmerge")) # the old default, if found
215 tools.append((None, "hgmerge")) # the old default, if found
216 for p, t in tools:
216 for p, t in tools:
217 if check(t, None, symlink, binary, changedelete):
217 if check(t, None, symlink, binary, changedelete):
218 toolpath = _findtool(ui, t)
218 toolpath = _findtool(ui, t)
219 return (t, util.shellquote(toolpath))
219 return (t, util.shellquote(toolpath))
220
220
221 # internal merge or prompt as last resort
221 # internal merge or prompt as last resort
222 if symlink or binary or changedelete:
222 if symlink or binary or changedelete:
223 if not changedelete and len(tools):
223 if not changedelete and len(tools):
224 # any tool is rejected by capability for symlink or binary
224 # any tool is rejected by capability for symlink or binary
225 ui.warn(_("no tool found to merge %s\n") % path)
225 ui.warn(_("no tool found to merge %s\n") % path)
226 return ":prompt", None
226 return ":prompt", None
227 return ":merge", None
227 return ":merge", None
228
228
229 def _eoltype(data):
229 def _eoltype(data):
230 "Guess the EOL type of a file"
230 "Guess the EOL type of a file"
231 if '\0' in data: # binary
231 if '\0' in data: # binary
232 return None
232 return None
233 if '\r\n' in data: # Windows
233 if '\r\n' in data: # Windows
234 return '\r\n'
234 return '\r\n'
235 if '\r' in data: # Old Mac
235 if '\r' in data: # Old Mac
236 return '\r'
236 return '\r'
237 if '\n' in data: # UNIX
237 if '\n' in data: # UNIX
238 return '\n'
238 return '\n'
239 return None # unknown
239 return None # unknown
240
240
241 def _matcheol(file, origfile):
241 def _matcheol(file, origfile):
242 "Convert EOL markers in a file to match origfile"
242 "Convert EOL markers in a file to match origfile"
243 tostyle = _eoltype(util.readfile(origfile))
243 tostyle = _eoltype(util.readfile(origfile))
244 if tostyle:
244 if tostyle:
245 data = util.readfile(file)
245 data = util.readfile(file)
246 style = _eoltype(data)
246 style = _eoltype(data)
247 if style:
247 if style:
248 newdata = data.replace(style, tostyle)
248 newdata = data.replace(style, tostyle)
249 if newdata != data:
249 if newdata != data:
250 util.writefile(file, newdata)
250 util.writefile(file, newdata)
251
251
252 @internaltool('prompt', nomerge)
252 @internaltool('prompt', nomerge)
253 def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
253 def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
254 """Asks the user which of the local `p1()` or the other `p2()` version to
254 """Asks the user which of the local `p1()` or the other `p2()` version to
255 keep as the merged version."""
255 keep as the merged version."""
256 ui = repo.ui
256 ui = repo.ui
257 fd = fcd.path()
257 fd = fcd.path()
258
258
259 prompts = partextras(labels)
259 prompts = partextras(labels)
260 prompts['fd'] = fd
260 prompts['fd'] = fd
261 try:
261 try:
262 if fco.isabsent():
262 if fco.isabsent():
263 index = ui.promptchoice(
263 index = ui.promptchoice(
264 _localchangedotherdeletedmsg % prompts, 2)
264 _localchangedotherdeletedmsg % prompts, 2)
265 choice = ['local', 'other', 'unresolved'][index]
265 choice = ['local', 'other', 'unresolved'][index]
266 elif fcd.isabsent():
266 elif fcd.isabsent():
267 index = ui.promptchoice(
267 index = ui.promptchoice(
268 _otherchangedlocaldeletedmsg % prompts, 2)
268 _otherchangedlocaldeletedmsg % prompts, 2)
269 choice = ['other', 'local', 'unresolved'][index]
269 choice = ['other', 'local', 'unresolved'][index]
270 else:
270 else:
271 index = ui.promptchoice(
271 index = ui.promptchoice(
272 _("keep (l)ocal%(l)s, take (o)ther%(o)s, or leave (u)nresolved"
272 _("keep (l)ocal%(l)s, take (o)ther%(o)s, or leave (u)nresolved"
273 " for %(fd)s?"
273 " for %(fd)s?"
274 "$$ &Local $$ &Other $$ &Unresolved") % prompts, 2)
274 "$$ &Local $$ &Other $$ &Unresolved") % prompts, 2)
275 choice = ['local', 'other', 'unresolved'][index]
275 choice = ['local', 'other', 'unresolved'][index]
276
276
277 if choice == 'other':
277 if choice == 'other':
278 return _iother(repo, mynode, orig, fcd, fco, fca, toolconf,
278 return _iother(repo, mynode, orig, fcd, fco, fca, toolconf,
279 labels)
279 labels)
280 elif choice == 'local':
280 elif choice == 'local':
281 return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf,
281 return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf,
282 labels)
282 labels)
283 elif choice == 'unresolved':
283 elif choice == 'unresolved':
284 return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
284 return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
285 labels)
285 labels)
286 except error.ResponseExpected:
286 except error.ResponseExpected:
287 ui.write("\n")
287 ui.write("\n")
288 return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
288 return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
289 labels)
289 labels)
290
290
291 @internaltool('local', nomerge)
291 @internaltool('local', nomerge)
292 def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
292 def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
293 """Uses the local `p1()` version of files as the merged version."""
293 """Uses the local `p1()` version of files as the merged version."""
294 return 0, fcd.isabsent()
294 return 0, fcd.isabsent()
295
295
296 @internaltool('other', nomerge)
296 @internaltool('other', nomerge)
297 def _iother(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
297 def _iother(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
298 """Uses the other `p2()` version of files as the merged version."""
298 """Uses the other `p2()` version of files as the merged version."""
299 if fco.isabsent():
299 if fco.isabsent():
300 # local changed, remote deleted -- 'deleted' picked
300 # local changed, remote deleted -- 'deleted' picked
301 repo.wvfs.unlinkpath(fcd.path())
301 repo.wvfs.unlinkpath(fcd.path())
302 deleted = True
302 deleted = True
303 else:
303 else:
304 repo.wwrite(fcd.path(), fco.data(), fco.flags())
304 repo.wwrite(fcd.path(), fco.data(), fco.flags())
305 deleted = False
305 deleted = False
306 return 0, deleted
306 return 0, deleted
307
307
308 @internaltool('fail', nomerge)
308 @internaltool('fail', nomerge)
309 def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
309 def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
310 """
310 """
311 Rather than attempting to merge files that were modified on both
311 Rather than attempting to merge files that were modified on both
312 branches, it marks them as unresolved. The resolve command must be
312 branches, it marks them as unresolved. The resolve command must be
313 used to resolve these conflicts."""
313 used to resolve these conflicts."""
314 # for change/delete conflicts write out the changed version, then fail
314 # for change/delete conflicts write out the changed version, then fail
315 if fcd.isabsent():
315 if fcd.isabsent():
316 repo.wwrite(fcd.path(), fco.data(), fco.flags())
316 repo.wwrite(fcd.path(), fco.data(), fco.flags())
317 return 1, False
317 return 1, False
318
318
319 def _premerge(repo, fcd, fco, fca, toolconf, files, labels=None):
319 def _premerge(repo, fcd, fco, fca, toolconf, files, labels=None):
320 tool, toolpath, binary, symlink = toolconf
320 tool, toolpath, binary, symlink = toolconf
321 if symlink or fcd.isabsent() or fco.isabsent():
321 if symlink or fcd.isabsent() or fco.isabsent():
322 return 1
322 return 1
323 a, b, c, back = files
323 a, b, c, back = files
324
324
325 ui = repo.ui
325 ui = repo.ui
326
326
327 validkeep = ['keep', 'keep-merge3']
327 validkeep = ['keep', 'keep-merge3']
328
328
329 # do we attempt to simplemerge first?
329 # do we attempt to simplemerge first?
330 try:
330 try:
331 premerge = _toolbool(ui, tool, "premerge", not binary)
331 premerge = _toolbool(ui, tool, "premerge", not binary)
332 except error.ConfigError:
332 except error.ConfigError:
333 premerge = _toolstr(ui, tool, "premerge").lower()
333 premerge = _toolstr(ui, tool, "premerge").lower()
334 if premerge not in validkeep:
334 if premerge not in validkeep:
335 _valid = ', '.join(["'" + v + "'" for v in validkeep])
335 _valid = ', '.join(["'" + v + "'" for v in validkeep])
336 raise error.ConfigError(_("%s.premerge not valid "
336 raise error.ConfigError(_("%s.premerge not valid "
337 "('%s' is neither boolean nor %s)") %
337 "('%s' is neither boolean nor %s)") %
338 (tool, premerge, _valid))
338 (tool, premerge, _valid))
339
339
340 if premerge:
340 if premerge:
341 if premerge == 'keep-merge3':
341 if premerge == 'keep-merge3':
342 if not labels:
342 if not labels:
343 labels = _defaultconflictlabels
343 labels = _defaultconflictlabels
344 if len(labels) < 3:
344 if len(labels) < 3:
345 labels.append('base')
345 labels.append('base')
346 r = simplemerge.simplemerge(ui, a, b, c, quiet=True, label=labels)
346 r = simplemerge.simplemerge(ui, a, b, c, quiet=True, label=labels)
347 if not r:
347 if not r:
348 ui.debug(" premerge successful\n")
348 ui.debug(" premerge successful\n")
349 return 0
349 return 0
350 if premerge not in validkeep:
350 if premerge not in validkeep:
351 util.copyfile(back, a) # restore from backup and try again
351 util.copyfile(back, a) # restore from backup and try again
352 return 1 # continue merging
352 return 1 # continue merging
353
353
354 def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf):
354 def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf):
355 tool, toolpath, binary, symlink = toolconf
355 tool, toolpath, binary, symlink = toolconf
356 if symlink:
356 if symlink:
357 repo.ui.warn(_('warning: internal %s cannot merge symlinks '
357 repo.ui.warn(_('warning: internal %s cannot merge symlinks '
358 'for %s\n') % (tool, fcd.path()))
358 'for %s\n') % (tool, fcd.path()))
359 return False
359 return False
360 if fcd.isabsent() or fco.isabsent():
360 if fcd.isabsent() or fco.isabsent():
361 repo.ui.warn(_('warning: internal %s cannot merge change/delete '
361 repo.ui.warn(_('warning: internal %s cannot merge change/delete '
362 'conflict for %s\n') % (tool, fcd.path()))
362 'conflict for %s\n') % (tool, fcd.path()))
363 return False
363 return False
364 return True
364 return True
365
365
366 def _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, mode):
366 def _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, mode):
367 """
367 """
368 Uses the internal non-interactive simple merge algorithm for merging
368 Uses the internal non-interactive simple merge algorithm for merging
369 files. It will fail if there are any conflicts and leave markers in
369 files. It will fail if there are any conflicts and leave markers in
370 the partially merged file. Markers will have two sections, one for each side
370 the partially merged file. Markers will have two sections, one for each side
371 of merge, unless mode equals 'union' which suppresses the markers."""
371 of merge, unless mode equals 'union' which suppresses the markers."""
372 a, b, c, back = files
372 a, b, c, back = files
373
373
374 ui = repo.ui
374 ui = repo.ui
375
375
376 r = simplemerge.simplemerge(ui, a, b, c, label=labels, mode=mode)
376 r = simplemerge.simplemerge(ui, a, b, c, label=labels, mode=mode)
377 return True, r, False
377 return True, r, False
378
378
379 @internaltool('union', fullmerge,
379 @internaltool('union', fullmerge,
380 _("warning: conflicts while merging %s! "
380 _("warning: conflicts while merging %s! "
381 "(edit, then use 'hg resolve --mark')\n"),
381 "(edit, then use 'hg resolve --mark')\n"),
382 precheck=_mergecheck)
382 precheck=_mergecheck)
383 def _iunion(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
383 def _iunion(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
384 """
384 """
385 Uses the internal non-interactive simple merge algorithm for merging
385 Uses the internal non-interactive simple merge algorithm for merging
386 files. It will use both left and right sides for conflict regions.
386 files. It will use both left and right sides for conflict regions.
387 No markers are inserted."""
387 No markers are inserted."""
388 return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
388 return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
389 files, labels, 'union')
389 files, labels, 'union')
390
390
391 @internaltool('merge', fullmerge,
391 @internaltool('merge', fullmerge,
392 _("warning: conflicts while merging %s! "
392 _("warning: conflicts while merging %s! "
393 "(edit, then use 'hg resolve --mark')\n"),
393 "(edit, then use 'hg resolve --mark')\n"),
394 precheck=_mergecheck)
394 precheck=_mergecheck)
395 def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
395 def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
396 """
396 """
397 Uses the internal non-interactive simple merge algorithm for merging
397 Uses the internal non-interactive simple merge algorithm for merging
398 files. It will fail if there are any conflicts and leave markers in
398 files. It will fail if there are any conflicts and leave markers in
399 the partially merged file. Markers will have two sections, one for each side
399 the partially merged file. Markers will have two sections, one for each side
400 of merge."""
400 of merge."""
401 return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
401 return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
402 files, labels, 'merge')
402 files, labels, 'merge')
403
403
404 @internaltool('merge3', fullmerge,
404 @internaltool('merge3', fullmerge,
405 _("warning: conflicts while merging %s! "
405 _("warning: conflicts while merging %s! "
406 "(edit, then use 'hg resolve --mark')\n"),
406 "(edit, then use 'hg resolve --mark')\n"),
407 precheck=_mergecheck)
407 precheck=_mergecheck)
408 def _imerge3(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
408 def _imerge3(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
409 """
409 """
410 Uses the internal non-interactive simple merge algorithm for merging
410 Uses the internal non-interactive simple merge algorithm for merging
411 files. It will fail if there are any conflicts and leave markers in
411 files. It will fail if there are any conflicts and leave markers in
412 the partially merged file. Marker will have three sections, one from each
412 the partially merged file. Marker will have three sections, one from each
413 side of the merge and one for the base content."""
413 side of the merge and one for the base content."""
414 if not labels:
414 if not labels:
415 labels = _defaultconflictlabels
415 labels = _defaultconflictlabels
416 if len(labels) < 3:
416 if len(labels) < 3:
417 labels.append('base')
417 labels.append('base')
418 return _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels)
418 return _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels)
419
419
420 def _imergeauto(repo, mynode, orig, fcd, fco, fca, toolconf, files,
420 def _imergeauto(repo, mynode, orig, fcd, fco, fca, toolconf, files,
421 labels=None, localorother=None):
421 labels=None, localorother=None):
422 """
422 """
423 Generic driver for _imergelocal and _imergeother
423 Generic driver for _imergelocal and _imergeother
424 """
424 """
425 assert localorother is not None
425 assert localorother is not None
426 tool, toolpath, binary, symlink = toolconf
426 tool, toolpath, binary, symlink = toolconf
427 a, b, c, back = files
427 a, b, c, back = files
428 r = simplemerge.simplemerge(repo.ui, a, b, c, label=labels,
428 r = simplemerge.simplemerge(repo.ui, a, b, c, label=labels,
429 localorother=localorother)
429 localorother=localorother)
430 return True, r
430 return True, r
431
431
432 @internaltool('merge-local', mergeonly, precheck=_mergecheck)
432 @internaltool('merge-local', mergeonly, precheck=_mergecheck)
433 def _imergelocal(*args, **kwargs):
433 def _imergelocal(*args, **kwargs):
434 """
434 """
435 Like :merge, but resolve all conflicts non-interactively in favor
435 Like :merge, but resolve all conflicts non-interactively in favor
436 of the local `p1()` changes."""
436 of the local `p1()` changes."""
437 success, status = _imergeauto(localorother='local', *args, **kwargs)
437 success, status = _imergeauto(localorother='local', *args, **kwargs)
438 return success, status, False
438 return success, status, False
439
439
440 @internaltool('merge-other', mergeonly, precheck=_mergecheck)
440 @internaltool('merge-other', mergeonly, precheck=_mergecheck)
441 def _imergeother(*args, **kwargs):
441 def _imergeother(*args, **kwargs):
442 """
442 """
443 Like :merge, but resolve all conflicts non-interactively in favor
443 Like :merge, but resolve all conflicts non-interactively in favor
444 of the other `p2()` changes."""
444 of the other `p2()` changes."""
445 success, status = _imergeauto(localorother='other', *args, **kwargs)
445 success, status = _imergeauto(localorother='other', *args, **kwargs)
446 return success, status, False
446 return success, status, False
447
447
448 @internaltool('tagmerge', mergeonly,
448 @internaltool('tagmerge', mergeonly,
449 _("automatic tag merging of %s failed! "
449 _("automatic tag merging of %s failed! "
450 "(use 'hg resolve --tool :merge' or another merge "
450 "(use 'hg resolve --tool :merge' or another merge "
451 "tool of your choice)\n"))
451 "tool of your choice)\n"))
452 def _itagmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
452 def _itagmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
453 """
453 """
454 Uses the internal tag merge algorithm (experimental).
454 Uses the internal tag merge algorithm (experimental).
455 """
455 """
456 success, status = tagmerge.merge(repo, fcd, fco, fca)
456 success, status = tagmerge.merge(repo, fcd, fco, fca)
457 return success, status, False
457 return success, status, False
458
458
@internaltool('dump', fullmerge)
def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """
    Creates three versions of the files to merge, containing the
    contents of local, other and base. These files can then be used to
    perform a merge manually. If the file to be merged is named
    ``a.txt``, these files will accordingly be named ``a.txt.local``,
    ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
    same directory as ``a.txt``.

    This implies premerge. Therefore, files aren't dumped, if premerge
    runs successfully. Use :forcedump to forcibly write files out.
    """
    # files is (local-path, base-tmp, other-tmp, backup); only the local
    # working-directory path is needed to derive the dump file names.
    a, b, c, back = files

    fd = fcd.path()

    # .local is a plain filesystem copy; .other/.base go through wwrite so
    # that filters (eol, etc.) and flags are applied consistently.
    util.copyfile(a, a + ".local")
    repo.wwrite(fd + ".other", fco.data(), fco.flags())
    repo.wwrite(fd + ".base", fca.data(), fca.flags())
    # not merged (needcheck False), exit status 1, file not deleted
    return False, 1, False
480
480
@internaltool('forcedump', mergeonly)
def _forcedump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
               labels=None):
    """
    Creates three versions of the files as same as :dump, but omits premerge.
    """
    # Same behavior as :dump; registering with 'mergeonly' is what skips
    # the premerge step, so we simply forward everything to _idump.
    return _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
                  labels=labels)
489
489
def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """Run an external merge tool configured by the user.

    Builds the HG_* environment, interpolates $local/$base/$other/$output
    into the tool's argument string, and shells out via ui.system().
    """
    tool, toolpath, binary, symlink = toolconf
    if fcd.isabsent() or fco.isabsent():
        # external tools cannot resolve change/delete conflicts
        repo.ui.warn(_('warning: %s cannot merge change/delete conflict '
                       'for %s\n') % (tool, fcd.path()))
        return False, 1, None
    a, b, c, back = files
    out = ""
    env = {'HG_FILE': fcd.path(),
           'HG_MY_NODE': short(mynode),
           'HG_OTHER_NODE': str(fco.changectx()),
           'HG_BASE_NODE': str(fca.changectx()),
           'HG_MY_ISLINK': 'l' in fcd.flags(),
           'HG_OTHER_ISLINK': 'l' in fco.flags(),
           'HG_BASE_ISLINK': 'l' in fca.flags(),
           }

    ui = repo.ui

    args = _toolstr(ui, tool, "args", '$local $base $other')
    if "$output" in args:
        # the tool writes to a separate output file: feed it the backup as
        # input and let it write to the original working-directory file
        out, a = a, back
    replace = {'local': a, 'base': b, 'other': c, 'output': out}
    args = util.interpolate(r'\$', replace, args,
                            lambda s: util.shellquote(util.localpath(s)))
    cmd = toolpath + ' ' + args
    if _toolbool(ui, tool, "gui"):
        repo.ui.status(_('running merge tool %s for file %s\n') %
                       (tool, fcd.path()))
    repo.ui.debug('launching merge tool: %s\n' % cmd)
    ret = ui.system(cmd, cwd=repo.root, environ=env, blockedtag='mergetool')
    repo.ui.debug('merge tool returned: %s\n' % ret)
    # needcheck=True: caller must validate the result via _check()
    return True, ret, False
523
523
def _formatconflictmarker(repo, ctx, template, label, pad):
    """Applies the given template to the ctx, prefixed by the label.

    Pad is the minimum width of the label prefix, so that multiple markers
    can have aligned templated parts.
    """
    # a workingctx has no node; describe its first parent instead
    if ctx.node() is None:
        ctx = ctx.p1()

    props = templatekw.keywords.copy()
    props.update({'templ': template, 'ctx': ctx, 'repo': repo})
    rendered = template.render(props)

    prefix = ('%s:' % label).ljust(pad + 1)
    mark = '%s %s' % (prefix, rendered)

    if mark:
        mark = mark.splitlines()[0] # split for safety

    # 8 for the prefix of conflict marker lines (e.g. '<<<<<<< ')
    return util.ellipsis(mark, 80 - 8)
547
547
548 _defaultconflictmarker = ('{node|short} '
548 _defaultconflictmarker = ('{node|short} '
549 '{ifeq(tags, "tip", "", '
549 '{ifeq(tags, "tip", "", '
550 'ifeq(tags, "", "", "{tags} "))}'
550 'ifeq(tags, "", "", "{tags} "))}'
551 '{if(bookmarks, "{bookmarks} ")}'
551 '{if(bookmarks, "{bookmarks} ")}'
552 '{ifeq(branch, "default", "", "{branch} ")}'
552 '{ifeq(branch, "default", "", "{branch} ")}'
553 '- {author|user}: {desc|firstline}')
553 '- {author|user}: {desc|firstline}')
554
554
555 _defaultconflictlabels = ['local', 'other']
555 _defaultconflictlabels = ['local', 'other']
556
556
def _formatlabels(repo, fcd, fco, fca, labels):
    """Formats the given labels using the conflict marker template.

    Returns a list of formatted labels.
    """
    # changectxs in the same order as labels: local, other, base
    ctxs = [fcd.changectx(), fco.changectx(), fca.changectx()]

    ui = repo.ui
    template = ui.config('ui', 'mergemarkertemplate', _defaultconflictmarker)
    template = templater.unquotestring(template)
    tmpl = formatter.maketemplater(ui, template)

    pad = max(len(l) for l in labels)

    # zip stops at the shorter sequence, so a 2-element labels list simply
    # skips the base context
    return [_formatconflictmarker(repo, ctx, tmpl, label, pad)
            for ctx, label in zip(ctxs, labels)]
578
578
def partextras(labels):
    """Return a dictionary of extra labels for use in prompts to the user

    Intended use is in strings of the form "(l)ocal%(l)s".
    """
    if labels is None:
        return {"l": "", "o": ""}
    return {"l": " [%s]" % labels[0],
            "o": " [%s]" % labels[1]}
594
594
def _filemerge(premerge, repo, mynode, orig, fcd, fco, fca, labels=None):
    """perform a 3-way merge in the working directory

    premerge = whether this is a premerge
    mynode = parent node before merge
    orig = original local filename before merge
    fco = other file context
    fca = ancestor file context
    fcd = local file context for current/destination file

    Returns whether the merge is complete, the return value of the merge, and
    a boolean indicating whether the file was deleted from disk."""

    def temp(prefix, ctx):
        # write ctx's data to a uniquely-named temp file and return its path
        fullbase, ext = os.path.splitext(ctx.path())
        pre = "%s~%s." % (os.path.basename(fullbase), prefix)
        fd, name = tempfile.mkstemp(prefix=pre, suffix=ext)
        data = repo.wwritedata(ctx.path(), ctx.data())
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(data)
        f.close()
        return name

    if not fco.cmp(fcd): # files identical?
        return True, None, False

    ui = repo.ui
    fd = fcd.path()
    binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
    symlink = 'l' in fcd.flags() + fco.flags()
    changedelete = fcd.isabsent() or fco.isabsent()
    tool, toolpath = _picktool(repo, ui, fd, binary, symlink, changedelete)
    if tool in internals and tool.startswith('internal:'):
        # normalize to new-style names (':merge' etc)
        tool = tool[len('internal'):]
    ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
             % (tool, fd, pycompat.bytestr(binary), pycompat.bytestr(symlink),
                pycompat.bytestr(changedelete)))

    if tool in internals:
        func = internals[tool]
        mergetype = func.mergetype
        onfailure = func.onfailure
        precheck = func.precheck
    else:
        # anything unknown is an external command
        func = _xmerge
        mergetype = fullmerge
        onfailure = _("merging %s failed!\n")
        precheck = None

    toolconf = tool, toolpath, binary, symlink

    if mergetype == nomerge:
        # nomerge tools (e.g. :local, :other) decide without temp files
        r, deleted = func(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
        return True, r, deleted

    if premerge:
        if orig != fco.path():
            ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
        else:
            ui.status(_("merging %s\n") % fd)

    ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))

    if precheck and not precheck(repo, mynode, orig, fcd, fco, fca,
                                 toolconf):
        if onfailure:
            ui.warn(onfailure % fd)
        return True, 1, False

    # a: working-dir file, b/c: temp copies of base/other, back: backup
    a = repo.wjoin(fd)
    b = temp("base", fca)
    c = temp("other", fco)
    if not fcd.isabsent():
        back = scmutil.origpath(ui, repo, a)
        if premerge:
            util.copyfile(a, back)
    else:
        back = None
    files = (a, b, c, back)

    r = 1
    try:
        markerstyle = ui.config('ui', 'mergemarkers', 'basic')
        if not labels:
            labels = _defaultconflictlabels
        if markerstyle != 'basic':
            labels = _formatlabels(repo, fcd, fco, fca, labels)

        if premerge and mergetype == fullmerge:
            r = _premerge(repo, fcd, fco, fca, toolconf, files, labels=labels)
            # complete if premerge successful (r is 0)
            return not r, r, False

        needcheck, r, deleted = func(repo, mynode, orig, fcd, fco, fca,
                                     toolconf, files, labels=labels)

        if needcheck:
            r = _check(r, ui, tool, fcd, files)

        if r and onfailure:
            ui.warn(onfailure % fd)

        return True, r, deleted
    finally:
        # keep the backup around on failure so the user can retry
        if not r and back is not None:
            util.unlink(back)
        util.unlink(b)
        util.unlink(c)
705
705
def _check(r, ui, tool, fcd, files):
    """Post-merge validation for tools that need it.

    Applies the tool's configured 'check' options (conflicts/prompt/changed)
    and the fixeol fixup; returns the possibly-updated merge return code.
    """
    fd = fcd.path()
    a, b, c, back = files

    if not r and (_toolbool(ui, tool, "checkconflicts") or
                  'conflicts' in _toollist(ui, tool, "check")):
        # a surviving conflict marker means the merge was not resolved
        if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(),
                     re.MULTILINE):
            r = 1

    checked = False
    if 'prompt' in _toollist(ui, tool, "check"):
        checked = True
        if ui.promptchoice(_("was merge of '%s' successful (yn)?"
                             "$$ &Yes $$ &No") % fd, 1):
            r = 1

    if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
                                  'changed' in
                                  _toollist(ui, tool, "check")):
        # tool claimed success but left the file identical to the backup
        if back is not None and filecmp.cmp(a, back):
            if ui.promptchoice(_(" output file %s appears unchanged\n"
                                 "was merge successful (yn)?"
                                 "$$ &Yes $$ &No") % fd, 1):
                r = 1

    if back is not None and _toolbool(ui, tool, "fixeol"):
        _matcheol(a, back)

    return r
736
736
def premerge(repo, mynode, orig, fcd, fco, fca, labels=None):
    """Run the premerge pass of a 3-way file merge (see _filemerge)."""
    return _filemerge(True, repo, mynode, orig, fcd, fco, fca, labels=labels)
739
739
def filemerge(repo, mynode, orig, fcd, fco, fca, labels=None):
    """Run the full (non-premerge) pass of a 3-way file merge."""
    return _filemerge(False, repo, mynode, orig, fcd, fco, fca, labels=labels)
742
742
# tell hggettext to extract docstrings from these functions:
i18nfunctions = internals.values()
@@ -1,486 +1,489 b''
1 # formatter.py - generic output formatting for mercurial
1 # formatter.py - generic output formatting for mercurial
2 #
2 #
3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
3 # Copyright 2012 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """Generic output formatting for Mercurial
8 """Generic output formatting for Mercurial
9
9
10 The formatter provides API to show data in various ways. The following
10 The formatter provides API to show data in various ways. The following
11 functions should be used in place of ui.write():
11 functions should be used in place of ui.write():
12
12
13 - fm.write() for unconditional output
13 - fm.write() for unconditional output
14 - fm.condwrite() to show some extra data conditionally in plain output
14 - fm.condwrite() to show some extra data conditionally in plain output
15 - fm.context() to provide changectx to template output
15 - fm.context() to provide changectx to template output
16 - fm.data() to provide extra data to JSON or template output
16 - fm.data() to provide extra data to JSON or template output
17 - fm.plain() to show raw text that isn't provided to JSON or template output
17 - fm.plain() to show raw text that isn't provided to JSON or template output
18
18
19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
19 To show structured data (e.g. date tuples, dicts, lists), apply fm.format*()
20 beforehand so the data is converted to the appropriate data type. Use
20 beforehand so the data is converted to the appropriate data type. Use
21 fm.isplain() if you need to convert or format data conditionally which isn't
21 fm.isplain() if you need to convert or format data conditionally which isn't
22 supported by the formatter API.
22 supported by the formatter API.
23
23
24 To build nested structure (i.e. a list of dicts), use fm.nested().
24 To build nested structure (i.e. a list of dicts), use fm.nested().
25
25
26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
26 See also https://www.mercurial-scm.org/wiki/GenericTemplatingPlan
27
27
28 fm.condwrite() vs 'if cond:':
28 fm.condwrite() vs 'if cond:':
29
29
30 In most cases, use fm.condwrite() so users can selectively show the data
30 In most cases, use fm.condwrite() so users can selectively show the data
31 in template output. If it's costly to build data, use plain 'if cond:' with
31 in template output. If it's costly to build data, use plain 'if cond:' with
32 fm.write().
32 fm.write().
33
33
34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
34 fm.nested() vs fm.formatdict() (or fm.formatlist()):
35
35
36 fm.nested() should be used to form a tree structure (a list of dicts of
36 fm.nested() should be used to form a tree structure (a list of dicts of
37 lists of dicts...) which can be accessed through template keywords, e.g.
37 lists of dicts...) which can be accessed through template keywords, e.g.
38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
38 "{foo % "{bar % {...}} {baz % {...}}"}". On the other hand, fm.formatdict()
39 exports a dict-type object to template, which can be accessed by e.g.
39 exports a dict-type object to template, which can be accessed by e.g.
40 "{get(foo, key)}" function.
40 "{get(foo, key)}" function.
41
41
42 Doctest helper:
42 Doctest helper:
43
43
44 >>> def show(fn, verbose=False, **opts):
44 >>> def show(fn, verbose=False, **opts):
45 ... import sys
45 ... import sys
46 ... from . import ui as uimod
46 ... from . import ui as uimod
47 ... ui = uimod.ui()
47 ... ui = uimod.ui()
48 ... ui.fout = sys.stdout # redirect to doctest
48 ... ui.fout = sys.stdout # redirect to doctest
49 ... ui.verbose = verbose
49 ... ui.verbose = verbose
50 ... return fn(ui, ui.formatter(fn.__name__, opts))
50 ... return fn(ui, ui.formatter(fn.__name__, opts))
51
51
52 Basic example:
52 Basic example:
53
53
54 >>> def files(ui, fm):
54 >>> def files(ui, fm):
55 ... files = [('foo', 123, (0, 0)), ('bar', 456, (1, 0))]
55 ... files = [('foo', 123, (0, 0)), ('bar', 456, (1, 0))]
56 ... for f in files:
56 ... for f in files:
57 ... fm.startitem()
57 ... fm.startitem()
58 ... fm.write('path', '%s', f[0])
58 ... fm.write('path', '%s', f[0])
59 ... fm.condwrite(ui.verbose, 'date', ' %s',
59 ... fm.condwrite(ui.verbose, 'date', ' %s',
60 ... fm.formatdate(f[2], '%Y-%m-%d %H:%M:%S'))
60 ... fm.formatdate(f[2], '%Y-%m-%d %H:%M:%S'))
61 ... fm.data(size=f[1])
61 ... fm.data(size=f[1])
62 ... fm.plain('\\n')
62 ... fm.plain('\\n')
63 ... fm.end()
63 ... fm.end()
64 >>> show(files)
64 >>> show(files)
65 foo
65 foo
66 bar
66 bar
67 >>> show(files, verbose=True)
67 >>> show(files, verbose=True)
68 foo 1970-01-01 00:00:00
68 foo 1970-01-01 00:00:00
69 bar 1970-01-01 00:00:01
69 bar 1970-01-01 00:00:01
70 >>> show(files, template='json')
70 >>> show(files, template='json')
71 [
71 [
72 {
72 {
73 "date": [0, 0],
73 "date": [0, 0],
74 "path": "foo",
74 "path": "foo",
75 "size": 123
75 "size": 123
76 },
76 },
77 {
77 {
78 "date": [1, 0],
78 "date": [1, 0],
79 "path": "bar",
79 "path": "bar",
80 "size": 456
80 "size": 456
81 }
81 }
82 ]
82 ]
83 >>> show(files, template='path: {path}\\ndate: {date|rfc3339date}\\n')
83 >>> show(files, template='path: {path}\\ndate: {date|rfc3339date}\\n')
84 path: foo
84 path: foo
85 date: 1970-01-01T00:00:00+00:00
85 date: 1970-01-01T00:00:00+00:00
86 path: bar
86 path: bar
87 date: 1970-01-01T00:00:01+00:00
87 date: 1970-01-01T00:00:01+00:00
88
88
89 Nested example:
89 Nested example:
90
90
91 >>> def subrepos(ui, fm):
91 >>> def subrepos(ui, fm):
92 ... fm.startitem()
92 ... fm.startitem()
93 ... fm.write('repo', '[%s]\\n', 'baz')
93 ... fm.write('repo', '[%s]\\n', 'baz')
94 ... files(ui, fm.nested('files'))
94 ... files(ui, fm.nested('files'))
95 ... fm.end()
95 ... fm.end()
96 >>> show(subrepos)
96 >>> show(subrepos)
97 [baz]
97 [baz]
98 foo
98 foo
99 bar
99 bar
100 >>> show(subrepos, template='{repo}: {join(files % "{path}", ", ")}\\n')
100 >>> show(subrepos, template='{repo}: {join(files % "{path}", ", ")}\\n')
101 baz: foo, bar
101 baz: foo, bar
102 """
102 """
103
103
104 from __future__ import absolute_import
104 from __future__ import absolute_import
105
105
106 import collections
106 import collections
107 import contextlib
107 import contextlib
108 import itertools
108 import itertools
109 import os
109 import os
110
110
111 from .i18n import _
111 from .i18n import _
112 from .node import (
112 from .node import (
113 hex,
113 hex,
114 short,
114 short,
115 )
115 )
116
116
117 from . import (
117 from . import (
118 error,
118 error,
119 pycompat,
119 pycompat,
120 templatefilters,
120 templatefilters,
121 templatekw,
121 templatekw,
122 templater,
122 templater,
123 util,
123 util,
124 )
124 )
125
125
# util wraps the fastest available pickle implementation
pickle = util.pickle
127
127
128 class _nullconverter(object):
128 class _nullconverter(object):
129 '''convert non-primitive data types to be processed by formatter'''
129 '''convert non-primitive data types to be processed by formatter'''
130 @staticmethod
130 @staticmethod
131 def formatdate(date, fmt):
131 def formatdate(date, fmt):
132 '''convert date tuple to appropriate format'''
132 '''convert date tuple to appropriate format'''
133 return date
133 return date
134 @staticmethod
134 @staticmethod
135 def formatdict(data, key, value, fmt, sep):
135 def formatdict(data, key, value, fmt, sep):
136 '''convert dict or key-value pairs to appropriate dict format'''
136 '''convert dict or key-value pairs to appropriate dict format'''
137 # use plain dict instead of util.sortdict so that data can be
137 # use plain dict instead of util.sortdict so that data can be
138 # serialized as a builtin dict in pickle output
138 # serialized as a builtin dict in pickle output
139 return dict(data)
139 return dict(data)
140 @staticmethod
140 @staticmethod
141 def formatlist(data, name, fmt, sep):
141 def formatlist(data, name, fmt, sep):
142 '''convert iterable to appropriate list format'''
142 '''convert iterable to appropriate list format'''
143 return list(data)
143 return list(data)
144
144
class baseformatter(object):
    """Base of all formatters: collects items and delegates type conversion
    to the given converter class."""

    def __init__(self, ui, topic, opts, converter):
        self._ui = ui
        self._topic = topic
        self._style = opts.get("style")
        self._template = opts.get("template")
        self._converter = converter
        self._item = None
        # function to convert node to string suitable for this output
        self.hexfunc = hex

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        # flush pending output only on a clean exit
        if exctype is None:
            self.end()

    def _showitem(self):
        '''show a formatted item once all data is collected'''

    def startitem(self):
        '''begin an item in the format list'''
        if self._item is not None:
            self._showitem()
        self._item = {}

    def formatdate(self, date, fmt='%a %b %d %H:%M:%S %Y %1%2'):
        '''convert date tuple to appropriate format'''
        return self._converter.formatdate(date, fmt)

    def formatdict(self, data, key='key', value='value', fmt='%s=%s', sep=' '):
        '''convert dict or key-value pairs to appropriate dict format'''
        return self._converter.formatdict(data, key, value, fmt, sep)

    def formatlist(self, data, name, fmt='%s', sep=' '):
        '''convert iterable to appropriate list format'''
        # name is mandatory argument for now, but it could be optional if
        # we have default template keyword, e.g. {item}
        return self._converter.formatlist(data, name, fmt, sep)

    def context(self, **ctxs):
        '''insert context objects to be used to render template keywords'''

    def data(self, **data):
        '''insert data into item that's not shown in default output'''
        self._item.update(pycompat.byteskwargs(data))

    def write(self, fields, deftext, *fielddata, **opts):
        '''do default text output while assigning data to item'''
        fieldkeys = fields.split()
        assert len(fieldkeys) == len(fielddata)
        self._item.update(zip(fieldkeys, fielddata))

    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        '''do conditional write (primarily for plain formatter)'''
        # structured formatters record the data unconditionally
        fieldkeys = fields.split()
        assert len(fieldkeys) == len(fielddata)
        self._item.update(zip(fieldkeys, fielddata))

    def plain(self, text, **opts):
        '''show raw text for non-templated mode'''

    def isplain(self):
        '''check for plain formatter usage'''
        return False

    def nested(self, field):
        '''sub formatter to store nested data in the specified field'''
        self._item[field] = data = []
        return _nestedformatter(self._ui, self._converter, data)

    def end(self):
        '''end output for the formatter'''
        if self._item is not None:
            self._showitem()
210
210
def nullformatter(ui, topic):
    '''formatter that prints nothing'''
    # baseformatter's hooks are all no-ops, so collected items are dropped
    return baseformatter(ui, topic, opts={}, converter=_nullconverter)
214
214
class _nestedformatter(baseformatter):
    '''build sub items and store them in the parent formatter'''

    def __init__(self, ui, converter, data):
        baseformatter.__init__(self, ui, topic='', opts={}, converter=converter)
        # list owned by the parent formatter; completed items land here
        self._data = data

    def _showitem(self):
        self._data.append(self._item)
222
222
223 def _iteritems(data):
223 def _iteritems(data):
224 '''iterate key-value pairs in stable order'''
224 '''iterate key-value pairs in stable order'''
225 if isinstance(data, dict):
225 if isinstance(data, dict):
226 return sorted(data.iteritems())
226 return sorted(data.iteritems())
227 return data
227 return data
228
228
229 class _plainconverter(object):
229 class _plainconverter(object):
230 '''convert non-primitive data types to text'''
230 '''convert non-primitive data types to text'''
231 @staticmethod
231 @staticmethod
232 def formatdate(date, fmt):
232 def formatdate(date, fmt):
233 '''stringify date tuple in the given format'''
233 '''stringify date tuple in the given format'''
234 return util.datestr(date, fmt)
234 return util.datestr(date, fmt)
235 @staticmethod
235 @staticmethod
236 def formatdict(data, key, value, fmt, sep):
236 def formatdict(data, key, value, fmt, sep):
237 '''stringify key-value pairs separated by sep'''
237 '''stringify key-value pairs separated by sep'''
238 return sep.join(fmt % (k, v) for k, v in _iteritems(data))
238 return sep.join(fmt % (k, v) for k, v in _iteritems(data))
239 @staticmethod
239 @staticmethod
240 def formatlist(data, name, fmt, sep):
240 def formatlist(data, name, fmt, sep):
241 '''stringify iterable separated by sep'''
241 '''stringify iterable separated by sep'''
242 return sep.join(fmt % e for e in data)
242 return sep.join(fmt % e for e in data)
243
243
class plainformatter(baseformatter):
    """the default text output scheme"""
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _plainconverter)
        # long hashes in debug mode, short ones otherwise
        self.hexfunc = hex if ui.debugflag else short
        if ui is out:
            self._write = ui.write
        else:
            self._write = lambda s, **opts: out.write(s)
    def startitem(self):
        pass
    def data(self, **data):
        pass
    def write(self, fields, deftext, *fielddata, **opts):
        self._write(deftext % fielddata, **opts)
    def condwrite(self, cond, fields, deftext, *fielddata, **opts):
        """do conditional write"""
        if not cond:
            return
        self._write(deftext % fielddata, **opts)
    def plain(self, text, **opts):
        self._write(text, **opts)
    def isplain(self):
        return True
    def nested(self, field):
        # nested data will be directly written to ui
        return self
    def end(self):
        pass
275
275
class debugformatter(baseformatter):
    """dump items as a Python-literal list, for debugging"""
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        self._out.write("%s = [\n" % self._topic)
    def _showitem(self):
        self._out.write(" %s,\n" % repr(self._item))
    def end(self):
        baseformatter.end(self)
        self._out.write("]\n")
286
286
class pickleformatter(baseformatter):
    """accumulate items and emit them as one pickle stream at end()"""
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        self._data = []
    def _showitem(self):
        self._data.append(self._item)
    def end(self):
        baseformatter.end(self)
        self._out.write(pickle.dumps(self._data))
297
297
class jsonformatter(baseformatter):
    """stream items out as a JSON array of objects"""
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _nullconverter)
        self._out = out
        self._out.write("[")
        self._first = True
    def _showitem(self):
        # comma-separate items after the first one
        if not self._first:
            self._out.write(",")
        self._first = False

        self._out.write("\n {\n")
        firstfield = True
        for k, v in sorted(self._item.items()):
            if not firstfield:
                self._out.write(",\n")
            firstfield = False
            u = templatefilters.json(v, paranoid=False)
            self._out.write(' "%s": %s' % (k, u))
        self._out.write("\n }")
    def end(self):
        baseformatter.end(self)
        self._out.write("\n]\n")
323
323
class _templateconverter(object):
    """convert non-primitive data types to be processed by templater"""

    @staticmethod
    def formatdate(date, fmt):
        """return date tuple (the templater formats it itself)"""
        return date

    @staticmethod
    def formatdict(data, key, value, fmt, sep):
        """build object that can be evaluated as either plain string or dict"""
        data = util.sortdict(_iteritems(data))
        def gen():
            yield _plainconverter.formatdict(data, key, value, fmt, sep)
        return templatekw.hybriddict(data, key=key, value=value, fmt=fmt,
                                     gen=gen())

    @staticmethod
    def formatlist(data, name, fmt, sep):
        """build object that can be evaluated as either plain string or list"""
        data = list(data)
        def gen():
            yield _plainconverter.formatlist(data, name, fmt, sep)
        return templatekw.hybridlist(data, name=name, fmt=fmt, gen=gen())
345
345
class templateformatter(baseformatter):
    '''render each item by expanding the user-given -T/--template spec'''
    def __init__(self, ui, out, topic, opts):
        baseformatter.__init__(self, ui, topic, opts, _templateconverter)
        self._out = out
        spec = lookuptemplate(ui, topic, opts.get('template', ''))
        self._tref = spec.ref
        self._t = loadtemplater(ui, spec, cache=templatekw.defaulttempl)
        self._counter = itertools.count()
        self._cache = {} # for templatekw/funcs to store reusable data
    def context(self, **ctxs):
        '''insert context objects to be used to render template keywords'''
        assert all(k == 'ctx' for k in ctxs)
        self._item.update(ctxs)
    def _showitem(self):
        # TODO: add support for filectx. probably each template keyword or
        # function will have to declare dependent resources. e.g.
        # @templatekeyword(..., requires=('ctx',))
        props = {}
        if 'ctx' in self._item:
            props.update(templatekw.keywords)
        props['index'] = next(self._counter)
        # explicitly-defined fields precede templatekw
        props.update(self._item)
        if 'ctx' in self._item:
            # but template resources must be always available
            props['templ'] = self._t
            props['repo'] = props['ctx'].repo()
            props['revcache'] = {}
        g = self._t(self._tref, ui=self._ui, cache=self._cache, **props)
        self._out.write(templater.stringify(g))
376
376
# (ref, tmpl, mapfile): how to refer to a template plus where it comes
# from — exactly one of 'tmpl' (literal text) and 'mapfile' (path) is set
templatespec = collections.namedtuple(r'templatespec',
                                      r'ref tmpl mapfile')
379
379
def lookuptemplate(ui, topic, tmpl):
    """Find the template matching the given -T/--template spec 'tmpl'

    'tmpl' can be any of the following:

    - a literal template (e.g. '{rev}')
    - a map-file name or path (e.g. 'changelog')
    - a reference to [templates] in config file
    - a path to raw template file

    A map file defines a stand-alone template environment. If a map file
    is selected, all templates defined in the file will be loaded, and the
    template matching the given topic will be rendered. No aliases will be
    loaded from user config.

    Returns a templatespec; the checks below run in priority order, so a
    literal '{...}' always wins over file/config lookups.
    """

    # looks like a literal template?
    if '{' in tmpl:
        return templatespec(topic, tmpl, None)

    # perhaps a stock style?
    if not os.path.split(tmpl)[0]:
        mapname = (templater.templatepath('map-cmdline.' + tmpl)
                   or templater.templatepath(tmpl))
        if mapname and os.path.isfile(mapname):
            return templatespec(topic, None, mapname)

    # perhaps it's a reference to [templates]
    t = ui.config('templates', tmpl)
    if t:
        return templatespec(topic, templater.unquotestring(t), None)

    if tmpl == 'list':
        ui.write(_("available styles: %s\n") % templater.stylelist())
        raise error.Abort(_("specify a template"))

    # perhaps it's a path to a map or a template
    if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
        # is it a mapfile for a style?
        if os.path.basename(tmpl).startswith("map-"):
            return templatespec(topic, None, os.path.realpath(tmpl))
        with util.posixfile(tmpl, 'rb') as f:
            tmpl = f.read()
        return templatespec(topic, tmpl, None)

    # constant string?
    return templatespec(topic, tmpl, None)
427
427
def loadtemplater(ui, spec, cache=None):
    """Create a templater from either a literal template or loading from
    a map file

    'spec' carries at most one of tmpl (literal text) and mapfile (path);
    a map file defines a full stand-alone template environment.
    """
    assert not (spec.tmpl and spec.mapfile)
    if spec.mapfile:
        return templater.templater.frommapfile(spec.mapfile, cache=cache)
    return _maketemplater(ui, spec.ref, spec.tmpl, cache=cache)
435
435
def maketemplater(ui, tmpl, cache=None):
    """Create a templater from a string template 'tmpl'

    The template is registered under the unnamed ('') reference.
    """
    return _maketemplater(ui, '', tmpl, cache=cache)

def _maketemplater(ui, topic, tmpl, cache=None):
    # [templatealias] entries apply to ad-hoc templates, but not to
    # map files (those are handled by loadtemplater)
    aliases = ui.configitems('templatealias')
    t = templater.templater(cache=cache, aliases=aliases)
    if tmpl:
        t.cache[topic] = tmpl
    return t
443
446
def formatter(ui, out, topic, opts):
    """pick the output formatter matching -T/--template (and config)"""
    template = opts.get("template", "")
    # the reserved template names select a dedicated machine-readable
    # formatter instead of the templater
    special = {"json": jsonformatter,
               "pickle": pickleformatter,
               "debug": debugformatter}
    if template in special:
        return special[template](ui, out, topic, opts)
    if template:
        return templateformatter(ui, out, topic, opts)
    # developer config: ui.formatdebug
    if ui.configbool('ui', 'formatdebug'):
        return debugformatter(ui, out, topic, opts)
    # deprecated config: ui.formatjson
    if ui.configbool('ui', 'formatjson'):
        return jsonformatter(ui, out, topic, opts)
    return plainformatter(ui, out, topic, opts)
461
464
@contextlib.contextmanager
def openformatter(ui, filename, topic, opts):
    """Create a formatter that writes outputs to the specified file

    Must be invoked using the 'with' statement; the file and the
    formatter are both closed on exit.
    """
    with util.posixfile(filename, 'wb') as out:
        with formatter(ui, out, topic, opts) as fm:
            yield fm
471
474
472 @contextlib.contextmanager
475 @contextlib.contextmanager
473 def _neverending(fm):
476 def _neverending(fm):
474 yield fm
477 yield fm
475
478
def maybereopen(fm, filename, opts):
    """Create a formatter backed by file if filename specified, else return
    the given formatter

    Must be invoked using the 'with' statement. This will never call
    fm.end() of the given formatter.
    """
    if not filename:
        return _neverending(fm)
    return openformatter(fm._ui, filename, fm._topic, opts)
@@ -1,1375 +1,1379 b''
1 # templater.py - template expansion for output
1 # templater.py - template expansion for output
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11 import re
11 import re
12 import types
12 import types
13
13
14 from .i18n import _
14 from .i18n import _
15 from . import (
15 from . import (
16 color,
16 color,
17 config,
17 config,
18 encoding,
18 encoding,
19 error,
19 error,
20 minirst,
20 minirst,
21 parser,
21 parser,
22 pycompat,
22 pycompat,
23 registrar,
23 registrar,
24 revset as revsetmod,
24 revset as revsetmod,
25 revsetlang,
25 revsetlang,
26 templatefilters,
26 templatefilters,
27 templatekw,
27 templatekw,
28 util,
28 util,
29 )
29 )
30
30
31 # template parsing
31 # template parsing
32
32
# grammar table consumed by parser.parser; the binding strengths define
# operator precedence for template expressions
elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
    "%": (16, None, None, ("%", 16), None),
    "|": (15, None, None, ("|", 15), None),
    "*": (5, None, None, ("*", 5), None),
    "/": (5, None, None, ("/", 5), None),
    "+": (4, None, None, ("+", 4), None),
    "-": (4, None, ("negate", 19), ("-", 4), None),
    "=": (3, None, None, ("keyvalue", 3), None),
    ",": (2, None, None, ("list", 2), None),
    ")": (0, None, None, None, None),
    "integer": (0, "integer", None, None, None),
    "symbol": (0, "symbol", None, None, None),
    "string": (0, "string", None, None, None),
    "template": (0, "template", None, None, None),
    "end": (0, None, None, None, None),
}
51
51
def tokenize(program, start, end, term=None):
    """Parse a template expression into a stream of tokens, which must end
    with term if specified

    Each token is a (type, data, position) tuple; a final ('end', ...)
    token is emitted when 'term' (or the end of input) is reached.
    """
    pos = start
    program = pycompat.bytestr(program)
    while pos < end:
        c = program[pos]
        if c.isspace(): # skip inter-token whitespace
            pass
        elif c in "(=,)%|+-*/": # handle simple operators
            yield (c, None, pos)
        elif c in '"\'': # handle quoted templates
            s = pos + 1
            data, pos = _parsetemplate(program, s, end, c)
            yield ('template', data, s)
            pos -= 1
        elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'):
            # handle quoted strings
            c = program[pos + 1]
            s = pos = pos + 2
            while pos < end: # find closing quote
                d = program[pos]
                if d == '\\': # skip over escaped characters
                    pos += 2
                    continue
                if d == c:
                    yield ('string', program[s:pos], s)
                    break
                pos += 1
            else:
                raise error.ParseError(_("unterminated string"), s)
        elif c.isdigit():
            s = pos
            while pos < end:
                d = program[pos]
                if not d.isdigit():
                    break
                pos += 1
            yield ('integer', program[s:pos], s)
            pos -= 1
        elif (c == '\\' and program[pos:pos + 2] in (r"\'", r'\"')
              or c == 'r' and program[pos:pos + 3] in (r"r\'", r'r\"')):
            # handle escaped quoted strings for compatibility with 2.9.2-3.4,
            # where some of nested templates were preprocessed as strings and
            # then compiled. therefore, \"...\" was allowed. (issue4733)
            #
            # processing flow of _evalifliteral() at 5ab28a2e9962:
            # outer template string    -> stringify()  -> compiletemplate()
            # ------------------------    ------------    ------------------
            # {f("\\\\ {g(\"\\\"\")}"}    \\ {g("\"")}    [r'\\', {g("\"")}]
            #             ~~~~~~~~
            #             escaped quoted string
            if c == 'r':
                pos += 1
                token = 'string'
            else:
                token = 'template'
            quote = program[pos:pos + 2]
            s = pos = pos + 2
            while pos < end: # find closing escaped quote
                if program.startswith('\\\\\\', pos, end):
                    pos += 4 # skip over double escaped characters
                    continue
                if program.startswith(quote, pos, end):
                    # interpret as if it were a part of an outer string
                    data = parser.unescapestr(program[s:pos])
                    if token == 'template':
                        data = _parsetemplate(data, 0, len(data))[0]
                    yield (token, data, s)
                    pos += 1
                    break
                pos += 1
            else:
                raise error.ParseError(_("unterminated string"), s)
        elif c.isalnum() or c in '_':
            s = pos
            pos += 1
            while pos < end: # find end of symbol
                d = program[pos]
                if not (d.isalnum() or d == "_"):
                    break
                pos += 1
            sym = program[s:pos]
            yield ('symbol', sym, s)
            pos -= 1
        elif c == term:
            yield ('end', None, pos + 1)
            return
        else:
            raise error.ParseError(_("syntax error"), pos)
        pos += 1
    if term:
        raise error.ParseError(_("unterminated template expansion"), start)
    yield ('end', None, pos)
146
146
def _parsetemplate(tmpl, start, stop, quote=''):
    r"""Parse tmpl[start:stop] into a list of (type, data) nodes

    Returns (parsed, pos) where pos is the position after the consumed
    portion. If 'quote' is given, parsing stops at the closing quote.

    >>> _parsetemplate('foo{bar}"baz', 0, 12)
    ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
    >>> _parsetemplate('foo{bar}"baz', 0, 12, quote='"')
    ([('string', 'foo'), ('symbol', 'bar')], 9)
    >>> _parsetemplate('foo"{bar}', 0, 9, quote='"')
    ([('string', 'foo')], 4)
    >>> _parsetemplate(r'foo\"bar"baz', 0, 12, quote='"')
    ([('string', 'foo"'), ('string', 'bar')], 9)
    >>> _parsetemplate(r'foo\\"bar', 0, 10, quote='"')
    ([('string', 'foo\\')], 6)
    """
    parsed = []
    sepchars = '{' + quote
    pos = start
    p = parser.parser(elements)
    while pos < stop:
        # find the next separator ('{' or the closing quote), treating
        # "not found" (-1) as larger than any real position
        n = min((tmpl.find(c, pos, stop) for c in sepchars),
                key=lambda n: (n < 0, n))
        if n < 0:
            parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
            pos = stop
            break
        c = tmpl[n]
        # bs: number of backslashes immediately preceding the separator
        bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
        if bs % 2 == 1:
            # escaped (e.g. '\{', '\\\{', but not '\\{')
            parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c))
            pos = n + 1
            continue
        if n > pos:
            parsed.append(('string', parser.unescapestr(tmpl[pos:n])))
        if c == quote:
            return parsed, n + 1

        parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
        parsed.append(parseres)

    if quote:
        raise error.ParseError(_("unterminated string"), start)
    return parsed, pos
189
189
190 def _unnesttemplatelist(tree):
190 def _unnesttemplatelist(tree):
191 """Expand list of templates to node tuple
191 """Expand list of templates to node tuple
192
192
193 >>> def f(tree):
193 >>> def f(tree):
194 ... print prettyformat(_unnesttemplatelist(tree))
194 ... print prettyformat(_unnesttemplatelist(tree))
195 >>> f(('template', []))
195 >>> f(('template', []))
196 ('string', '')
196 ('string', '')
197 >>> f(('template', [('string', 'foo')]))
197 >>> f(('template', [('string', 'foo')]))
198 ('string', 'foo')
198 ('string', 'foo')
199 >>> f(('template', [('string', 'foo'), ('symbol', 'rev')]))
199 >>> f(('template', [('string', 'foo'), ('symbol', 'rev')]))
200 (template
200 (template
201 ('string', 'foo')
201 ('string', 'foo')
202 ('symbol', 'rev'))
202 ('symbol', 'rev'))
203 >>> f(('template', [('symbol', 'rev')])) # template(rev) -> str
203 >>> f(('template', [('symbol', 'rev')])) # template(rev) -> str
204 (template
204 (template
205 ('symbol', 'rev'))
205 ('symbol', 'rev'))
206 >>> f(('template', [('template', [('string', 'foo')])]))
206 >>> f(('template', [('template', [('string', 'foo')])]))
207 ('string', 'foo')
207 ('string', 'foo')
208 """
208 """
209 if not isinstance(tree, tuple):
209 if not isinstance(tree, tuple):
210 return tree
210 return tree
211 op = tree[0]
211 op = tree[0]
212 if op != 'template':
212 if op != 'template':
213 return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])
213 return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])
214
214
215 assert len(tree) == 2
215 assert len(tree) == 2
216 xs = tuple(_unnesttemplatelist(x) for x in tree[1])
216 xs = tuple(_unnesttemplatelist(x) for x in tree[1])
217 if not xs:
217 if not xs:
218 return ('string', '') # empty template ""
218 return ('string', '') # empty template ""
219 elif len(xs) == 1 and xs[0][0] == 'string':
219 elif len(xs) == 1 and xs[0][0] == 'string':
220 return xs[0] # fast path for string with no template fragment "x"
220 return xs[0] # fast path for string with no template fragment "x"
221 else:
221 else:
222 return (op,) + xs
222 return (op,) + xs
223
223
def parse(tmpl):
    """Parse an unquoted template string into a flattened tree"""
    parsed, stop = _parsetemplate(tmpl, 0, len(tmpl))
    assert stop == len(tmpl), 'unquoted template should be consumed'
    return _unnesttemplatelist(('template', parsed))
229
229
def _parseexpr(expr):
    """Parse a template expression into tree

    >>> _parseexpr('"foo"')
    ('string', 'foo')
    >>> _parseexpr('foo(bar)')
    ('func', ('symbol', 'foo'), ('symbol', 'bar'))
    >>> _parseexpr('foo(')
    Traceback (most recent call last):
    ...
    ParseError: ('not a prefix: end', 4)
    >>> _parseexpr('"foo" "bar"')
    Traceback (most recent call last):
    ...
    ParseError: ('invalid token', 7)
    """
    end = len(expr)
    tree, pos = parser.parser(elements).parse(tokenize(expr, 0, end))
    # trailing garbage after a complete expression is an error
    if pos != end:
        raise error.ParseError(_('invalid token'), pos)
    return _unnesttemplatelist(tree)
251
251
def prettyformat(tree):
    """Render a parsed template tree as an indented debug string."""
    leafnodes = ('integer', 'string', 'symbol')
    return parser.prettyformat(tree, leafnodes)
254
254
def compileexp(exp, context, curmethods):
    """Compile parsed template tree to (func, data) pair"""
    op = exp[0]
    try:
        buildfn = curmethods[op]
    except KeyError:
        raise error.ParseError(_("unknown method '%s'") % op)
    return buildfn(exp, context)
261
261
262 # template evaluation
262 # template evaluation
263
263
def getsymbol(exp):
    """Extract the name carried by a 'symbol' node; reject anything else."""
    if exp[0] != 'symbol':
        raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
    return exp[1]
268
268
def getlist(x):
    """Flatten a left-nested 'list' node into a Python list of nodes."""
    if not x:
        return []
    elts = []
    # 'list' nodes nest on the left: ('list', rest, item)
    while x and x[0] == 'list':
        elts.append(x[2])
        x = x[1]
    if x:
        elts.append(x)
    elts.reverse()
    return elts
275
275
def gettemplate(exp, context):
    """Compile given template tree or load named template from map file;
    returns (func, data) pair"""
    op = exp[0]
    if op == 'template' or op == 'string':
        return compileexp(exp, context, methods)
    if op == 'symbol':
        # unlike runsymbol(), here 'symbol' is always taken as template name
        # even if it exists in mapping. this allows us to override mapping
        # by web templates, e.g. 'changelogtag' is redefined in map file.
        return context._load(exp[1])
    raise error.ParseError(_("expected template specifier"))
287
287
def findsymbolicname(arg):
    """Find symbolic name for the given compiled expression; returns None
    if nothing found reliably"""
    func, data = arg
    # a filter application wraps its operand as data[0]; peel filters off
    while func is runfilter:
        func, data = data[0]
    if func is runsymbol:
        return data
    return None
299
299
def evalfuncarg(context, mapping, arg):
    """Evaluate a compiled (func, data) argument to a concrete value."""
    func, data = arg
    thing = func(context, mapping, data)
    # func() may return string, generator of strings or arbitrary object such
    # as date tuple, but filter does not want generator.
    if isinstance(thing, types.GeneratorType):
        return stringify(thing)
    return thing
308
308
def evalboolean(context, mapping, arg):
    """Evaluate given argument as boolean, but also takes boolean literals"""
    func, data = arg
    if func is not runsymbol:
        thing = func(context, mapping, data)
    else:
        thing = func(context, mapping, data, default=None)
        if thing is None:
            # not a template keyword, takes as a boolean literal
            thing = util.parsebool(data)
    if isinstance(thing, bool):
        return thing
    # other objects are evaluated as strings, which means 0 is True, but
    # empty dict/list should be False as they are expected to be ''
    return bool(stringify(thing))
324
324
def evalinteger(context, mapping, arg, err):
    """Evaluate arg and coerce the result to int; raise ParseError(err)
    when the value is not a valid integer."""
    value = evalfuncarg(context, mapping, arg)
    try:
        return int(value)
    except (TypeError, ValueError):
        raise error.ParseError(err)
331
331
def evalstring(context, mapping, arg):
    """Evaluate a compiled argument and flatten the result to a string."""
    return stringify(arg[0](context, mapping, arg[1]))
335
335
def evalstringliteral(context, mapping, arg):
    """Evaluate given argument as string template, but returns symbol name
    if it is unknown"""
    func, data = arg
    if func is runsymbol:
        # fall back to the symbol's own name when it isn't a known keyword
        return stringify(func(context, mapping, data, default=data))
    return stringify(func(context, mapping, data))
345
345
def runinteger(context, mapping, data):
    """Return the integer literal carried in data."""
    return int(data)
348
348
def runstring(context, mapping, data):
    """Return the string literal carried in data, unchanged."""
    return data
351
351
def _recursivesymbolblocker(key):
    """Return a callable that aborts with a recursion error for `key`.

    Installed into a mapping as a poison value to detect self-reference.
    """
    def _showrecursion(**args):
        raise error.Abort(_("recursive reference '%s' in template") % key)
    return _showrecursion
356
356
def _runrecursivesymbol(context, mapping, key):
    """Abort rendering: `key` recursively refers to itself."""
    raise error.Abort(_("recursive reference '%s' in template") % key)
359
359
def runsymbol(context, mapping, key, default=''):
    """Resolve a symbol: mapping first, then context defaults, then a
    template of the same name, finally `default`."""
    value = mapping.get(key)
    if value is None:
        value = context._defaults.get(key)
    if value is None:
        # put poison to cut recursion. we can't move this to parsing phase
        # because "x = {x}" is allowed if "x" is a keyword. (issue4758)
        guarded = mapping.copy()
        guarded[key] = _recursivesymbolblocker(key)
        try:
            value = context.process(key, guarded)
        except TemplateNotFound:
            value = default
    # keyword functions receive the full mapping as keyword arguments
    if callable(value):
        return value(**mapping)
    return value
376
376
def buildtemplate(exp, context):
    """Compile a 'template' node into (runtemplate, compiled fragments)."""
    compiled = [compileexp(e, context, methods) for e in exp[1:]]
    return (runtemplate, compiled)
380
380
def runtemplate(context, mapping, template):
    """Lazily render each compiled (func, data) fragment of a template."""
    return (func(context, mapping, data) for func, data in template)
384
384
def buildfilter(exp, context):
    """Compile 'expr|name'; name may be a filter or a one-argument function.

    Filters take precedence over functions of the same name.
    """
    name = getsymbol(exp[2])
    if name in context._filters:
        arg = compileexp(exp[1], context, methods)
        return (runfilter, (arg, context._filters[name]))
    if name in funcs:
        func = funcs[name]
        return (func, _buildfuncargs(exp[1], context, methods, name,
                                     func._argspec))
    raise error.ParseError(_("unknown function '%s'") % name)
396
396
def runfilter(context, mapping, data):
    """Evaluate the filter operand and apply the filter to it."""
    arg, filt = data
    value = evalfuncarg(context, mapping, arg)
    try:
        return filt(value)
    except (ValueError, AttributeError, TypeError):
        # name the offending keyword in the message when we can identify it
        symname = findsymbolicname(arg)
        if symname:
            msg = (_("template filter '%s' is not compatible with keyword '%s'")
                   % (filt.func_name, symname))
        else:
            msg = _("incompatible use of template filter '%s'") % filt.func_name
        raise error.Abort(msg)
410
410
def buildmap(exp, context):
    """Compile 'expr % template' into (runmap, (func, data, tfunc, tdata))."""
    darg = compileexp(exp[1], context, methods)
    targ = gettemplate(exp[2], context)
    return (runmap, darg + targ)
415
415
def runmap(context, mapping, data):
    """Render the template once per item yielded by the mapped expression."""
    func, data, tfunc, tdata = data
    mapped = func(context, mapping, data)
    if util.safehasattr(mapped, 'itermaps'):
        items = mapped.itermaps()
    else:
        try:
            items = iter(mapped)
        except TypeError:
            if func is runsymbol:
                raise error.ParseError(_("keyword '%s' is not iterable") % data)
            else:
                raise error.ParseError(_("%r is not iterable") % mapped)

    for idx, item in enumerate(items):
        itemmapping = mapping.copy()
        itemmapping['index'] = idx
        if isinstance(item, dict):
            itemmapping.update(item)
            itemmapping['originalnode'] = mapping.get('node')
            yield tfunc(context, itemmapping, tdata)
        else:
            # v is not an iterable of dicts, this happen when 'key'
            # has been fully expanded already and format is useless.
            # If so, return the expanded value.
            yield item
442
442
def buildnegate(exp, context):
    """Compile unary minus applied to an expression."""
    return (runnegate, compileexp(exp[1], context, exprmethods))
446
446
def runnegate(context, mapping, data):
    """Evaluate the operand as an integer and return its negation."""
    return -evalinteger(context, mapping, data,
                        _('negation needs an integer argument'))
451
451
def buildarithmetic(exp, context, func):
    """Compile a binary arithmetic operator; func does the actual math."""
    operands = (compileexp(exp[1], context, exprmethods),
                compileexp(exp[2], context, exprmethods))
    return (runarithmetic, (func,) + operands)
456
456
def runarithmetic(context, mapping, data):
    """Apply func to two integer operands; abort on division by zero."""
    func, left, right = data
    err = _('arithmetic only defined on integers')
    left = evalinteger(context, mapping, left, err)
    right = evalinteger(context, mapping, right, err)
    try:
        return func(left, right)
    except ZeroDivisionError:
        raise error.Abort(_('division by zero is not defined'))
467
467
def buildfunc(exp, context):
    """Compile 'name(args)'; name may be a function or a unary filter.

    Unlike buildfilter(), functions take precedence over filters here.
    """
    name = getsymbol(exp[1])
    if name in funcs:
        func = funcs[name]
        args = _buildfuncargs(exp[2], context, exprmethods, name,
                              func._argspec)
        return (func, args)
    if name in context._filters:
        args = _buildfuncargs(exp[2], context, exprmethods, name,
                              argspec=None)
        if len(args) != 1:
            raise error.ParseError(_("filter %s expects one argument") % name)
        return (runfilter, (args[0], context._filters[name]))
    raise error.ParseError(_("unknown function '%s'") % name)
481
481
def _buildfuncargs(exp, context, curmethods, funcname, argspec):
    """Compile parsed tree of function arguments into list or dict of
    (func, data) pairs

    >>> context = engine(lambda t: (runsymbol, t))
    >>> def fargs(expr, argspec):
    ...     x = _parseexpr(expr)
    ...     n = getsymbol(x[1])
    ...     return _buildfuncargs(x[2], context, exprmethods, n, argspec)
    >>> fargs('a(l=1, k=2)', 'k l m').keys()
    ['l', 'k']
    >>> args = fargs('a(opts=1, k=2)', '**opts')
    >>> args.keys(), args['opts'].keys()
    (['opts'], ['opts', 'k'])
    """
    def _compilelist(xs):
        return [compileexp(x, context, curmethods) for x in xs]

    def _compiledict(xs):
        return util.sortdict((k, compileexp(x, context, curmethods))
                             for k, x in xs.iteritems())

    if not argspec:
        # filter or function with no argspec: return list of positional args
        return _compilelist(getlist(exp))

    # function with argspec: return dict of named args
    _poskeys, varkey, _keys, optkey = argspec = parser.splitargspec(argspec)
    treeargs = parser.buildargsdict(getlist(exp), funcname, argspec,
                                    keyvaluenode='keyvalue', keynode='symbol')
    compargs = util.sortdict()
    if varkey:
        # variadic positional args are collected under their own key
        compargs[varkey] = _compilelist(treeargs.pop(varkey))
    if optkey:
        # arbitrary keyword args (**kwargs) become a nested sortdict
        compargs[optkey] = _compiledict(treeargs.pop(optkey))
    compargs.update(_compiledict(treeargs))
    return compargs
518
518
def buildkeyvaluepair(exp, content):
    """Reject a bare key=value pair outside of a function argument list."""
    raise error.ParseError(_("can't use a key-value pair in this context"))
521
521
# dict of template built-in functions, populated by the @templatefunc
# decorator below
funcs = {}

templatefunc = registrar.templatefunc(funcs)
526
526
@templatefunc('date(date[, fmt])')
def date(context, mapping, args):
    """Format a date. See :hg:`help dates` for formatting
    strings. The default is a Unix date format, including the timezone:
    "Mon Sep 04 15:13:13 2006 0700"."""
    if not (1 <= len(args) <= 2):
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects one or two arguments"))

    # renamed from 'date': the local shadowed the function's own name
    dateobj = evalfuncarg(context, mapping, args[0])
    fmt = None
    if len(args) == 2:
        fmt = evalstring(context, mapping, args[1])
    try:
        if fmt is None:
            return util.datestr(dateobj)
        else:
            return util.datestr(dateobj, fmt)
    except (TypeError, ValueError):
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects a date information"))
548
548
@templatefunc('dict([[key=]value...])', argspec='*args **kwargs')
def dict_(context, mapping, args):
    """Construct a dict from key-value pairs. A key may be omitted if
    a value expression can provide an unambiguous name."""
    data = util.sortdict()

    # positional arguments must carry an inferable symbolic name
    for arg in args['args']:
        key = findsymbolicname(arg)
        if not key:
            raise error.ParseError(_('dict key cannot be inferred'))
        if key in data or key in args['kwargs']:
            raise error.ParseError(_("duplicated dict key '%s' inferred") % key)
        data[key] = evalfuncarg(context, mapping, arg)

    data.update((k, evalfuncarg(context, mapping, v))
                for k, v in args['kwargs'].iteritems())
    return templatekw.hybriddict(data)
566
566
@templatefunc('diff([includepattern [, excludepattern]])')
def diff(context, mapping, args):
    """Show a diff, optionally
    specifying files to include or exclude."""
    if len(args) > 2:
        # i18n: "diff" is a keyword
        raise error.ParseError(_("diff expects zero, one, or two arguments"))

    def _patterns(i):
        # return [pattern] for a non-empty i-th argument, else []
        if i >= len(args):
            return []
        pat = evalstring(context, mapping, args[i]).strip()
        return [pat] if pat else []

    ctx = mapping['ctx']
    m = ctx.match([], _patterns(0), _patterns(1))
    return ''.join(ctx.diff(match=m))
586
586
@templatefunc('files(pattern)')
def files(context, mapping, args):
    """All files of the current changeset matching the pattern. See
    :hg:`help patterns`."""
    # idiom fix: was 'if not len(args) == 1'
    if len(args) != 1:
        # i18n: "files" is a keyword
        raise error.ParseError(_("files expects one argument"))

    raw = evalstring(context, mapping, args[0])
    ctx = mapping['ctx']
    m = ctx.match([raw])
    files = list(ctx.matches(m))
    return templatekw.showlist("file", files, mapping)
600
600
@templatefunc('fill(text[, width[, initialident[, hangindent]]])')
def fill(context, mapping, args):
    """Fill many
    paragraphs with optional indentation. See the "fill" filter."""
    if not (1 <= len(args) <= 4):
        # i18n: "fill" is a keyword
        raise error.ParseError(_("fill expects one to four arguments"))

    text = evalstring(context, mapping, args[0])
    width = 76
    initindent = ''
    hangindent = ''
    # optional arguments are consumed positionally, left to right
    if len(args) >= 2:
        width = evalinteger(context, mapping, args[1],
                            # i18n: "fill" is a keyword
                            _("fill expects an integer width"))
        if len(args) >= 3:
            initindent = evalstring(context, mapping, args[2])
        if len(args) >= 4:
            hangindent = evalstring(context, mapping, args[3])

    return templatefilters.fill(text, width, initindent, hangindent)
624
624
@templatefunc('formatnode(node)')
def formatnode(context, mapping, args):
    """Obtain the preferred form of a changeset hash. (DEPRECATED)"""
    if len(args) != 1:
        # i18n: "formatnode" is a keyword
        raise error.ParseError(_("formatnode expects one argument"))

    ui = mapping['ui']
    node = evalstring(context, mapping, args[0])
    # full hash in debug mode, short form otherwise
    return node if ui.debugflag else templatefilters.short(node)
637
637
@templatefunc('pad(text, width[, fillchar=\' \'[, left=False]])',
              argspec='text width fillchar left')
def pad(context, mapping, args):
    """Pad text with a
    fill character."""
    if 'text' not in args or 'width' not in args:
        # i18n: "pad" is a keyword
        raise error.ParseError(_("pad() expects two to four arguments"))

    width = evalinteger(context, mapping, args['width'],
                        # i18n: "pad" is a keyword
                        _("pad() expects an integer width"))

    text = evalstring(context, mapping, args['text'])

    fillchar = ' '
    left = False
    if 'fillchar' in args:
        fillchar = evalstring(context, mapping, args['fillchar'])
        # color effects don't take up columns; only one visible char allowed
        if len(color.stripeffects(fillchar)) != 1:
            # i18n: "pad" is a keyword
            raise error.ParseError(_("pad() expects a single fill character"))
    if 'left' in args:
        left = evalboolean(context, mapping, args['left'])

    # measure display width, ignoring color escape sequences
    fillwidth = width - encoding.colwidth(color.stripeffects(text))
    if fillwidth <= 0:
        return text
    padding = fillchar * fillwidth
    return padding + text if left else text + padding
670
670
@templatefunc('indent(text, indentchars[, firstline])')
def indent(context, mapping, args):
    """Indents all non-empty lines
    with the characters given in the indentchars string. An optional
    third parameter will override the indent for the first line only
    if present."""
    if not (2 <= len(args) <= 3):
        # i18n: "indent" is a keyword
        raise error.ParseError(_("indent() expects two or three arguments"))

    text = evalstring(context, mapping, args[0])
    indent = evalstring(context, mapping, args[1])
    if len(args) == 3:
        firstline = evalstring(context, mapping, args[2])
    else:
        firstline = indent

    # the indent function doesn't indent the first line, so we do it here
    return templatefilters.indent(firstline + text, indent)
691
691
@templatefunc('get(dict, key)')
def get(context, mapping, args):
    """Get an attribute/key from an object. Some keywords
    are complex types. This function allows you to obtain the value of an
    attribute on these types."""
    if len(args) != 2:
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects two arguments"))

    dictarg = evalfuncarg(context, mapping, args[0])
    if not util.safehasattr(dictarg, 'get'):
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects a dict as first argument"))

    return dictarg.get(evalfuncarg(context, mapping, args[1]))
708
708
@templatefunc('if(expr, then[, else])')
def if_(context, mapping, args):
    """Conditionally execute based on the result of
    an expression."""
    if not (2 <= len(args) <= 3):
        # i18n: "if" is a keyword
        raise error.ParseError(_("if expects two or three arguments"))

    # pick the branch to render: 'then' when truthy, optional 'else' otherwise
    if evalboolean(context, mapping, args[0]):
        branch = args[1]
    elif len(args) == 3:
        branch = args[2]
    else:
        branch = None
    if branch is not None:
        func, data = branch
        yield func(context, mapping, data)
722
722
@templatefunc('ifcontains(needle, haystack, then[, else])')
def ifcontains(context, mapping, args):
    """Conditionally execute based
    on whether the item "needle" is in "haystack"."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifcontains" is a keyword
        raise error.ParseError(_("ifcontains expects three or four arguments"))

    needle = evalstring(context, mapping, args[0])
    haystack = evalfuncarg(context, mapping, args[1])

    if needle in haystack:
        func, data = args[2]
        yield func(context, mapping, data)
    elif len(args) == 4:
        func, data = args[3]
        yield func(context, mapping, data)
738
738
@templatefunc('ifeq(expr1, expr2, then[, else])')
def ifeq(context, mapping, args):
    """Conditionally execute based on
    whether 2 items are equivalent."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifeq" is a keyword
        raise error.ParseError(_("ifeq expects three or four arguments"))

    # both operands are compared by their string forms
    lhs = evalstring(context, mapping, args[0])
    rhs = evalstring(context, mapping, args[1])
    if lhs == rhs:
        func, data = args[2]
        yield func(context, mapping, data)
    elif len(args) == 4:
        func, data = args[3]
        yield func(context, mapping, data)
753
753
@templatefunc('join(list, sep)')
def join(context, mapping, args):
    """Join items in a list with a delimiter."""
    if not (1 <= len(args) <= 2):
        # i18n: "join" is a keyword
        raise error.ParseError(_("join expects one or two arguments"))

    items = args[0][0](context, mapping, args[0][1])
    if util.safehasattr(items, 'itermaps'):
        # hybrid list: render each mapping through its own join format
        fmt = items.joinfmt
        items = [fmt(m) for m in items.itermaps()]

    sep = " " if len(args) <= 1 else evalstring(context, mapping, args[1])

    for idx, item in enumerate(items):
        if idx:
            yield sep
        yield item
777
777
@templatefunc('label(label, expr)')
def label(context, mapping, args):
    """Apply a label to generated content. Content with
    a label applied can result in additional post-processing, such as
    automatic colorization."""
    if len(args) != 2:
        # i18n: "label" is a keyword
        raise error.ParseError(_("label expects two arguments"))

    ui = mapping['ui']
    content = evalstring(context, mapping, args[1])
    # preserve unknown symbol as literal so effects like 'red', 'bold',
    # etc. don't need to be quoted
    effects = evalstringliteral(context, mapping, args[0])

    return ui.label(content, effects)
794
794
@templatefunc('latesttag([pattern])')
def latesttag(context, mapping, args):
    """The global tags matching the given pattern on the
    most recent globally tagged ancestor of this changeset.
    If no such tags exist, the "{tag}" template resolves to
    the string "null"."""
    if len(args) > 1:
        # i18n: "latesttag" is a keyword
        raise error.ParseError(_("latesttag expects at most one argument"))

    # pattern is optional; None means "match all tags"
    pattern = evalstring(context, mapping, args[0]) if args else None
    return templatekw.showlatesttags(pattern, **mapping)
810
810
@templatefunc('localdate(date[, tz])')
def localdate(context, mapping, args):
    """Converts a date to the specified timezone.
    The default is local date."""
    if not (1 <= len(args) <= 2):
        # i18n: "localdate" is a keyword
        raise error.ParseError(_("localdate expects one or two arguments"))

    date = evalfuncarg(context, mapping, args[0])
    try:
        # normalize to a (unixtime, offset) tuple
        date = util.parsedate(date)
    except AttributeError: # not str nor date tuple
        # i18n: "localdate" is a keyword
        raise error.ParseError(_("localdate expects a date information"))
    if len(args) >= 2:
        tzoffset = None
        tz = evalfuncarg(context, mapping, args[1])
        if isinstance(tz, str):
            # first try a timezone string (e.g. "+0530"); any trailing
            # garbage invalidates the parse and forces the int fallback
            tzoffset, remainder = util.parsetimezone(tz)
            if remainder:
                tzoffset = None
        if tzoffset is None:
            # fall back to interpreting tz as a raw integer offset
            try:
                tzoffset = int(tz)
            except (TypeError, ValueError):
                # i18n: "localdate" is a keyword
                raise error.ParseError(_("localdate expects a timezone"))
    else:
        # no tz argument: use the local timezone's offset
        tzoffset = util.makedate()[1]
    # keep the original timestamp, only replace the displayed offset
    return (date[0], tzoffset)
841
841
@templatefunc('mod(a, b)')
def mod(context, mapping, args):
    """Calculate a mod b such that a / b + a mod b == a"""
    # use "!=" like every other arity check in this file, instead of the
    # non-idiomatic "not len(args) == 2"
    if len(args) != 2:
        # i18n: "mod" is a keyword
        raise error.ParseError(_("mod expects two arguments"))

    func = lambda a, b: a % b
    return runarithmetic(context, mapping, (func, args[0], args[1]))
851
851
@templatefunc('relpath(path)')
def relpath(context, mapping, args):
    """Convert a repository-absolute path into a filesystem path relative to
    the current working directory."""
    if len(args) != 1:
        # i18n: "relpath" is a keyword
        raise error.ParseError(_("relpath expects one argument"))

    repo = mapping['ctx'].repo()
    target = evalstring(context, mapping, args[0])
    return repo.pathto(target)
863
863
@templatefunc('revset(query[, formatargs...])')
def revset(context, mapping, args):
    """Execute a revision set query. See
    :hg:`help revset`."""
    # idiomatic emptiness test instead of "not len(args) > 0"
    if not args:
        # i18n: "revset" is a keyword
        raise error.ParseError(_("revset expects one or more arguments"))

    raw = evalstring(context, mapping, args[0])
    ctx = mapping['ctx']
    repo = ctx.repo()

    def query(expr):
        # evaluate a revset expression against this repo
        m = revsetmod.match(repo.ui, expr)
        return m(repo)

    if len(args) > 1:
        # extra arguments are interpolated into the query, so the result
        # can differ per call and is not cached
        formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]]
        revs = query(revsetlang.formatspec(raw, *formatargs))
        revs = list(revs)
    else:
        # plain queries are cached for the lifetime of the rendering
        revsetcache = mapping['cache'].setdefault("revsetcache", {})
        if raw in revsetcache:
            revs = revsetcache[raw]
        else:
            revs = query(raw)
            revs = list(revs)
            revsetcache[raw] = revs

    return templatekw.showrevslist("revision", revs, **mapping)
894
894
@templatefunc('rstdoc(text, style)')
def rstdoc(context, mapping, args):
    """Format reStructuredText."""
    if len(args) != 2:
        # i18n: "rstdoc" is a keyword
        raise error.ParseError(_("rstdoc expects two arguments"))

    source = evalstring(context, mapping, args[0])
    styletag = evalstring(context, mapping, args[1])

    return minirst.format(source, style=styletag, keep=['verbose'])
906
906
@templatefunc('separate(sep, args)', argspec='sep *args')
def separate(context, mapping, args):
    """Add a separator between non-empty arguments."""
    if 'sep' not in args:
        # i18n: "separate" is a keyword
        raise error.ParseError(_("separate expects at least one argument"))

    sep = evalstring(context, mapping, args['sep'])
    needsep = False
    for arg in args['args']:
        rendered = evalstring(context, mapping, arg)
        # empty renderings contribute nothing, not even a separator
        if not rendered:
            continue
        if needsep:
            yield sep
        needsep = True
        yield rendered
925
925
@templatefunc('shortest(node, minlength=4)')
def shortest(context, mapping, args):
    """Obtain the shortest representation of
    a node."""
    if not (1 <= len(args) <= 2):
        # i18n: "shortest" is a keyword
        raise error.ParseError(_("shortest() expects one or two arguments"))

    node = evalstring(context, mapping, args[0])

    minlength = 4
    if len(args) > 1:
        minlength = evalinteger(context, mapping, args[1],
                                # i18n: "shortest" is a keyword
                                _("shortest() expects an integer minlength"))

    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.
    cl = mapping['ctx']._repo.unfiltered().changelog
    def isvalid(test):
        # a prefix is valid when it unambiguously names our node AND
        # cannot be mistaken for a plain revision number
        try:
            if cl._partialmatch(test) is None:
                # ambiguous prefix: matches more than one node
                return False

            try:
                i = int(test)
                # if we are a pure int, then starting with zero will not be
                # confused as a rev; or, obviously, if the int is larger than
                # the value of the tip rev
                if test[0] == '0' or i > len(cl):
                    return True
                return False
            except ValueError:
                # contains a hex letter, so never parses as a rev number
                return True
        except error.RevlogError:
            return False
        except error.WdirUnsupported:
            # single 'ff...' match
            return True

    # search outward from max(6, minlength): shrink while the prefix stays
    # valid, grow while it is ambiguous, and stop at the first boundary
    shortest = node
    startlength = max(6, minlength)
    length = startlength
    while True:
        test = node[:length]
        if isvalid(test):
            shortest = test
            if length == minlength or length > startlength:
                return shortest
            length -= 1
        else:
            length += 1
            if len(shortest) <= length:
                return shortest
981
981
@templatefunc('strip(text[, chars])')
def strip(context, mapping, args):
    """Strip characters from a string. By default,
    strips all leading and trailing whitespace."""
    if not (1 <= len(args) <= 2):
        # i18n: "strip" is a keyword
        raise error.ParseError(_("strip expects one or two arguments"))

    text = evalstring(context, mapping, args[0])
    if len(args) == 1:
        return text.strip()
    return text.strip(evalstring(context, mapping, args[1]))
995
995
@templatefunc('sub(pattern, replacement, expression)')
def sub(context, mapping, args):
    """Perform text substitution
    using regular expressions."""
    if len(args) != 3:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub expects three arguments"))

    pat = evalstring(context, mapping, args[0])
    rpl = evalstring(context, mapping, args[1])
    src = evalstring(context, mapping, args[2])
    # compile separately so pattern and replacement errors are reported
    # with distinct messages
    try:
        compiled = re.compile(pat)
    except re.error:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub got an invalid pattern: %s") % pat)
    try:
        yield compiled.sub(rpl, src)
    except re.error:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)
1017
1017
@templatefunc('startswith(pattern, text)')
def startswith(context, mapping, args):
    """Returns the value from the "text" argument
    if it begins with the content from the "pattern" argument."""
    if len(args) != 2:
        # i18n: "startswith" is a keyword
        raise error.ParseError(_("startswith expects two arguments"))

    patn = evalstring(context, mapping, args[0])
    text = evalstring(context, mapping, args[1])
    return text if text.startswith(patn) else ''
1031
1031
@templatefunc('word(number, text[, separator])')
def word(context, mapping, args):
    """Return the nth word from a string."""
    if not (2 <= len(args) <= 3):
        # i18n: "word" is a keyword
        raise error.ParseError(_("word expects two or three arguments, got %d")
                               % len(args))

    num = evalinteger(context, mapping, args[0],
                      # i18n: "word" is a keyword
                      _("word expects an integer index"))
    text = evalstring(context, mapping, args[1])
    # None means "split on runs of whitespace", as str.split() does
    splitter = evalstring(context, mapping, args[2]) if len(args) == 3 else None

    tokens = text.split(splitter)
    # negative indexes count from the end; anything out of range yields ''
    if -len(tokens) <= num < len(tokens):
        return tokens[num]
    return ''
1054
1054
# methods to interpret function arguments or inner expressions (e.g. {_(x)})
# maps each parse-tree node type to a compiler producing a (runner, data) pair
exprmethods = {
    "integer": lambda e, c: (runinteger, e[1]),
    "string": lambda e, c: (runstring, e[1]),
    "symbol": lambda e, c: (runsymbol, e[1]),
    "template": buildtemplate,
    "group": lambda e, c: compileexp(e[1], c, exprmethods),
    # ".": buildmember,
    "|": buildfilter,
    "%": buildmap,
    "func": buildfunc,
    "keyvalue": buildkeyvaluepair,
    "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
    "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
    "negate": buildnegate,
    # integer division: template arithmetic has no float results
    "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
    "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
    }
1073
1073
# methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
# identical to exprmethods except that a bare integer is looked up as a
# keyword rather than treated as a literal
methods = exprmethods.copy()
methods["integer"] = exprmethods["symbol"] # '{1}' as variable
1077
1077
class _aliasrules(parser.basealiasrules):
    """Parsing and expansion rule set of template aliases"""
    _section = _('template alias')
    _parse = staticmethod(_parseexpr)

    @staticmethod
    def _trygetfunc(tree):
        """Return (name, args) if tree is func(...) or ...|filter; otherwise
        None"""
        op = tree[0]
        # func(...) form: name comes from the symbol node, args from the list
        if op == 'func' and tree[1][0] == 'symbol':
            return tree[1][1], getlist(tree[2])
        # expr|filter form: the filter name acts as a one-argument function
        if op == '|' and tree[2][0] == 'symbol':
            return tree[2][1], [tree[1]]
1091
1091
def expandaliases(tree, aliases):
    """Return a new tree with the given aliases expanded"""
    return _aliasrules.expand(_aliasrules.buildmap(aliases), tree)
1096
1096
# template engine

# convenience alias: collapse any template output into a single string
stringify = templatefilters.stringify
1100
1100
def _flatten(thing):
    '''yield a single stream from a possibly nested set of iterators'''
    # unwrap a hybrid list/dict wrapper (if any) to reach the plain value
    thing = templatekw.unwraphybrid(thing)
    if isinstance(thing, str):
        yield thing
    elif thing is None:
        pass
    elif not util.safehasattr(thing, '__iter__'):
        yield str(thing)
    else:
        # one level is unrolled by hand here; note that a nested iterable
        # element is unwrapped again inside the recursive call
        for i in thing:
            i = templatekw.unwraphybrid(i)
            if isinstance(i, str):
                yield i
            elif i is None:
                pass
            elif not util.safehasattr(i, '__iter__'):
                yield str(i)
            else:
                for j in _flatten(i):
                    yield j
1122
1122
def unquotestring(s):
    '''unwrap quotes if any; otherwise returns unmodified string'''
    # only strip when both ends carry the same quote character
    if len(s) >= 2 and s[0] in "'\"" and s[0] == s[-1]:
        return s[1:-1]
    return s
1128
1128
class engine(object):
    '''template expansion engine.

    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.

    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.

    expansion also allows formatting and filtering.

    format uses key to expand each item in list. syntax is
    {key%format}.

    filter uses function to transform value. syntax is
    {key|filter1|filter2|...}.'''

    def __init__(self, loader, filters=None, defaults=None, aliases=()):
        # loader: callable mapping a template name to its unparsed text
        self._loader = loader
        if filters is None:
            filters = {}
        self._filters = filters
        if defaults is None:
            defaults = {}
        self._defaults = defaults
        self._aliasmap = _aliasrules.buildmap(aliases)
        self._cache = {} # key: (func, data)

    def _load(self, t):
        '''load, parse, and cache a template'''
        if t not in self._cache:
            # put poison to cut recursion while compiling 't'
            self._cache[t] = (_runrecursivesymbol, t)
            try:
                x = parse(self._loader(t))
                if self._aliasmap:
                    # rewrite alias nodes before compiling
                    x = _aliasrules.expand(self._aliasmap, x)
                self._cache[t] = compileexp(x, self, methods)
            except: # re-raises
                # drop the poison entry so a later attempt can retry
                # instead of reporting a bogus recursion
                del self._cache[t]
                raise
        return self._cache[t]

    def process(self, t, mapping):
        '''Perform expansion. t is name of map element to expand.
        mapping contains added elements for use during expansion. Is a
        generator.'''
        func, data = self._load(t)
        return _flatten(func(self, mapping, data))
1181
1181
# registry of template engines by name; 'default' is the class above
engines = {'default': engine}
1183
1183
def stylelist():
    """Return a comma-separated list of available style names, or a
    hint message when no template directory can be found."""
    paths = templatepaths()
    if not paths:
        return _('no templates found, try `hg debuginstall` for more info')
    styles = []
    for entry in os.listdir(paths[0]):
        parts = entry.split(".")
        # skip editor/patch leftovers
        if parts[-1] in ('orig', 'rej'):
            continue
        # style files are named "map-cmdline.<style>"
        if parts[0] == "map-cmdline":
            styles.append(parts[1])
    return ", ".join(sorted(styles))
1197
1197
def _readmapfile(mapfile):
    """Load template elements from the given map file.

    Returns a (cache, tmap) pair: 'cache' maps names to inline (quoted)
    template strings and 'tmap' maps names to (enginetype, filepath)
    tuples. A '__base__' entry pulls in another map file whose entries
    act as fallbacks for anything not defined locally.
    """
    if not os.path.exists(mapfile):
        raise error.Abort(_("style '%s' not found") % mapfile,
                          hint=_("available styles: %s") % stylelist())

    base = os.path.dirname(mapfile)
    conf = config.config(includepaths=templatepaths())
    conf.read(mapfile)

    cache = {}
    tmap = {}
    for key, val in conf[''].items():
        if not val:
            raise error.ParseError(_('missing value'), conf.source('', key))
        if val[0] in "'\"":
            # quoted value: an inline template string
            if val[0] != val[-1]:
                raise error.ParseError(_('unmatched quotes'),
                                       conf.source('', key))
            cache[key] = unquotestring(val)
        elif key == "__base__":
            # pointer to a base style this one inherits from
            path = util.normpath(os.path.join(base, val))

            if not os.path.exists(path):
                # fall back to searching the template paths, first for the
                # file itself, then for a "map" file inside a directory
                for p in templatepaths():
                    candidate = util.normpath(os.path.join(p, val))
                    if os.path.isfile(candidate):
                        path = candidate
                        break
                    candidate = util.normpath(os.path.join(candidate, "map"))
                    if os.path.isfile(candidate):
                        path = candidate
                        break

            bcache, btmap = _readmapfile(path)
            # local definitions win; base entries only fill the gaps
            for k in bcache:
                cache.setdefault(k, bcache[k])
            for k in btmap:
                tmap.setdefault(k, btmap[k])
        else:
            # "enginetype:filename" or bare "filename" (default engine)
            if ':' in val:
                ttype, fname = val.split(':', 1)
            else:
                ttype, fname = 'default', val
            tmap[key] = ttype, os.path.join(base, fname)
    return cache, tmap
1247
1247
class TemplateNotFound(error.Abort):
    """Raised when a requested template name cannot be resolved."""
1250
1250
class templater(object):
    """Facade that resolves template names and renders them via engines."""

    def __init__(self, filters=None, defaults=None, cache=None, aliases=(),
                 minchunk=1024, maxchunk=65536):
        """Set up the template engine.

        filters: dict of functions, each transforming a value into another.
        defaults: dict of default map definitions.
        cache: dict of pre-loaded template strings (copied, not shared).
        aliases: list of (name, replacement) alias pairs.
        minchunk/maxchunk: bounds for output chunk coalescing.
        """
        self.cache = {} if cache is None else cache.copy()
        self.map = {}
        self.filters = templatefilters.filters.copy()
        self.filters.update(filters or {})
        self.defaults = {} if defaults is None else defaults
        self._aliases = aliases
        self.minchunk = minchunk
        self.maxchunk = maxchunk
        self.ecache = {}  # engine type name -> engine instance

    @classmethod
    def frommapfile(cls, mapfile, filters=None, defaults=None, cache=None,
                    minchunk=1024, maxchunk=65536):
        """Create templater from the specified map file"""
        t = cls(filters, defaults, cache, [], minchunk, maxchunk)
        mapcache, tmap = _readmapfile(mapfile)
        t.cache.update(mapcache)
        t.map = tmap
        return t

    def __contains__(self, key):
        if key in self.cache:
            return True
        return key in self.map

    def load(self, t):
        """Get the template text for name 't', reading and caching it from
        the map file location on first use."""
        if t in self.cache:
            return self.cache[t]
        try:
            self.cache[t] = util.readfile(self.map[t][1])
        except KeyError as inst:
            raise TemplateNotFound(_('"%s" not in template map')
                                   % inst.args[0])
        except IOError as inst:
            raise IOError(inst.args[0], _('template file %s: %s')
                          % (self.map[t][1], inst.args[1]))
        return self.cache[t]

    def render(self, mapping):
        """Render the default unnamed template and return result as string"""
        return stringify(self('', **mapping))

    def __call__(self, t, **mapping):
        # pick the engine type declared in the map, falling back to
        # 'default' when the name is unmapped (or its type is empty)
        if t in self.map and self.map[t][0]:
            ttype = self.map[t][0]
        else:
            ttype = 'default'
        if ttype not in self.ecache:
            try:
                ecls = engines[ttype]
            except KeyError:
                raise error.Abort(_('invalid template engine: %s') % ttype)
            self.ecache[ttype] = ecls(self.load, self.filters, self.defaults,
                                      self._aliases)
        proc = self.ecache[ttype]

        stream = proc.process(t, mapping)
        if self.minchunk:
            # coalesce tiny chunks to keep write() call overhead down
            stream = util.increasingchunks(stream, min=self.minchunk,
                                           max=self.maxchunk)
        return stream
1317
1321
def templatepaths():
    """Return the list of existing directories searched for templates."""
    path = os.path.normpath(os.path.join(util.datapath, 'templates'))
    if os.path.isdir(path):
        return [path]
    return []
1324
1328
def templatepath(name):
    """Return the full path of template file 'name', or None if not found."""
    for directory in templatepaths():
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return None
1332
1336
def stylemap(styles, paths=None):
    """Return path to mapfile for a given style.

    Searches mapfile in the following locations:
    1. templatepath/style/map
    2. templatepath/map-style
    3. templatepath/map
    """
    if paths is None:
        paths = templatepaths()
    elif isinstance(paths, str):
        paths = [paths]

    if isinstance(styles, str):
        styles = [styles]

    for style in styles:
        # only a plain name may be resolved against the template paths;
        # reject empty names, '.'/'..', and anything containing a separator
        if not style or style in (os.curdir, os.pardir):
            continue
        if pycompat.ossep in style:
            continue
        if pycompat.osaltsep and pycompat.osaltsep in style:
            continue
        locations = [os.path.join(style, 'map'), 'map-' + style, 'map']

        for path in paths:
            for location in locations:
                mapfile = os.path.join(path, location)
                if os.path.isfile(mapfile):
                    return style, mapfile

    raise RuntimeError("No hgweb templates found in %r" % paths)
1367
1371
def loadfunction(ui, extname, registrarobj):
    """Register the template functions collected in 'registrarobj'."""
    for fname, fimpl in registrarobj._table.iteritems():
        funcs[fname] = fimpl

# tell hggettext to extract docstrings from these functions:
i18nfunctions = funcs.values()
General Comments 0
You need to be logged in to leave comments. Login now