revert: do not reverse hunks in interactive when REV is not parent (issue5096)...

Author:    Denis Laxalde
Changeset: r34969:3649c3f2 (branch: default)
@@ -1,3966 +1,3965 @@
# cmdutil.py - help for command processing in mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import itertools
import os
import re
import tempfile

from .i18n import _
from .node import (
    hex,
    nullid,
    nullrev,
    short,
)

from . import (
    bookmarks,
    changelog,
    copies,
    crecord as crecordmod,
    dagop,
    dirstateguard,
    encoding,
    error,
    formatter,
    graphmod,
    match as matchmod,
    mdiff,
    obsolete,
    patch,
    pathutil,
    pycompat,
    registrar,
    revlog,
    revset,
    scmutil,
    smartset,
    templatekw,
    templater,
    util,
    vfs as vfsmod,
)
stringio = util.stringio

# templates of common command options

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('Z', 'ignore-space-at-eol', None,
     _('ignore changes in whitespace at EOL')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"

def ishunk(x):
    hunkclasses = (crecordmod.uihunk, patch.recordhunk)
    return isinstance(x, hunkclasses)

def newandmodified(chunks, originalchunks):
    newlyaddedandmodifiedfiles = set()
    for chunk in chunks:
        if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
            originalchunks:
            newlyaddedandmodifiedfiles.add(chunk.header.filename())
    return newlyaddedandmodifiedfiles

def parsealiases(cmd):
    return cmd.lstrip("^").split("|")
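# Illustrative example (assumption, not part of the original module): a command
# table key such as "^log|history" yields
#   parsealiases("^log|history") == ['log', 'history']
# The leading '^' marker is stripped and the remainder is split on '|'.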

def setupwrapcolorwrite(ui):
    # wrap ui.write so diff output can be labeled/colorized
    def wrapwrite(orig, *args, **kw):
        label = kw.pop('label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    setattr(ui, 'write', wrap)
    return oldwrite

def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    if usecurses:
        if testfile:
            recordfn = crecordmod.testdecorator(testfile,
                                                crecordmod.testchunkselector)
        else:
            recordfn = crecordmod.chunkselector

        return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)

    else:
        return patch.filterpatch(ui, originalhunks, operation)

def recordfilter(ui, originalhunks, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used to build ui messages indicating to the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest')
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, operation)
    finally:
        ui.write = oldwrite
    return newchunks, newopts

def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    from . import merge as mergemod
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is the generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare the working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by the non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply a subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        action="diff",
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this is racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                pass

    def recordinwlock(ui, repo, message, match, opts):
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)

class dirnode(object):
    """
    Represent a directory in user working copy with information required for
    the purpose of tersing its status.

    path is the path to the directory

    statuses is a set of statuses of all files in this directory (this includes
    all the files in all the subdirectories too)

    files is a list of files which are direct children of this directory

    subdirs is a dictionary of sub-directory name as the key and its own
    dirnode object as the value
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set([])
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Add a file in this directory as a direct child."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """
        Add a file to this directory or to its direct parent directory.

        If the file is not a direct child of this directory, we traverse to
        the directory of which this file is a direct child and add the file
        there.
        """

        # if the filename contains a path separator, it means it's not the
        # direct child of this directory
        if '/' in filename:
            subdir, filep = filename.split('/', 1)

            # does the dirnode object for subdir exist
            if subdir not in self.subdirs:
                subdirpath = os.path.join(self.path, subdir)
                self.subdirs[subdir] = dirnode(subdirpath)

            # try adding the file in subdir
            self.subdirs[subdir].addfile(filep, status)

        else:
            self._addfileindir(filename, status)

        if status not in self.statuses:
            self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for f, st in self.files:
            yield st, os.path.join(self.path, f)

    def tersewalk(self, terseargs):
        """
        Yield (status, path) obtained by processing the status of this
        dirnode.

        terseargs is the string of arguments passed by the user with `--terse`
        flag.

        Following are the cases which can happen:

        1) All the files in the directory (including all the files in its
        subdirectories) share the same status and the user has asked us to
        terse that status. -> yield (status, dirpath)

        2) Otherwise, we do the following:

            a) Yield (status, filepath) for all the files which are in this
               directory (only the ones in this directory, not the subdirs)

            b) Recurse the function on all the subdirectories of this
               directory
        """

        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # Making sure we terse only when the status abbreviation is
            # passed as terse argument
            if onlyst in terseargs:
                yield onlyst, self.path + pycompat.ossep
                return

        # add the files to status list
        for st, fpath in self.iterfilepaths():
            yield st, fpath

        # recurse on the subdirs
        for dirobj in self.subdirs.values():
            for st, fpath in dirobj.tersewalk(terseargs):
                yield st, fpath

def tersedir(statuslist, terseargs):
    """
    Terse the status if all the files in a directory share the same status.

    statuslist is scmutil.status() object which contains a list of files for
    each status.
    terseargs is the string which is passed by the user as the argument to
    `--terse` flag.

    The function makes a tree of objects of dirnode class, and at each node it
    stores the information required to know whether we can terse a certain
    directory or not.
    """
    # the order matters here as that is used to produce final list
    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # checking the argument validity
    for s in pycompat.bytestr(terseargs):
        if s not in allst:
            raise error.Abort(_("'%s' not recognized") % s)

    # creating a dirnode object for the root of the repo
    rootobj = dirnode('')
    pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
               'ignored', 'removed')

    tersedict = {}
    for attrname in pstatus:
        statuschar = attrname[0:1]
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)
        tersedict[statuschar] = []

    # we won't be tersing the root dir, so add files in it
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # process each sub-directory and build tersedict
    for subdir in rootobj.subdirs.values():
        for st, f in subdir.tersewalk(terseargs):
            tersedict[st].append(f)

    tersedlist = []
    for st in allst:
        tersedict[st].sort()
        tersedlist.append(tersedict[st])

    return tersedlist
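# Illustrative behaviour (assumption, not part of the original module): with
# terseargs 'u' and a directory 'build/' whose files are all unknown, tersedir
# reports the single entry 'build/' (directory path plus os separator) in the
# unknown list instead of listing every file underneath it.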

def _commentlines(raw):
    '''Surround lines with a comment char and a new line'''
    lines = raw.splitlines()
    commentedlines = ['# %s' % line for line in lines]
    return '\n'.join(commentedlines) + '\n'
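# Illustrative example (assumption, not part of the original module):
#   _commentlines('foo\nbar') == '# foo\n# bar\n'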

def _conflictsmsg(repo):
    # avoid merge cycle
    from . import merge as mergemod
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    m = scmutil.match(repo[None])
    unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
    if unresolvedlist:
        mergeliststr = '\n'.join(
            [' %s' % os.path.relpath(
                os.path.join(repo.root, path),
                pycompat.getcwd()) for path in unresolvedlist])
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
    else:
        msg = _('No unresolved merge conflicts.')

    return _commentlines(msg)

def _helpmessage(continuecmd, abortcmd):
    msg = _('To continue: %s\n'
            'To abort: %s') % (continuecmd, abortcmd)
    return _commentlines(msg)

def _rebasemsg():
    return _helpmessage('hg rebase --continue', 'hg rebase --abort')

def _histeditmsg():
    return _helpmessage('hg histedit --continue', 'hg histedit --abort')

def _unshelvemsg():
    return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')

def _updatecleanmsg(dest=None):
    warning = _('warning: this will discard uncommitted changes')
    return 'hg update --clean %s (%s)' % (dest or '.', warning)

def _graftmsg():
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg graft --continue', _updatecleanmsg())

def _mergemsg():
    # tweakdefaults requires `update` to have a rev hence the `.`
    return _helpmessage('hg commit', _updatecleanmsg())

def _bisectmsg():
    msg = _('To mark the changeset good: hg bisect --good\n'
            'To mark the changeset bad: hg bisect --bad\n'
            'To abort: hg bisect --reset\n')
    return _commentlines(msg)

def fileexistspredicate(filename):
    return lambda repo: repo.vfs.exists(filename)

def _mergepredicate(repo):
    return len(repo[None].parents()) > 1

STATES = (
    # (state, predicate to detect states, helpful message function)
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('unshelverebasestate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # They need to be last because some of the other unfinished states may also
    # be in a merge or update state (eg. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)

def _getrepostate(repo):
    # experimental config: commands.status.skipstates
    skip = set(repo.ui.configlist('commands', 'status.skipstates'))
    for state, statedetectionpredicate, msgfn in STATES:
        if state in skip:
            continue
        if statedetectionpredicate(repo):
            return (state, statedetectionpredicate, msgfn)

def morestatus(repo, fm):
    statetuple = _getrepostate(repo)
    label = 'status.morestatus'
    if statetuple:
        fm.startitem()
        state, statedetectionpredicate, helpfulmsg = statetuple
        statemsg = _('The repository is in an unfinished *%s* state.') % state
        fm.write('statemsg', '%s\n', _commentlines(statemsg), label=label)
        conmsg = _conflictsmsg(repo)
        if conmsg:
            fm.write('conflictsmsg', '%s\n', conmsg, label=label)
        if helpfulmsg:
            helpmsg = helpfulmsg()
            fm.write('helpmsg', '%s\n', helpmsg, label=label)

def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for e in keys:
        aliases = parsealiases(e)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
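# Illustrative behaviour (assumption, not part of the original module): with
# strict=False an abbreviation such as 'st' matches 'status' by prefix, and
# entries whose aliases start with 'debug' are only offered when no regular
# command matched.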

def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = sorted(choice)
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)

def findrepo(p):
    while not os.path.isdir(os.path.join(p, ".hg")):
        oldp, p = p, os.path.dirname(p)
        if p == oldp:
            return None

    return p

def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that the working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to the Abort exception.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise error.Abort(_('uncommitted changes'), hint=hint)
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)

def logmessage(ui, opts):
    """ get the log message according to the -m and -l options """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if not message and logfile:
        try:
            if isstdiofilename(logfile):
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, encoding.strtolocal(inst.strerror)))
    return message

def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        if ctxorbool:
            return baseformname + ".merge"
    elif 1 < len(ctxorbool.parents()):
        return baseformname + ".merge"

    return baseformname + ".normal"
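# Illustrative examples (assumption, not part of the original module):
#   mergeeditform(False, 'commit') == 'commit.normal'
#   mergeeditform(True, 'commit')  == 'commit.merge'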

def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows changing the description before
    storing.

    'extramsg' is an extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor

def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise error.Abort(_('limit must be a positive integer'))
        if limit <= 0:
            raise error.Abort(_('limit must be positive'))
    else:
        limit = None
    return limit
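# Illustrative behaviour (assumption, not part of the original module):
#   loglimit({'limit': '3'}) == 3
#   loglimit({'limit': ''}) is None
#   loglimit({'limit': '0'}) raises error.Abort ('limit must be positive')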

def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', str(desc))
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i:i + 1]
            if c == '%':
                i += 1
                c = pat[i:i + 1]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
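# Illustrative example (assumption, not part of the original module): with
# pat 'export-%h-%n.patch', a node, seqno=3 and total=12, the result is
# 'export-<short hex of node>-03.patch' -- '%n' is zero-padded to the width
# of str(total) and '%h' expands to the short hash.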

def isstdiofilename(pat):
    """True if the given pat looks like a filename denoting stdin/stdout"""
    return not pat or pat == '-'
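# Illustrative examples (assumption, not part of the original module):
#   isstdiofilename('-') is True, isstdiofilename('') is True,
#   isstdiofilename('out.patch') is False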
872
872
873 class _unclosablefile(object):
873 class _unclosablefile(object):
874 def __init__(self, fp):
874 def __init__(self, fp):
875 self._fp = fp
875 self._fp = fp
876
876
877 def close(self):
877 def close(self):
878 pass
878 pass
879
879
880 def __iter__(self):
880 def __iter__(self):
881 return iter(self._fp)
881 return iter(self._fp)
882
882
883 def __getattr__(self, attr):
883 def __getattr__(self, attr):
884 return getattr(self._fp, attr)
884 return getattr(self._fp, attr)
885
885
886 def __enter__(self):
886 def __enter__(self):
887 return self
887 return self
888
888
889 def __exit__(self, exc_type, exc_value, exc_tb):
889 def __exit__(self, exc_type, exc_value, exc_tb):
890 pass
890 pass
891
891
892 def makefileobj(repo, pat, node=None, desc=None, total=None,
892 def makefileobj(repo, pat, node=None, desc=None, total=None,
893 seqno=None, revwidth=None, mode='wb', modemap=None,
893 seqno=None, revwidth=None, mode='wb', modemap=None,
894 pathname=None):
894 pathname=None):
895
895
896 writable = mode not in ('r', 'rb')
896 writable = mode not in ('r', 'rb')
897
897
898 if isstdiofilename(pat):
898 if isstdiofilename(pat):
899 if writable:
899 if writable:
900 fp = repo.ui.fout
900 fp = repo.ui.fout
901 else:
901 else:
902 fp = repo.ui.fin
902 fp = repo.ui.fin
903 return _unclosablefile(fp)
903 return _unclosablefile(fp)
904 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
904 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
905 if modemap is not None:
905 if modemap is not None:
906 mode = modemap.get(fn, mode)
906 mode = modemap.get(fn, mode)
907 if mode == 'wb':
907 if mode == 'wb':
908 modemap[fn] = 'ab'
908 modemap[fn] = 'ab'
909 return open(fn, mode)
909 return open(fn, mode)
910
910
911 def openrevlog(repo, cmd, file_, opts):
911 def openrevlog(repo, cmd, file_, opts):
912 """opens the changelog, manifest, a filelog or a given revlog"""
912 """opens the changelog, manifest, a filelog or a given revlog"""
913 cl = opts['changelog']
913 cl = opts['changelog']
914 mf = opts['manifest']
914 mf = opts['manifest']
915 dir = opts['dir']
915 dir = opts['dir']
916 msg = None
916 msg = None
917 if cl and mf:
917 if cl and mf:
918 msg = _('cannot specify --changelog and --manifest at the same time')
918 msg = _('cannot specify --changelog and --manifest at the same time')
919 elif cl and dir:
919 elif cl and dir:
920 msg = _('cannot specify --changelog and --dir at the same time')
920 msg = _('cannot specify --changelog and --dir at the same time')
921 elif cl or mf or dir:
921 elif cl or mf or dir:
922 if file_:
922 if file_:
923 msg = _('cannot specify filename with --changelog or --manifest')
923 msg = _('cannot specify filename with --changelog or --manifest')
924 elif not repo:
924 elif not repo:
925 msg = _('cannot specify --changelog or --manifest or --dir '
925 msg = _('cannot specify --changelog or --manifest or --dir '
926 'without a repository')
926 'without a repository')
927 if msg:
927 if msg:
928 raise error.Abort(msg)
928 raise error.Abort(msg)
929
929
930 r = None
930 r = None
931 if repo:
931 if repo:
932 if cl:
932 if cl:
933 r = repo.unfiltered().changelog
933 r = repo.unfiltered().changelog
934 elif dir:
934 elif dir:
935 if 'treemanifest' not in repo.requirements:
935 if 'treemanifest' not in repo.requirements:
936 raise error.Abort(_("--dir can only be used on repos with "
936 raise error.Abort(_("--dir can only be used on repos with "
937 "treemanifest enabled"))
937 "treemanifest enabled"))
938 dirlog = repo.manifestlog._revlog.dirlog(dir)
938 dirlog = repo.manifestlog._revlog.dirlog(dir)
939 if len(dirlog):
939 if len(dirlog):
940 r = dirlog
940 r = dirlog
941 elif mf:
941 elif mf:
942 r = repo.manifestlog._revlog
942 r = repo.manifestlog._revlog
943 elif file_:
943 elif file_:
944 filelog = repo.file(file_)
944 filelog = repo.file(file_)
945 if len(filelog):
945 if len(filelog):
946 r = filelog
946 r = filelog
947 if not r:
947 if not r:
948 if not file_:
948 if not file_:
949 raise error.CommandError(cmd, _('invalid arguments'))
949 raise error.CommandError(cmd, _('invalid arguments'))
950 if not os.path.isfile(file_):
950 if not os.path.isfile(file_):
951 raise error.Abort(_("revlog '%s' not found") % file_)
951 raise error.Abort(_("revlog '%s' not found") % file_)
952 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
952 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
953 file_[:-2] + ".i")
953 file_[:-2] + ".i")
954 return r
954 return r
955
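# Illustrative sketch (hypothetical caller): a debug command could open a
# revlog for inspection roughly like this:
#
#   opts = {'changelog': False, 'manifest': False, 'dir': ''}
#   r = openrevlog(repo, 'debugindex', 'foo.txt', opts)
#
# With no flags set, the filelog of 'foo.txt' is returned; passing
# changelog=True (and no filename) returns the unfiltered changelog instead.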
955
956 def copy(ui, repo, pats, opts, rename=False):
956 def copy(ui, repo, pats, opts, rename=False):
957 # called with the repo lock held
957 # called with the repo lock held
958 #
958 #
959 # hgsep => pathname that uses "/" to separate directories
959 # hgsep => pathname that uses "/" to separate directories
960 # ossep => pathname that uses os.sep to separate directories
960 # ossep => pathname that uses os.sep to separate directories
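# Illustrative example: for a file in a subdirectory, the hgsep form is
# always 'sub/file.txt', while the ossep form is r'sub\file.txt' on Windows
# and 'sub/file.txt' elsewhere.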
961 cwd = repo.getcwd()
961 cwd = repo.getcwd()
962 targets = {}
962 targets = {}
963 after = opts.get("after")
963 after = opts.get("after")
964 dryrun = opts.get("dry_run")
964 dryrun = opts.get("dry_run")
965 wctx = repo[None]
965 wctx = repo[None]
966
966
967 def walkpat(pat):
967 def walkpat(pat):
968 srcs = []
968 srcs = []
969 if after:
969 if after:
970 badstates = '?'
970 badstates = '?'
971 else:
971 else:
972 badstates = '?r'
972 badstates = '?r'
973 m = scmutil.match(wctx, [pat], opts, globbed=True)
973 m = scmutil.match(wctx, [pat], opts, globbed=True)
974 for abs in wctx.walk(m):
974 for abs in wctx.walk(m):
975 state = repo.dirstate[abs]
975 state = repo.dirstate[abs]
976 rel = m.rel(abs)
976 rel = m.rel(abs)
977 exact = m.exact(abs)
977 exact = m.exact(abs)
978 if state in badstates:
978 if state in badstates:
979 if exact and state == '?':
979 if exact and state == '?':
980 ui.warn(_('%s: not copying - file is not managed\n') % rel)
980 ui.warn(_('%s: not copying - file is not managed\n') % rel)
981 if exact and state == 'r':
981 if exact and state == 'r':
982 ui.warn(_('%s: not copying - file has been marked for'
982 ui.warn(_('%s: not copying - file has been marked for'
983 ' remove\n') % rel)
983 ' remove\n') % rel)
984 continue
984 continue
985 # abs: hgsep
985 # abs: hgsep
986 # rel: ossep
986 # rel: ossep
987 srcs.append((abs, rel, exact))
987 srcs.append((abs, rel, exact))
988 return srcs
988 return srcs
989
989
990 # abssrc: hgsep
990 # abssrc: hgsep
991 # relsrc: ossep
991 # relsrc: ossep
992 # otarget: ossep
992 # otarget: ossep
993 def copyfile(abssrc, relsrc, otarget, exact):
993 def copyfile(abssrc, relsrc, otarget, exact):
994 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
994 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
995 if '/' in abstarget:
995 if '/' in abstarget:
996 # We cannot normalize abstarget itself, this would prevent
996 # We cannot normalize abstarget itself, this would prevent
997 # case only renames, like a => A.
997 # case only renames, like a => A.
998 abspath, absname = abstarget.rsplit('/', 1)
998 abspath, absname = abstarget.rsplit('/', 1)
999 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
999 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
1000 reltarget = repo.pathto(abstarget, cwd)
1000 reltarget = repo.pathto(abstarget, cwd)
1001 target = repo.wjoin(abstarget)
1001 target = repo.wjoin(abstarget)
1002 src = repo.wjoin(abssrc)
1002 src = repo.wjoin(abssrc)
1003 state = repo.dirstate[abstarget]
1003 state = repo.dirstate[abstarget]
1004
1004
1005 scmutil.checkportable(ui, abstarget)
1005 scmutil.checkportable(ui, abstarget)
1006
1006
1007 # check for collisions
1007 # check for collisions
1008 prevsrc = targets.get(abstarget)
1008 prevsrc = targets.get(abstarget)
1009 if prevsrc is not None:
1009 if prevsrc is not None:
1010 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1010 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1011 (reltarget, repo.pathto(abssrc, cwd),
1011 (reltarget, repo.pathto(abssrc, cwd),
1012 repo.pathto(prevsrc, cwd)))
1012 repo.pathto(prevsrc, cwd)))
1013 return
1013 return
1014
1014
1015 # check for overwrites
1015 # check for overwrites
1016 exists = os.path.lexists(target)
1016 exists = os.path.lexists(target)
1017 samefile = False
1017 samefile = False
1018 if exists and abssrc != abstarget:
1018 if exists and abssrc != abstarget:
1019 if (repo.dirstate.normalize(abssrc) ==
1019 if (repo.dirstate.normalize(abssrc) ==
1020 repo.dirstate.normalize(abstarget)):
1020 repo.dirstate.normalize(abstarget)):
1021 if not rename:
1021 if not rename:
1022 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1022 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1023 return
1023 return
1024 exists = False
1024 exists = False
1025 samefile = True
1025 samefile = True
1026
1026
1027 if not after and exists or after and state in 'mn':
1027 if not after and exists or after and state in 'mn':
1028 if not opts['force']:
1028 if not opts['force']:
1029 if state in 'mn':
1029 if state in 'mn':
1030 msg = _('%s: not overwriting - file already committed\n')
1030 msg = _('%s: not overwriting - file already committed\n')
1031 if after:
1031 if after:
1032 flags = '--after --force'
1032 flags = '--after --force'
1033 else:
1033 else:
1034 flags = '--force'
1034 flags = '--force'
1035 if rename:
1035 if rename:
1036 hint = _('(hg rename %s to replace the file by '
1036 hint = _('(hg rename %s to replace the file by '
1037 'recording a rename)\n') % flags
1037 'recording a rename)\n') % flags
1038 else:
1038 else:
1039 hint = _('(hg copy %s to replace the file by '
1039 hint = _('(hg copy %s to replace the file by '
1040 'recording a copy)\n') % flags
1040 'recording a copy)\n') % flags
1041 else:
1041 else:
1042 msg = _('%s: not overwriting - file exists\n')
1042 msg = _('%s: not overwriting - file exists\n')
1043 if rename:
1043 if rename:
1044 hint = _('(hg rename --after to record the rename)\n')
1044 hint = _('(hg rename --after to record the rename)\n')
1045 else:
1045 else:
1046 hint = _('(hg copy --after to record the copy)\n')
1046 hint = _('(hg copy --after to record the copy)\n')
1047 ui.warn(msg % reltarget)
1047 ui.warn(msg % reltarget)
1048 ui.warn(hint)
1048 ui.warn(hint)
1049 return
1049 return
1050
1050
1051 if after:
1051 if after:
1052 if not exists:
1052 if not exists:
1053 if rename:
1053 if rename:
1054 ui.warn(_('%s: not recording move - %s does not exist\n') %
1054 ui.warn(_('%s: not recording move - %s does not exist\n') %
1055 (relsrc, reltarget))
1055 (relsrc, reltarget))
1056 else:
1056 else:
1057 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1057 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1058 (relsrc, reltarget))
1058 (relsrc, reltarget))
1059 return
1059 return
1060 elif not dryrun:
1060 elif not dryrun:
1061 try:
1061 try:
1062 if exists:
1062 if exists:
1063 os.unlink(target)
1063 os.unlink(target)
1064 targetdir = os.path.dirname(target) or '.'
1064 targetdir = os.path.dirname(target) or '.'
1065 if not os.path.isdir(targetdir):
1065 if not os.path.isdir(targetdir):
1066 os.makedirs(targetdir)
1066 os.makedirs(targetdir)
1067 if samefile:
1067 if samefile:
1068 tmp = target + "~hgrename"
1068 tmp = target + "~hgrename"
1069 os.rename(src, tmp)
1069 os.rename(src, tmp)
1070 os.rename(tmp, target)
1070 os.rename(tmp, target)
1071 else:
1071 else:
1072 util.copyfile(src, target)
1072 util.copyfile(src, target)
1073 srcexists = True
1073 srcexists = True
1074 except IOError as inst:
1074 except IOError as inst:
1075 if inst.errno == errno.ENOENT:
1075 if inst.errno == errno.ENOENT:
1076 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1076 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1077 srcexists = False
1077 srcexists = False
1078 else:
1078 else:
1079 ui.warn(_('%s: cannot copy - %s\n') %
1079 ui.warn(_('%s: cannot copy - %s\n') %
1080 (relsrc, encoding.strtolocal(inst.strerror)))
1080 (relsrc, encoding.strtolocal(inst.strerror)))
1081 return True # report a failure
1081 return True # report a failure
1082
1082
1083 if ui.verbose or not exact:
1083 if ui.verbose or not exact:
1084 if rename:
1084 if rename:
1085 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1085 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1086 else:
1086 else:
1087 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1087 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1088
1088
1089 targets[abstarget] = abssrc
1089 targets[abstarget] = abssrc
1090
1090
1091 # fix up dirstate
1091 # fix up dirstate
1092 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1092 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1093 dryrun=dryrun, cwd=cwd)
1093 dryrun=dryrun, cwd=cwd)
1094 if rename and not dryrun:
1094 if rename and not dryrun:
1095 if not after and srcexists and not samefile:
1095 if not after and srcexists and not samefile:
1096 repo.wvfs.unlinkpath(abssrc)
1096 repo.wvfs.unlinkpath(abssrc)
1097 wctx.forget([abssrc])
1097 wctx.forget([abssrc])
1098
1098
1099 # pat: ossep
1099 # pat: ossep
1100 # dest: ossep
1100 # dest: ossep
1101 # srcs: list of (hgsep, hgsep, ossep, bool)
1101 # srcs: list of (hgsep, hgsep, ossep, bool)
1102 # return: function that takes hgsep and returns ossep
1102 # return: function that takes hgsep and returns ossep
1103 def targetpathfn(pat, dest, srcs):
1103 def targetpathfn(pat, dest, srcs):
1104 if os.path.isdir(pat):
1104 if os.path.isdir(pat):
1105 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1105 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1106 abspfx = util.localpath(abspfx)
1106 abspfx = util.localpath(abspfx)
1107 if destdirexists:
1107 if destdirexists:
1108 striplen = len(os.path.split(abspfx)[0])
1108 striplen = len(os.path.split(abspfx)[0])
1109 else:
1109 else:
1110 striplen = len(abspfx)
1110 striplen = len(abspfx)
1111 if striplen:
1111 if striplen:
1112 striplen += len(pycompat.ossep)
1112 striplen += len(pycompat.ossep)
1113 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1113 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1114 elif destdirexists:
1114 elif destdirexists:
1115 res = lambda p: os.path.join(dest,
1115 res = lambda p: os.path.join(dest,
1116 os.path.basename(util.localpath(p)))
1116 os.path.basename(util.localpath(p)))
1117 else:
1117 else:
1118 res = lambda p: dest
1118 res = lambda p: dest
1119 return res
1119 return res
1120
1120
1121 # pat: ossep
1121 # pat: ossep
1122 # dest: ossep
1122 # dest: ossep
1123 # srcs: list of (hgsep, hgsep, ossep, bool)
1123 # srcs: list of (hgsep, hgsep, ossep, bool)
1124 # return: function that takes hgsep and returns ossep
1124 # return: function that takes hgsep and returns ossep
1125 def targetpathafterfn(pat, dest, srcs):
1125 def targetpathafterfn(pat, dest, srcs):
1126 if matchmod.patkind(pat):
1126 if matchmod.patkind(pat):
1127 # a mercurial pattern
1127 # a mercurial pattern
1128 res = lambda p: os.path.join(dest,
1128 res = lambda p: os.path.join(dest,
1129 os.path.basename(util.localpath(p)))
1129 os.path.basename(util.localpath(p)))
1130 else:
1130 else:
1131 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1131 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1132 if len(abspfx) < len(srcs[0][0]):
1132 if len(abspfx) < len(srcs[0][0]):
1133 # A directory. Either the target path contains the last
1133 # A directory. Either the target path contains the last
1134 # component of the source path or it does not.
1134 # component of the source path or it does not.
1135 def evalpath(striplen):
1135 def evalpath(striplen):
1136 score = 0
1136 score = 0
1137 for s in srcs:
1137 for s in srcs:
1138 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1138 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1139 if os.path.lexists(t):
1139 if os.path.lexists(t):
1140 score += 1
1140 score += 1
1141 return score
1141 return score
1142
1142
1143 abspfx = util.localpath(abspfx)
1143 abspfx = util.localpath(abspfx)
1144 striplen = len(abspfx)
1144 striplen = len(abspfx)
1145 if striplen:
1145 if striplen:
1146 striplen += len(pycompat.ossep)
1146 striplen += len(pycompat.ossep)
1147 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1147 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1148 score = evalpath(striplen)
1148 score = evalpath(striplen)
1149 striplen1 = len(os.path.split(abspfx)[0])
1149 striplen1 = len(os.path.split(abspfx)[0])
1150 if striplen1:
1150 if striplen1:
1151 striplen1 += len(pycompat.ossep)
1151 striplen1 += len(pycompat.ossep)
1152 if evalpath(striplen1) > score:
1152 if evalpath(striplen1) > score:
1153 striplen = striplen1
1153 striplen = striplen1
1154 res = lambda p: os.path.join(dest,
1154 res = lambda p: os.path.join(dest,
1155 util.localpath(p)[striplen:])
1155 util.localpath(p)[striplen:])
1156 else:
1156 else:
1157 # a file
1157 # a file
1158 if destdirexists:
1158 if destdirexists:
1159 res = lambda p: os.path.join(dest,
1159 res = lambda p: os.path.join(dest,
1160 os.path.basename(util.localpath(p)))
1160 os.path.basename(util.localpath(p)))
1161 else:
1161 else:
1162 res = lambda p: dest
1162 res = lambda p: dest
1163 return res
1163 return res
1164
1164
1165 pats = scmutil.expandpats(pats)
1165 pats = scmutil.expandpats(pats)
1166 if not pats:
1166 if not pats:
1167 raise error.Abort(_('no source or destination specified'))
1167 raise error.Abort(_('no source or destination specified'))
1168 if len(pats) == 1:
1168 if len(pats) == 1:
1169 raise error.Abort(_('no destination specified'))
1169 raise error.Abort(_('no destination specified'))
1170 dest = pats.pop()
1170 dest = pats.pop()
1171 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1171 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1172 if not destdirexists:
1172 if not destdirexists:
1173 if len(pats) > 1 or matchmod.patkind(pats[0]):
1173 if len(pats) > 1 or matchmod.patkind(pats[0]):
1174 raise error.Abort(_('with multiple sources, destination must be an '
1174 raise error.Abort(_('with multiple sources, destination must be an '
1175 'existing directory'))
1175 'existing directory'))
1176 if util.endswithsep(dest):
1176 if util.endswithsep(dest):
1177 raise error.Abort(_('destination %s is not a directory') % dest)
1177 raise error.Abort(_('destination %s is not a directory') % dest)
1178
1178
1179 tfn = targetpathfn
1179 tfn = targetpathfn
1180 if after:
1180 if after:
1181 tfn = targetpathafterfn
1181 tfn = targetpathafterfn
1182 copylist = []
1182 copylist = []
1183 for pat in pats:
1183 for pat in pats:
1184 srcs = walkpat(pat)
1184 srcs = walkpat(pat)
1185 if not srcs:
1185 if not srcs:
1186 continue
1186 continue
1187 copylist.append((tfn(pat, dest, srcs), srcs))
1187 copylist.append((tfn(pat, dest, srcs), srcs))
1188 if not copylist:
1188 if not copylist:
1189 raise error.Abort(_('no files to copy'))
1189 raise error.Abort(_('no files to copy'))
1190
1190
1191 errors = 0
1191 errors = 0
1192 for targetpath, srcs in copylist:
1192 for targetpath, srcs in copylist:
1193 for abssrc, relsrc, exact in srcs:
1193 for abssrc, relsrc, exact in srcs:
1194 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1194 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1195 errors += 1
1195 errors += 1
1196
1196
1197 if errors:
1197 if errors:
1198 ui.warn(_('(consider using --after)\n'))
1198 ui.warn(_('(consider using --after)\n'))
1199
1199
1200 return errors != 0
1200 return errors != 0
1201
1201
1202 ## facility to let extensions process additional data into an import patch
1202 ## facility to let extensions process additional data into an import patch
1203 # list of identifiers to be executed in order
1203 # list of identifiers to be executed in order
1204 extrapreimport = [] # run before commit
1204 extrapreimport = [] # run before commit
1205 extrapostimport = [] # run after commit
1205 extrapostimport = [] # run after commit
1206 # mapping from identifier to actual import function
1206 # mapping from identifier to actual import function
1207 #
1207 #
1208 # 'preimport' are run before the commit is made and are provided the following
1208 # 'preimport' are run before the commit is made and are provided the following
1209 # arguments:
1209 # arguments:
1210 # - repo: the localrepository instance,
1210 # - repo: the localrepository instance,
1211 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1211 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
1212 # - extra: the future extra dictionary of the changeset, please mutate it,
1212 # - extra: the future extra dictionary of the changeset, please mutate it,
1213 # - opts: the import options.
1213 # - opts: the import options.
1214 # XXX ideally, we would just pass a ctx ready to be computed, that would allow
1214 # XXX ideally, we would just pass a ctx ready to be computed, that would allow
1215 # mutation of in memory commit and more. Feel free to rework the code to get
1215 # mutation of in memory commit and more. Feel free to rework the code to get
1216 # there.
1216 # there.
1217 extrapreimportmap = {}
1217 extrapreimportmap = {}
1218 # 'postimport' are run after the commit is made and are provided the following
1218 # 'postimport' are run after the commit is made and are provided the following
1219 # argument:
1219 # argument:
1220 # - ctx: the changectx created by import.
1220 # - ctx: the changectx created by import.
1221 extrapostimportmap = {}
1221 extrapostimportmap = {}
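# Illustrative sketch (hypothetical extension code, not part of this module):
#
#   def recordorigin(repo, patchdata, extra, opts):
#       # stash the node id from the patch header in the changeset extras
#       if patchdata.get('nodeid'):
#           extra['origin'] = patchdata['nodeid']
#   extrapreimport.append('origin')
#   extrapreimportmap['origin'] = recordorigin
#
#   def reportimported(ctx):
#       ctx.repo().ui.note('imported %s\n' % ctx.hex())
#   extrapostimport.append('report')
#   extrapostimportmap['report'] = reportimported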
1222
1222
1223 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1223 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
1224 """Utility function used by commands.import to import a single patch
1224 """Utility function used by commands.import to import a single patch
1225
1225
1226 This function is explicitly defined here to help the evolve extension to
1226 This function is explicitly defined here to help the evolve extension to
1227 wrap this part of the import logic.
1227 wrap this part of the import logic.
1228
1228
1229 The API is currently a bit ugly because it is a simple code translation from
1229 The API is currently a bit ugly because it is a simple code translation from
1230 the import command. Feel free to make it better.
1230 the import command. Feel free to make it better.
1231
1231
1232 :hunk: a patch (as a binary string)
1232 :hunk: a patch (as a binary string)
1233 :parents: nodes that will be the parents of the created commit
1233 :parents: nodes that will be the parents of the created commit
1234 :opts: the full dict of options passed to the import command
1234 :opts: the full dict of options passed to the import command
1235 :msgs: list to save commit message to.
1235 :msgs: list to save commit message to.
1236 (used in case we need to save it when failing)
1236 (used in case we need to save it when failing)
1237 :updatefunc: a function that updates a repo to a given node
1237 :updatefunc: a function that updates a repo to a given node
1238 updatefunc(<repo>, <node>)
1238 updatefunc(<repo>, <node>)
1239 """
1239 """
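# Illustrative sketch (hypothetical caller): the return value is a
# (message, node or None, rejects) triple, so a caller typically does
#
#   msg, node, rejects = tryimportone(ui, repo, hunk, parents, opts,
#                                     msgs, updatefunc)
#   if msg:
#       ui.status('%s\n' % msg)
#
# where updatefunc(repo, node) updates the working directory to the
# requested parent before the patch is applied.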
1240 # avoid cycle context -> subrepo -> cmdutil
1240 # avoid cycle context -> subrepo -> cmdutil
1241 from . import context
1241 from . import context
1242 extractdata = patch.extract(ui, hunk)
1242 extractdata = patch.extract(ui, hunk)
1243 tmpname = extractdata.get('filename')
1243 tmpname = extractdata.get('filename')
1244 message = extractdata.get('message')
1244 message = extractdata.get('message')
1245 user = opts.get('user') or extractdata.get('user')
1245 user = opts.get('user') or extractdata.get('user')
1246 date = opts.get('date') or extractdata.get('date')
1246 date = opts.get('date') or extractdata.get('date')
1247 branch = extractdata.get('branch')
1247 branch = extractdata.get('branch')
1248 nodeid = extractdata.get('nodeid')
1248 nodeid = extractdata.get('nodeid')
1249 p1 = extractdata.get('p1')
1249 p1 = extractdata.get('p1')
1250 p2 = extractdata.get('p2')
1250 p2 = extractdata.get('p2')
1251
1251
1252 nocommit = opts.get('no_commit')
1252 nocommit = opts.get('no_commit')
1253 importbranch = opts.get('import_branch')
1253 importbranch = opts.get('import_branch')
1254 update = not opts.get('bypass')
1254 update = not opts.get('bypass')
1255 strip = opts["strip"]
1255 strip = opts["strip"]
1256 prefix = opts["prefix"]
1256 prefix = opts["prefix"]
1257 sim = float(opts.get('similarity') or 0)
1257 sim = float(opts.get('similarity') or 0)
1258 if not tmpname:
1258 if not tmpname:
1259 return (None, None, False)
1259 return (None, None, False)
1260
1260
1261 rejects = False
1261 rejects = False
1262
1262
1263 try:
1263 try:
1264 cmdline_message = logmessage(ui, opts)
1264 cmdline_message = logmessage(ui, opts)
1265 if cmdline_message:
1265 if cmdline_message:
1266 # pickup the cmdline msg
1266 # pickup the cmdline msg
1267 message = cmdline_message
1267 message = cmdline_message
1268 elif message:
1268 elif message:
1269 # pickup the patch msg
1269 # pickup the patch msg
1270 message = message.strip()
1270 message = message.strip()
1271 else:
1271 else:
1272 # launch the editor
1272 # launch the editor
1273 message = None
1273 message = None
1274 ui.debug('message:\n%s\n' % message)
1274 ui.debug('message:\n%s\n' % message)
1275
1275
1276 if len(parents) == 1:
1276 if len(parents) == 1:
1277 parents.append(repo[nullid])
1277 parents.append(repo[nullid])
1278 if opts.get('exact'):
1278 if opts.get('exact'):
1279 if not nodeid or not p1:
1279 if not nodeid or not p1:
1280 raise error.Abort(_('not a Mercurial patch'))
1280 raise error.Abort(_('not a Mercurial patch'))
1281 p1 = repo[p1]
1281 p1 = repo[p1]
1282 p2 = repo[p2 or nullid]
1282 p2 = repo[p2 or nullid]
1283 elif p2:
1283 elif p2:
1284 try:
1284 try:
1285 p1 = repo[p1]
1285 p1 = repo[p1]
1286 p2 = repo[p2]
1286 p2 = repo[p2]
1287 # Without any options, consider p2 only if the
1287 # Without any options, consider p2 only if the
1288 # patch is being applied on top of the recorded
1288 # patch is being applied on top of the recorded
1289 # first parent.
1289 # first parent.
1290 if p1 != parents[0]:
1290 if p1 != parents[0]:
1291 p1 = parents[0]
1291 p1 = parents[0]
1292 p2 = repo[nullid]
1292 p2 = repo[nullid]
1293 except error.RepoError:
1293 except error.RepoError:
1294 p1, p2 = parents
1294 p1, p2 = parents
1295 if p2.node() == nullid:
1295 if p2.node() == nullid:
1296 ui.warn(_("warning: import the patch as a normal revision\n"
1296 ui.warn(_("warning: import the patch as a normal revision\n"
1297 "(use --exact to import the patch as a merge)\n"))
1297 "(use --exact to import the patch as a merge)\n"))
1298 else:
1298 else:
1299 p1, p2 = parents
1299 p1, p2 = parents
1300
1300
1301 n = None
1301 n = None
1302 if update:
1302 if update:
1303 if p1 != parents[0]:
1303 if p1 != parents[0]:
1304 updatefunc(repo, p1.node())
1304 updatefunc(repo, p1.node())
1305 if p2 != parents[1]:
1305 if p2 != parents[1]:
1306 repo.setparents(p1.node(), p2.node())
1306 repo.setparents(p1.node(), p2.node())
1307
1307
1308 if opts.get('exact') or importbranch:
1308 if opts.get('exact') or importbranch:
1309 repo.dirstate.setbranch(branch or 'default')
1309 repo.dirstate.setbranch(branch or 'default')
1310
1310
1311 partial = opts.get('partial', False)
1311 partial = opts.get('partial', False)
1312 files = set()
1312 files = set()
1313 try:
1313 try:
1314 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1314 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1315 files=files, eolmode=None, similarity=sim / 100.0)
1315 files=files, eolmode=None, similarity=sim / 100.0)
1316 except error.PatchError as e:
1316 except error.PatchError as e:
1317 if not partial:
1317 if not partial:
1318 raise error.Abort(str(e))
1318 raise error.Abort(str(e))
1319 if partial:
1319 if partial:
1320 rejects = True
1320 rejects = True
1321
1321
1322 files = list(files)
1322 files = list(files)
1323 if nocommit:
1323 if nocommit:
1324 if message:
1324 if message:
1325 msgs.append(message)
1325 msgs.append(message)
1326 else:
1326 else:
1327 if opts.get('exact') or p2:
1327 if opts.get('exact') or p2:
1328 # If you got here, you either use --force and know what
1328 # If you got here, you either use --force and know what
1329 # you are doing or used --exact or a merge patch while
1329 # you are doing or used --exact or a merge patch while
1330 # being updated to its first parent.
1330 # being updated to its first parent.
1331 m = None
1331 m = None
1332 else:
1332 else:
1333 m = scmutil.matchfiles(repo, files or [])
1333 m = scmutil.matchfiles(repo, files or [])
1334 editform = mergeeditform(repo[None], 'import.normal')
1334 editform = mergeeditform(repo[None], 'import.normal')
1335 if opts.get('exact'):
1335 if opts.get('exact'):
1336 editor = None
1336 editor = None
1337 else:
1337 else:
1338 editor = getcommiteditor(editform=editform, **opts)
1338 editor = getcommiteditor(editform=editform, **opts)
1339 extra = {}
1339 extra = {}
1340 for idfunc in extrapreimport:
1340 for idfunc in extrapreimport:
1341 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1341 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1342 overrides = {}
1342 overrides = {}
1343 if partial:
1343 if partial:
1344 overrides[('ui', 'allowemptycommit')] = True
1344 overrides[('ui', 'allowemptycommit')] = True
1345 with repo.ui.configoverride(overrides, 'import'):
1345 with repo.ui.configoverride(overrides, 'import'):
1346 n = repo.commit(message, user,
1346 n = repo.commit(message, user,
1347 date, match=m,
1347 date, match=m,
1348 editor=editor, extra=extra)
1348 editor=editor, extra=extra)
1349 for idfunc in extrapostimport:
1349 for idfunc in extrapostimport:
1350 extrapostimportmap[idfunc](repo[n])
1350 extrapostimportmap[idfunc](repo[n])
1351 else:
1351 else:
1352 if opts.get('exact') or importbranch:
1352 if opts.get('exact') or importbranch:
1353 branch = branch or 'default'
1353 branch = branch or 'default'
1354 else:
1354 else:
1355 branch = p1.branch()
1355 branch = p1.branch()
1356 store = patch.filestore()
1356 store = patch.filestore()
1357 try:
1357 try:
1358 files = set()
1358 files = set()
1359 try:
1359 try:
1360 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1360 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1361 files, eolmode=None)
1361 files, eolmode=None)
1362 except error.PatchError as e:
1362 except error.PatchError as e:
1363 raise error.Abort(str(e))
1363 raise error.Abort(str(e))
1364 if opts.get('exact'):
1364 if opts.get('exact'):
1365 editor = None
1365 editor = None
1366 else:
1366 else:
1367 editor = getcommiteditor(editform='import.bypass')
1367 editor = getcommiteditor(editform='import.bypass')
1368 memctx = context.memctx(repo, (p1.node(), p2.node()),
1368 memctx = context.memctx(repo, (p1.node(), p2.node()),
1369 message,
1369 message,
1370 files=files,
1370 files=files,
1371 filectxfn=store,
1371 filectxfn=store,
1372 user=user,
1372 user=user,
1373 date=date,
1373 date=date,
1374 branch=branch,
1374 branch=branch,
1375 editor=editor)
1375 editor=editor)
1376 n = memctx.commit()
1376 n = memctx.commit()
1377 finally:
1377 finally:
1378 store.close()
1378 store.close()
1379 if opts.get('exact') and nocommit:
1379 if opts.get('exact') and nocommit:
1380 # --exact with --no-commit is still useful in that it does merge
1380 # --exact with --no-commit is still useful in that it does merge
1381 # and branch bits
1381 # and branch bits
1382 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1382 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1383 elif opts.get('exact') and hex(n) != nodeid:
1383 elif opts.get('exact') and hex(n) != nodeid:
1384 raise error.Abort(_('patch is damaged or loses information'))
1384 raise error.Abort(_('patch is damaged or loses information'))
1385 msg = _('applied to working directory')
1385 msg = _('applied to working directory')
1386 if n:
1386 if n:
1387 # i18n: refers to a short changeset id
1387 # i18n: refers to a short changeset id
1388 msg = _('created %s') % short(n)
1388 msg = _('created %s') % short(n)
1389 return (msg, n, rejects)
1389 return (msg, n, rejects)
1390 finally:
1390 finally:
1391 os.unlink(tmpname)
1391 os.unlink(tmpname)
1392
1392
1393 # facility to let extensions include additional data in an exported patch
1393 # facility to let extensions include additional data in an exported patch
1394 # list of identifiers to be executed in order
1394 # list of identifiers to be executed in order
1395 extraexport = []
1395 extraexport = []
1396 # mapping from identifier to actual export function
1396 # mapping from identifier to actual export function
1397 # function has to return a string to be added to the header or None
1397 # function has to return a string to be added to the header or None
1398 # it is given two arguments (sequencenumber, changectx)
1398 # it is given two arguments (sequencenumber, changectx)
1399 extraexportmap = {}
1399 extraexportmap = {}
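# Illustrative sketch (hypothetical extension code): adding a header line to
# exported patches could look like
#
#   def topicheader(seqno, ctx):
#       topic = ctx.extra().get('topic')
#       if topic:
#           return 'Topic %s' % topic
#       return None
#   extraexport.append('topic')
#   extraexportmap['topic'] = topicheader
#
# A non-None return value is written as '# Topic <value>' by _exportsingle.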
1400
1400
1401 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1401 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1402 node = scmutil.binnode(ctx)
1402 node = scmutil.binnode(ctx)
1403 parents = [p.node() for p in ctx.parents() if p]
1403 parents = [p.node() for p in ctx.parents() if p]
1404 branch = ctx.branch()
1404 branch = ctx.branch()
1405 if switch_parent:
1405 if switch_parent:
1406 parents.reverse()
1406 parents.reverse()
1407
1407
1408 if parents:
1408 if parents:
1409 prev = parents[0]
1409 prev = parents[0]
1410 else:
1410 else:
1411 prev = nullid
1411 prev = nullid
1412
1412
1413 write("# HG changeset patch\n")
1413 write("# HG changeset patch\n")
1414 write("# User %s\n" % ctx.user())
1414 write("# User %s\n" % ctx.user())
1415 write("# Date %d %d\n" % ctx.date())
1415 write("# Date %d %d\n" % ctx.date())
1416 write("# %s\n" % util.datestr(ctx.date()))
1416 write("# %s\n" % util.datestr(ctx.date()))
1417 if branch and branch != 'default':
1417 if branch and branch != 'default':
1418 write("# Branch %s\n" % branch)
1418 write("# Branch %s\n" % branch)
1419 write("# Node ID %s\n" % hex(node))
1419 write("# Node ID %s\n" % hex(node))
1420 write("# Parent %s\n" % hex(prev))
1420 write("# Parent %s\n" % hex(prev))
1421 if len(parents) > 1:
1421 if len(parents) > 1:
1422 write("# Parent %s\n" % hex(parents[1]))
1422 write("# Parent %s\n" % hex(parents[1]))
1423
1423
1424 for headerid in extraexport:
1424 for headerid in extraexport:
1425 header = extraexportmap[headerid](seqno, ctx)
1425 header = extraexportmap[headerid](seqno, ctx)
1426 if header is not None:
1426 if header is not None:
1427 write('# %s\n' % header)
1427 write('# %s\n' % header)
1428 write(ctx.description().rstrip())
1428 write(ctx.description().rstrip())
1429 write("\n\n")
1429 write("\n\n")
1430
1430
1431 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1431 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1432 write(chunk, label=label)
1432 write(chunk, label=label)
1433
1433
1434 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1434 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1435 opts=None, match=None):
1435 opts=None, match=None):
1436 '''export changesets as hg patches
1436 '''export changesets as hg patches
1437
1437
1438 Args:
1438 Args:
1439 repo: The repository from which we're exporting revisions.
1439 repo: The repository from which we're exporting revisions.
1440 revs: A list of revisions to export as revision numbers.
1440 revs: A list of revisions to export as revision numbers.
1441 fntemplate: An optional string to use for generating patch file names.
1441 fntemplate: An optional string to use for generating patch file names.
1442 fp: An optional file-like object to which patches should be written.
1442 fp: An optional file-like object to which patches should be written.
1443 switch_parent: If True, show diffs against second parent when not nullid.
1443 switch_parent: If True, show diffs against second parent when not nullid.
1444 Default is false, which always shows diff against p1.
1444 Default is false, which always shows diff against p1.
1445 opts: diff options to use for generating the patch.
1445 opts: diff options to use for generating the patch.
1446 match: If specified, only export changes to files matching this matcher.
1446 match: If specified, only export changes to files matching this matcher.
1447
1447
1448 Returns:
1448 Returns:
1449 Nothing.
1449 Nothing.
1450
1450
1451 Side Effect:
1451 Side Effect:
1452 "HG Changeset Patch" data is emitted to one of the following
1452 "HG Changeset Patch" data is emitted to one of the following
1453 destinations:
1453 destinations:
1454 fp is specified: All revs are written to the specified
1454 fp is specified: All revs are written to the specified
1455 file-like object.
1455 file-like object.
1456 fntemplate specified: Each rev is written to a unique file named using
1456 fntemplate specified: Each rev is written to a unique file named using
1457 the given template.
1457 the given template.
1458 Neither fp nor template specified: All revs written to repo.ui.write()
1458 Neither fp nor template specified: All revs written to repo.ui.write()
1459 '''
1459 '''
1460
1460
1461 total = len(revs)
1461 total = len(revs)
1462 revwidth = max(len(str(rev)) for rev in revs)
1462 revwidth = max(len(str(rev)) for rev in revs)
1463 filemode = {}
1463 filemode = {}
1464
1464
1465 write = None
1465 write = None
1466 dest = '<unnamed>'
1466 dest = '<unnamed>'
1467 if fp:
1467 if fp:
1468 dest = getattr(fp, 'name', dest)
1468 dest = getattr(fp, 'name', dest)
1469 def write(s, **kw):
1469 def write(s, **kw):
1470 fp.write(s)
1470 fp.write(s)
1471 elif not fntemplate:
1471 elif not fntemplate:
1472 write = repo.ui.write
1472 write = repo.ui.write
1473
1473
1474 for seqno, rev in enumerate(revs, 1):
1474 for seqno, rev in enumerate(revs, 1):
1475 ctx = repo[rev]
1475 ctx = repo[rev]
1476 fo = None
1476 fo = None
1477 if not fp and fntemplate:
1477 if not fp and fntemplate:
1478 desc_lines = ctx.description().rstrip().split('\n')
1478 desc_lines = ctx.description().rstrip().split('\n')
1479 desc = desc_lines[0] # Commit always has a first line.
1479 desc = desc_lines[0] # Commit always has a first line.
1480 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1480 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1481 total=total, seqno=seqno, revwidth=revwidth,
1481 total=total, seqno=seqno, revwidth=revwidth,
1482 mode='wb', modemap=filemode)
1482 mode='wb', modemap=filemode)
1483 dest = fo.name
1483 dest = fo.name
1484 def write(s, **kw):
1484 def write(s, **kw):
1485 fo.write(s)
1485 fo.write(s)
1486 if not dest.startswith('<'):
1486 if not dest.startswith('<'):
1487 repo.ui.note("%s\n" % dest)
1487 repo.ui.note("%s\n" % dest)
1488 _exportsingle(
1488 _exportsingle(
1489 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1489 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1490 if fo is not None:
1490 if fo is not None:
1491 fo.close()
1491 fo.close()
1492
1492
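# Illustrative sketch (hypothetical caller): exporting two revisions to
# per-changeset files named after the default template could look like
#
#   cmdutil.export(repo, [7, 8], fntemplate='hg-%h.patch',
#                  opts=patch.diffallopts(ui))
#
# whereas passing an open file object as fp (or neither fp nor fntemplate)
# writes every patch to a single destination, as the docstring above
# describes.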
1493 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1493 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1494 changes=None, stat=False, fp=None, prefix='',
1494 changes=None, stat=False, fp=None, prefix='',
1495 root='', listsubrepos=False, hunksfilterfn=None):
1495 root='', listsubrepos=False, hunksfilterfn=None):
1496 '''show diff or diffstat.'''
1496 '''show diff or diffstat.'''
1497 if fp is None:
1497 if fp is None:
1498 write = ui.write
1498 write = ui.write
1499 else:
1499 else:
1500 def write(s, **kw):
1500 def write(s, **kw):
1501 fp.write(s)
1501 fp.write(s)
1502
1502
1503 if root:
1503 if root:
1504 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1504 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1505 else:
1505 else:
1506 relroot = ''
1506 relroot = ''
1507 if relroot != '':
1507 if relroot != '':
1508 # XXX relative roots currently don't work if the root is within a
1508 # XXX relative roots currently don't work if the root is within a
1509 # subrepo
1509 # subrepo
1510 uirelroot = match.uipath(relroot)
1510 uirelroot = match.uipath(relroot)
1511 relroot += '/'
1511 relroot += '/'
1512 for matchroot in match.files():
1512 for matchroot in match.files():
1513 if not matchroot.startswith(relroot):
1513 if not matchroot.startswith(relroot):
1514 ui.warn(_('warning: %s not inside relative root %s\n') % (
1514 ui.warn(_('warning: %s not inside relative root %s\n') % (
1515 match.uipath(matchroot), uirelroot))
1515 match.uipath(matchroot), uirelroot))
1516
1516
1517 if stat:
1517 if stat:
1518 diffopts = diffopts.copy(context=0)
1518 diffopts = diffopts.copy(context=0)
1519 width = 80
1519 width = 80
1520 if not ui.plain():
1520 if not ui.plain():
1521 width = ui.termwidth()
1521 width = ui.termwidth()
1522 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1522 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1523 prefix=prefix, relroot=relroot,
1523 prefix=prefix, relroot=relroot,
1524 hunksfilterfn=hunksfilterfn)
1524 hunksfilterfn=hunksfilterfn)
1525 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1525 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1526 width=width):
1526 width=width):
1527 write(chunk, label=label)
1527 write(chunk, label=label)
1528 else:
1528 else:
1529 for chunk, label in patch.diffui(repo, node1, node2, match,
1529 for chunk, label in patch.diffui(repo, node1, node2, match,
1530 changes, diffopts, prefix=prefix,
1530 changes, diffopts, prefix=prefix,
1531 relroot=relroot,
1531 relroot=relroot,
1532 hunksfilterfn=hunksfilterfn):
1532 hunksfilterfn=hunksfilterfn):
1533 write(chunk, label=label)
1533 write(chunk, label=label)
1534
1534
1535 if listsubrepos:
1535 if listsubrepos:
1536 ctx1 = repo[node1]
1536 ctx1 = repo[node1]
1537 ctx2 = repo[node2]
1537 ctx2 = repo[node2]
1538 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1538 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1539 tempnode2 = node2
1539 tempnode2 = node2
1540 try:
1540 try:
1541 if node2 is not None:
1541 if node2 is not None:
1542 tempnode2 = ctx2.substate[subpath][1]
1542 tempnode2 = ctx2.substate[subpath][1]
1543 except KeyError:
1543 except KeyError:
1544 # A subrepo that existed in node1 was deleted between node1 and
1544 # A subrepo that existed in node1 was deleted between node1 and
1545 # node2 (inclusive). Thus, ctx2's substate won't contain that
1545 # node2 (inclusive). Thus, ctx2's substate won't contain that
1546 # subpath. The best we can do is to ignore it.
1546 # subpath. The best we can do is to ignore it.
1547 tempnode2 = None
1547 tempnode2 = None
1548 submatch = matchmod.subdirmatcher(subpath, match)
1548 submatch = matchmod.subdirmatcher(subpath, match)
1549 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1549 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1550 stat=stat, fp=fp, prefix=prefix)
1550 stat=stat, fp=fp, prefix=prefix)
1551
1551
1552 def _changesetlabels(ctx):
1552 def _changesetlabels(ctx):
1553 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1553 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1554 if ctx.obsolete():
1554 if ctx.obsolete():
1555 labels.append('changeset.obsolete')
1555 labels.append('changeset.obsolete')
1556 if ctx.isunstable():
1556 if ctx.isunstable():
1557 labels.append('changeset.unstable')
1557 labels.append('changeset.unstable')
1558 for instability in ctx.instabilities():
1558 for instability in ctx.instabilities():
1559 labels.append('instability.%s' % instability)
1559 labels.append('instability.%s' % instability)
1560 return ' '.join(labels)
1560 return ' '.join(labels)
1561
1561
1562 class changeset_printer(object):
1562 class changeset_printer(object):
1563 '''show changeset information when templating not requested.'''
1563 '''show changeset information when templating not requested.'''
1564
1564
1565 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1565 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1566 self.ui = ui
1566 self.ui = ui
1567 self.repo = repo
1567 self.repo = repo
1568 self.buffered = buffered
1568 self.buffered = buffered
1569 self.matchfn = matchfn
1569 self.matchfn = matchfn
1570 self.diffopts = diffopts
1570 self.diffopts = diffopts
1571 self.header = {}
1571 self.header = {}
1572 self.hunk = {}
1572 self.hunk = {}
1573 self.lastheader = None
1573 self.lastheader = None
1574 self.footer = None
1574 self.footer = None
1575
1575
1576 def flush(self, ctx):
1576 def flush(self, ctx):
1577 rev = ctx.rev()
1577 rev = ctx.rev()
1578 if rev in self.header:
1578 if rev in self.header:
1579 h = self.header[rev]
1579 h = self.header[rev]
1580 if h != self.lastheader:
1580 if h != self.lastheader:
1581 self.lastheader = h
1581 self.lastheader = h
1582 self.ui.write(h)
1582 self.ui.write(h)
1583 del self.header[rev]
1583 del self.header[rev]
1584 if rev in self.hunk:
1584 if rev in self.hunk:
1585 self.ui.write(self.hunk[rev])
1585 self.ui.write(self.hunk[rev])
1586 del self.hunk[rev]
1586 del self.hunk[rev]
1587 return 1
1587 return 1
1588 return 0
1588 return 0
1589
1589
1590 def close(self):
1590 def close(self):
1591 if self.footer:
1591 if self.footer:
1592 self.ui.write(self.footer)
1592 self.ui.write(self.footer)
1593
1593
1594 def show(self, ctx, copies=None, matchfn=None, hunksfilterfn=None,
1594 def show(self, ctx, copies=None, matchfn=None, hunksfilterfn=None,
1595 **props):
1595 **props):
1596 props = pycompat.byteskwargs(props)
1596 props = pycompat.byteskwargs(props)
1597 if self.buffered:
1597 if self.buffered:
1598 self.ui.pushbuffer(labeled=True)
1598 self.ui.pushbuffer(labeled=True)
1599 self._show(ctx, copies, matchfn, hunksfilterfn, props)
1599 self._show(ctx, copies, matchfn, hunksfilterfn, props)
1600 self.hunk[ctx.rev()] = self.ui.popbuffer()
1600 self.hunk[ctx.rev()] = self.ui.popbuffer()
1601 else:
1601 else:
1602 self._show(ctx, copies, matchfn, hunksfilterfn, props)
1602 self._show(ctx, copies, matchfn, hunksfilterfn, props)
1603
1603
1604 def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
1604 def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
1605 '''show a single changeset or file revision'''
1605 '''show a single changeset or file revision'''
1606 changenode = ctx.node()
1606 changenode = ctx.node()
1607 rev = ctx.rev()
1607 rev = ctx.rev()
1608
1608
1609 if self.ui.quiet:
1609 if self.ui.quiet:
1610 self.ui.write("%s\n" % scmutil.formatchangeid(ctx),
1610 self.ui.write("%s\n" % scmutil.formatchangeid(ctx),
1611 label='log.node')
1611 label='log.node')
1612 return
1612 return
1613
1613
1614 date = util.datestr(ctx.date())
1614 date = util.datestr(ctx.date())
1615
1615
1616 # i18n: column positioning for "hg log"
1616 # i18n: column positioning for "hg log"
1617 self.ui.write(_("changeset: %s\n") % scmutil.formatchangeid(ctx),
1617 self.ui.write(_("changeset: %s\n") % scmutil.formatchangeid(ctx),
1618 label=_changesetlabels(ctx))
1618 label=_changesetlabels(ctx))
1619
1619
1620 # branches are shown first before any other names due to backwards
1620 # branches are shown first before any other names due to backwards
1621 # compatibility
1621 # compatibility
1622 branch = ctx.branch()
1622 branch = ctx.branch()
1623 # don't show the default branch name
1623 # don't show the default branch name
1624 if branch != 'default':
1624 if branch != 'default':
1625 # i18n: column positioning for "hg log"
1625 # i18n: column positioning for "hg log"
1626 self.ui.write(_("branch: %s\n") % branch,
1626 self.ui.write(_("branch: %s\n") % branch,
1627 label='log.branch')
1627 label='log.branch')
1628
1628
1629 for nsname, ns in self.repo.names.iteritems():
1629 for nsname, ns in self.repo.names.iteritems():
1630 # branches has special logic already handled above, so here we just
1630 # branches has special logic already handled above, so here we just
1631 # skip it
1631 # skip it
1632 if nsname == 'branches':
1632 if nsname == 'branches':
1633 continue
1633 continue
1634 # we will use the templatename as the color name since those two
1634 # we will use the templatename as the color name since those two
1635 # should be the same
1635 # should be the same
1636 for name in ns.names(self.repo, changenode):
1636 for name in ns.names(self.repo, changenode):
1637 self.ui.write(ns.logfmt % name,
1637 self.ui.write(ns.logfmt % name,
1638 label='log.%s' % ns.colorname)
1638 label='log.%s' % ns.colorname)
1639 if self.ui.debugflag:
1639 if self.ui.debugflag:
1640 # i18n: column positioning for "hg log"
1640 # i18n: column positioning for "hg log"
1641 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1641 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1642 label='log.phase')
1642 label='log.phase')
1643 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1643 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1644 label = 'log.parent changeset.%s' % pctx.phasestr()
1644 label = 'log.parent changeset.%s' % pctx.phasestr()
1645 # i18n: column positioning for "hg log"
1645 # i18n: column positioning for "hg log"
1646 self.ui.write(_("parent: %s\n") % scmutil.formatchangeid(pctx),
1646 self.ui.write(_("parent: %s\n") % scmutil.formatchangeid(pctx),
1647 label=label)
1647 label=label)
1648
1648
1649 if self.ui.debugflag and rev is not None:
1649 if self.ui.debugflag and rev is not None:
1650 mnode = ctx.manifestnode()
1650 mnode = ctx.manifestnode()
1651 mrev = self.repo.manifestlog._revlog.rev(mnode)
1651 mrev = self.repo.manifestlog._revlog.rev(mnode)
1652 # i18n: column positioning for "hg log"
1652 # i18n: column positioning for "hg log"
1653 self.ui.write(_("manifest: %s\n")
1653 self.ui.write(_("manifest: %s\n")
1654 % scmutil.formatrevnode(self.ui, mrev, mnode),
1654 % scmutil.formatrevnode(self.ui, mrev, mnode),
1655 label='ui.debug log.manifest')
1655 label='ui.debug log.manifest')
1656 # i18n: column positioning for "hg log"
1656 # i18n: column positioning for "hg log"
1657 self.ui.write(_("user: %s\n") % ctx.user(),
1657 self.ui.write(_("user: %s\n") % ctx.user(),
1658 label='log.user')
1658 label='log.user')
1659 # i18n: column positioning for "hg log"
1659 # i18n: column positioning for "hg log"
1660 self.ui.write(_("date: %s\n") % date,
1660 self.ui.write(_("date: %s\n") % date,
1661 label='log.date')
1661 label='log.date')
1662
1662
1663 if ctx.isunstable():
1663 if ctx.isunstable():
1664 # i18n: column positioning for "hg log"
1664 # i18n: column positioning for "hg log"
1665 instabilities = ctx.instabilities()
1665 instabilities = ctx.instabilities()
1666 self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
1666 self.ui.write(_("instability: %s\n") % ', '.join(instabilities),
1667 label='log.instability')
1667 label='log.instability')
1668
1668
1669 elif ctx.obsolete():
1669 elif ctx.obsolete():
1670 self._showobsfate(ctx)
1670 self._showobsfate(ctx)
1671
1671
1672 self._exthook(ctx)
1672 self._exthook(ctx)
1673
1673
1674 if self.ui.debugflag:
1674 if self.ui.debugflag:
1675 files = ctx.p1().status(ctx)[:3]
1675 files = ctx.p1().status(ctx)[:3]
1676 for key, value in zip([# i18n: column positioning for "hg log"
1676 for key, value in zip([# i18n: column positioning for "hg log"
1677 _("files:"),
1677 _("files:"),
1678 # i18n: column positioning for "hg log"
1678 # i18n: column positioning for "hg log"
1679 _("files+:"),
1679 _("files+:"),
1680 # i18n: column positioning for "hg log"
1680 # i18n: column positioning for "hg log"
1681 _("files-:")], files):
1681 _("files-:")], files):
1682 if value:
1682 if value:
1683 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1683 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1684 label='ui.debug log.files')
1684 label='ui.debug log.files')
1685 elif ctx.files() and self.ui.verbose:
1685 elif ctx.files() and self.ui.verbose:
1686 # i18n: column positioning for "hg log"
1686 # i18n: column positioning for "hg log"
1687 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1687 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1688 label='ui.note log.files')
1688 label='ui.note log.files')
1689 if copies and self.ui.verbose:
1689 if copies and self.ui.verbose:
1690 copies = ['%s (%s)' % c for c in copies]
1690 copies = ['%s (%s)' % c for c in copies]
1691 # i18n: column positioning for "hg log"
1691 # i18n: column positioning for "hg log"
1692 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1692 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1693 label='ui.note log.copies')
1693 label='ui.note log.copies')
1694
1694
1695 extra = ctx.extra()
1695 extra = ctx.extra()
1696 if extra and self.ui.debugflag:
1696 if extra and self.ui.debugflag:
1697 for key, value in sorted(extra.items()):
1697 for key, value in sorted(extra.items()):
1698 # i18n: column positioning for "hg log"
1698 # i18n: column positioning for "hg log"
1699 self.ui.write(_("extra: %s=%s\n")
1699 self.ui.write(_("extra: %s=%s\n")
1700 % (key, util.escapestr(value)),
1700 % (key, util.escapestr(value)),
1701 label='ui.debug log.extra')
1701 label='ui.debug log.extra')
1702
1702
1703 description = ctx.description().strip()
1703 description = ctx.description().strip()
1704 if description:
1704 if description:
1705 if self.ui.verbose:
1705 if self.ui.verbose:
1706 self.ui.write(_("description:\n"),
1706 self.ui.write(_("description:\n"),
1707 label='ui.note log.description')
1707 label='ui.note log.description')
1708 self.ui.write(description,
1708 self.ui.write(description,
1709 label='ui.note log.description')
1709 label='ui.note log.description')
1710 self.ui.write("\n\n")
1710 self.ui.write("\n\n")
1711 else:
1711 else:
1712 # i18n: column positioning for "hg log"
1712 # i18n: column positioning for "hg log"
1713 self.ui.write(_("summary: %s\n") %
1713 self.ui.write(_("summary: %s\n") %
1714 description.splitlines()[0],
1714 description.splitlines()[0],
1715 label='log.summary')
1715 label='log.summary')
1716 self.ui.write("\n")
1716 self.ui.write("\n")
1717
1717
1718 self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)
1718 self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)
1719
1719
1720 def _showobsfate(self, ctx):
1720 def _showobsfate(self, ctx):
1721 obsfate = templatekw.showobsfate(repo=self.repo, ctx=ctx, ui=self.ui)
1721 obsfate = templatekw.showobsfate(repo=self.repo, ctx=ctx, ui=self.ui)
1722
1722
1723 if obsfate:
1723 if obsfate:
1724 for obsfateline in obsfate:
1724 for obsfateline in obsfate:
1725 # i18n: column positioning for "hg log"
1725 # i18n: column positioning for "hg log"
1726 self.ui.write(_("obsolete: %s\n") % obsfateline,
1726 self.ui.write(_("obsolete: %s\n") % obsfateline,
1727 label='log.obsfate')
1727 label='log.obsfate')
1728
1728
1729 def _exthook(self, ctx):
1729 def _exthook(self, ctx):
1730 '''empty method used by extensions as a hook point
1730 '''empty method used by extensions as a hook point
1731 '''
1731 '''
1732
1732
1733 def showpatch(self, ctx, matchfn, hunksfilterfn=None):
1733 def showpatch(self, ctx, matchfn, hunksfilterfn=None):
1734 if not matchfn:
1734 if not matchfn:
1735 matchfn = self.matchfn
1735 matchfn = self.matchfn
1736 if matchfn:
1736 if matchfn:
1737 stat = self.diffopts.get('stat')
1737 stat = self.diffopts.get('stat')
1738 diff = self.diffopts.get('patch')
1738 diff = self.diffopts.get('patch')
1739 diffopts = patch.diffallopts(self.ui, self.diffopts)
1739 diffopts = patch.diffallopts(self.ui, self.diffopts)
1740 node = ctx.node()
1740 node = ctx.node()
1741 prev = ctx.p1().node()
1742 if stat:
1743 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1744 match=matchfn, stat=True,
1745 hunksfilterfn=hunksfilterfn)
1746 if diff:
1747 if stat:
1748 self.ui.write("\n")
1749 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1750 match=matchfn, stat=False,
1751 hunksfilterfn=hunksfilterfn)
1752 self.ui.write("\n")
1753
1754 class jsonchangeset(changeset_printer):
1755 '''format changeset information.'''
1756
1757 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1758 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1759 self.cache = {}
1760 self._first = True
1761
1762 def close(self):
1763 if not self._first:
1764 self.ui.write("\n]\n")
1765 else:
1766 self.ui.write("[]\n")
1767
1768 def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
1769 '''show a single changeset or file revision'''
1770 rev = ctx.rev()
1771 if rev is None:
1772 jrev = jnode = 'null'
1773 else:
1774 jrev = '%d' % rev
1775 jnode = '"%s"' % hex(ctx.node())
1776 j = encoding.jsonescape
1777
1778 if self._first:
1779 self.ui.write("[\n {")
1780 self._first = False
1781 else:
1782 self.ui.write(",\n {")
1783
1784 if self.ui.quiet:
1785 self.ui.write(('\n "rev": %s') % jrev)
1786 self.ui.write((',\n "node": %s') % jnode)
1787 self.ui.write('\n }')
1788 return
1789
1790 self.ui.write(('\n "rev": %s') % jrev)
1791 self.ui.write((',\n "node": %s') % jnode)
1792 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1793 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1794 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1795 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1796 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1797
1798 self.ui.write((',\n "bookmarks": [%s]') %
1799 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1800 self.ui.write((',\n "tags": [%s]') %
1801 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1802 self.ui.write((',\n "parents": [%s]') %
1803 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1804
1805 if self.ui.debugflag:
1806 if rev is None:
1807 jmanifestnode = 'null'
1808 else:
1809 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1810 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1811
1812 self.ui.write((',\n "extra": {%s}') %
1813 ", ".join('"%s": "%s"' % (j(k), j(v))
1814 for k, v in ctx.extra().items()))
1815
1816 files = ctx.p1().status(ctx)
1817 self.ui.write((',\n "modified": [%s]') %
1818 ", ".join('"%s"' % j(f) for f in files[0]))
1819 self.ui.write((',\n "added": [%s]') %
1820 ", ".join('"%s"' % j(f) for f in files[1]))
1821 self.ui.write((',\n "removed": [%s]') %
1822 ", ".join('"%s"' % j(f) for f in files[2]))
1823
1824 elif self.ui.verbose:
1825 self.ui.write((',\n "files": [%s]') %
1826 ", ".join('"%s"' % j(f) for f in ctx.files()))
1827
1828 if copies:
1829 self.ui.write((',\n "copies": {%s}') %
1830 ", ".join('"%s": "%s"' % (j(k), j(v))
1831 for k, v in copies))
1832
1833 matchfn = self.matchfn
1834 if matchfn:
1835 stat = self.diffopts.get('stat')
1836 diff = self.diffopts.get('patch')
1837 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1838 node, prev = ctx.node(), ctx.p1().node()
1839 if stat:
1840 self.ui.pushbuffer()
1841 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1842 match=matchfn, stat=True)
1843 self.ui.write((',\n "diffstat": "%s"')
1844 % j(self.ui.popbuffer()))
1845 if diff:
1846 self.ui.pushbuffer()
1847 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1848 match=matchfn, stat=False)
1849 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1850
1851 self.ui.write("\n }")
1852
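# Illustration (not part of the original source): the writes in
# jsonchangeset._show() and close() above produce output of roughly this
# shape for a single revision under --quiet (node hash abbreviated here):
#
#   [
#    {
#     "rev": 0,
#     "node": "..."
#    }
#   ]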
1853 class changeset_templater(changeset_printer):
1854 '''format changeset information.'''
1855
1856 # Arguments before "buffered" used to be positional. Consider not
1857 # adding/removing arguments before "buffered" to not break callers.
1858 def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
1859 buffered=False):
1860 diffopts = diffopts or {}
1861
1862 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1863 self.t = formatter.loadtemplater(ui, tmplspec,
1864 cache=templatekw.defaulttempl)
1865 self._counter = itertools.count()
1866 self.cache = {}
1867
1868 self._tref = tmplspec.ref
1869 self._parts = {'header': '', 'footer': '',
1870 tmplspec.ref: tmplspec.ref,
1871 'docheader': '', 'docfooter': '',
1872 'separator': ''}
1873 if tmplspec.mapfile:
1874 # find correct templates for current mode, for backward
1875 # compatibility with 'log -v/-q/--debug' using a mapfile
1876 tmplmodes = [
1877 (True, ''),
1878 (self.ui.verbose, '_verbose'),
1879 (self.ui.quiet, '_quiet'),
1880 (self.ui.debugflag, '_debug'),
1881 ]
1882 for mode, postfix in tmplmodes:
1883 for t in self._parts:
1884 cur = t + postfix
1885 if mode and cur in self.t:
1886 self._parts[t] = cur
1887 else:
1888 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
1889 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
1890 self._parts.update(m)
1891
1892 if self._parts['docheader']:
1893 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1894
1895 def close(self):
1896 if self._parts['docfooter']:
1897 if not self.footer:
1898 self.footer = ""
1899 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1900 return super(changeset_templater, self).close()
1901
1902 def _show(self, ctx, copies, matchfn, hunksfilterfn, props):
1903 '''show a single changeset or file revision'''
1904 props = props.copy()
1905 props.update(templatekw.keywords)
1906 props['templ'] = self.t
1907 props['ctx'] = ctx
1908 props['repo'] = self.repo
1909 props['ui'] = self.repo.ui
1910 props['index'] = index = next(self._counter)
1911 props['revcache'] = {'copies': copies}
1912 props['cache'] = self.cache
1913 props = pycompat.strkwargs(props)
1914
1915 # write separator, which wouldn't work well with the header part below
1916 # since there's inherently a conflict between header (across items) and
1917 # separator (per item)
1918 if self._parts['separator'] and index > 0:
1919 self.ui.write(templater.stringify(self.t(self._parts['separator'])))
1920
1921 # write header
1922 if self._parts['header']:
1923 h = templater.stringify(self.t(self._parts['header'], **props))
1924 if self.buffered:
1925 self.header[ctx.rev()] = h
1926 else:
1927 if self.lastheader != h:
1928 self.lastheader = h
1929 self.ui.write(h)
1930
1931 # write changeset metadata, then patch if requested
1932 key = self._parts[self._tref]
1933 self.ui.write(templater.stringify(self.t(key, **props)))
1934 self.showpatch(ctx, matchfn, hunksfilterfn=hunksfilterfn)
1935
1936 if self._parts['footer']:
1937 if not self.footer:
1938 self.footer = templater.stringify(
1939 self.t(self._parts['footer'], **props))
1940
1941 def logtemplatespec(tmpl, mapfile):
1942 if mapfile:
1943 return formatter.templatespec('changeset', tmpl, mapfile)
1944 else:
1945 return formatter.templatespec('', tmpl, None)
1946
1947 def _lookuplogtemplate(ui, tmpl, style):
1948 """Find the template matching the given template spec or style
1949
1950 See formatter.lookuptemplate() for details.
1951 """
1952
1953 # ui settings
1954 if not tmpl and not style: # templates are stronger than style
1955 tmpl = ui.config('ui', 'logtemplate')
1956 if tmpl:
1957 return logtemplatespec(templater.unquotestring(tmpl), None)
1958 else:
1959 style = util.expandpath(ui.config('ui', 'style'))
1960
1961 if not tmpl and style:
1962 mapfile = style
1963 if not os.path.split(mapfile)[0]:
1964 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1965 or templater.templatepath(mapfile))
1966 if mapname:
1967 mapfile = mapname
1968 return logtemplatespec(None, mapfile)
1969
1970 if not tmpl:
1971 return logtemplatespec(None, None)
1972
1973 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1974
1975 def makelogtemplater(ui, repo, tmpl, buffered=False):
1976 """Create a changeset_templater from a literal template 'tmpl'"""
1977 spec = logtemplatespec(tmpl, None)
1978 return changeset_templater(ui, repo, spec, buffered=buffered)
1979
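# Illustrative sketch (assumed usage, not from the original source): a caller
# can build a displayer from a literal template string and feed it contexts:
#
#   displayer = makelogtemplater(ui, repo, '{rev}:{node|short} {desc|firstline}\n')
#   displayer.show(repo['tip'])
#   displayer.close()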
1980 def show_changeset(ui, repo, opts, buffered=False):
1981 """show one changeset using template or regular display.
1982
1983 Display format will be the first non-empty hit of:
1984 1. option 'template'
1985 2. option 'style'
1986 3. [ui] setting 'logtemplate'
1987 4. [ui] setting 'style'
1988 If all of these values are either unset or the empty string,
1989 regular display via changeset_printer() is done.
1990 """
1991 # options
1992 match = None
1993 if opts.get('patch') or opts.get('stat'):
1994 match = scmutil.matchall(repo)
1995
1996 if opts.get('template') == 'json':
1997 return jsonchangeset(ui, repo, match, opts, buffered)
1998
1999 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
2000
2001 if not spec.ref and not spec.tmpl and not spec.mapfile:
2002 return changeset_printer(ui, repo, match, opts, buffered)
2003
2004 return changeset_templater(ui, repo, spec, match, opts, buffered)
2005
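# Illustration (assumed configuration, not from the original source): with no
# --template/--style option given, step 3 of the precedence above picks up a
# setting such as
#
#   [ui]
#   logtemplate = {rev} {desc|firstline}\n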
2006 def showmarker(fm, marker, index=None):
2007 """utility function to display obsolescence marker in a readable way
2008
2009 To be used by debug function."""
2010 if index is not None:
2011 fm.write('index', '%i ', index)
2012 fm.write('prednode', '%s ', hex(marker.prednode()))
2013 succs = marker.succnodes()
2014 fm.condwrite(succs, 'succnodes', '%s ',
2015 fm.formatlist(map(hex, succs), name='node'))
2016 fm.write('flag', '%X ', marker.flags())
2017 parents = marker.parentnodes()
2018 if parents is not None:
2019 fm.write('parentnodes', '{%s} ',
2020 fm.formatlist(map(hex, parents), name='node', sep=', '))
2021 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
2022 meta = marker.metadata().copy()
2023 meta.pop('date', None)
2024 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
2025 fm.plain('\n')
2026
2027 def finddate(ui, repo, date):
2028 """Find the tipmost changeset that matches the given date spec"""
2029
2030 df = util.matchdate(date)
2031 m = scmutil.matchall(repo)
2032 results = {}
2033
2034 def prep(ctx, fns):
2035 d = ctx.date()
2036 if df(d[0]):
2037 results[ctx.rev()] = d
2038
2039 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
2040 rev = ctx.rev()
2041 if rev in results:
2042 ui.status(_("found revision %s from %s\n") %
2043 (rev, util.datestr(results[rev])))
2044 return '%d' % rev
2045
2046 raise error.Abort(_("revision matching date not found"))
2047
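# Illustrative sketch (assumption, not from the original source): finddate()
# returns the matching revision number as a string, or aborts when nothing
# matches, e.g.
#
#   rev = finddate(ui, repo, '2017-10-01')   # e.g. '42'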
2048 def increasingwindows(windowsize=8, sizelimit=512):
2049 while True:
2050 yield windowsize
2051 if windowsize < sizelimit:
2052 windowsize *= 2
2053
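# Illustration (not from the original source): the generator above doubles the
# window size until the 512 limit is reached, then keeps yielding 512, e.g.
#
#   import itertools
#   list(itertools.islice(increasingwindows(), 8))
#   # -> [8, 16, 32, 64, 128, 256, 512, 512]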
2054 class FileWalkError(Exception):
2055 pass
2056
2057 def walkfilerevs(repo, match, follow, revs, fncache):
2058 '''Walks the file history for the matched files.
2059
2060 Returns the changeset revs that are involved in the file history.
2061
2062 Throws FileWalkError if the file history can't be walked using
2063 filelogs alone.
2064 '''
2065 wanted = set()
2066 copies = []
2067 minrev, maxrev = min(revs), max(revs)
2068 def filerevgen(filelog, last):
2069 """
2070 Only files, no patterns. Check the history of each file.
2071
2072 Examines filelog entries within minrev, maxrev linkrev range
2073 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
2074 tuples in backwards order
2075 """
2076 cl_count = len(repo)
2077 revs = []
2078 for j in xrange(0, last + 1):
2079 linkrev = filelog.linkrev(j)
2080 if linkrev < minrev:
2081 continue
2082 # only yield rev for which we have the changelog, it can
2083 # happen while doing "hg log" during a pull or commit
2084 if linkrev >= cl_count:
2085 break
2086
2087 parentlinkrevs = []
2088 for p in filelog.parentrevs(j):
2089 if p != nullrev:
2090 parentlinkrevs.append(filelog.linkrev(p))
2091 n = filelog.node(j)
2092 revs.append((linkrev, parentlinkrevs,
2093 follow and filelog.renamed(n)))
2094
2095 return reversed(revs)
2096 def iterfiles():
2097 pctx = repo['.']
2098 for filename in match.files():
2099 if follow:
2100 if filename not in pctx:
2101 raise error.Abort(_('cannot follow file not in parent '
2102 'revision: "%s"') % filename)
2103 yield filename, pctx[filename].filenode()
2104 else:
2105 yield filename, None
2106 for filename_node in copies:
2107 yield filename_node
2108
2109 for file_, node in iterfiles():
2110 filelog = repo.file(file_)
2111 if not len(filelog):
2112 if node is None:
2113 # A zero count may be a directory or deleted file, so
2114 # try to find matching entries on the slow path.
2115 if follow:
2116 raise error.Abort(
2117 _('cannot follow nonexistent file: "%s"') % file_)
2118 raise FileWalkError("Cannot walk via filelog")
2119 else:
2120 continue
2121
2122 if node is None:
2123 last = len(filelog) - 1
2124 else:
2125 last = filelog.rev(node)
2126
2127 # keep track of all ancestors of the file
2128 ancestors = {filelog.linkrev(last)}
2129
2130 # iterate from latest to oldest revision
2131 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
2132 if not follow:
2133 if rev > maxrev:
2134 continue
2135 else:
2136 # Note that last might not be the first interesting
2137 # rev to us:
2138 # if the file has been changed after maxrev, we'll
2139 # have linkrev(last) > maxrev, and we still need
2140 # to explore the file graph
2141 if rev not in ancestors:
2142 continue
2143 # XXX insert 1327 fix here
2144 if flparentlinkrevs:
2145 ancestors.update(flparentlinkrevs)
2146
2147 fncache.setdefault(rev, []).append(file_)
2148 wanted.add(rev)
2149 if copied:
2150 copies.append(copied)
2151
2152 return wanted
2153
2154 class _followfilter(object):
2155 def __init__(self, repo, onlyfirst=False):
2156 self.repo = repo
2157 self.startrev = nullrev
2158 self.roots = set()
2159 self.onlyfirst = onlyfirst
2160
2161 def match(self, rev):
2162 def realparents(rev):
2163 if self.onlyfirst:
2164 return self.repo.changelog.parentrevs(rev)[0:1]
2165 else:
2166 return filter(lambda x: x != nullrev,
2167 self.repo.changelog.parentrevs(rev))
2168
2169 if self.startrev == nullrev:
2170 self.startrev = rev
2171 return True
2172
2173 if rev > self.startrev:
2174 # forward: all descendants
2175 if not self.roots:
2176 self.roots.add(self.startrev)
2177 for parent in realparents(rev):
2178 if parent in self.roots:
2179 self.roots.add(rev)
2180 return True
2181 else:
2182 # backwards: all parents
2183 if not self.roots:
2184 self.roots.update(realparents(self.startrev))
2185 if rev in self.roots:
2186 self.roots.remove(rev)
2187 self.roots.update(realparents(rev))
2188 return True
2189
2190 return False
2191
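# Annotation (not in the original source): the first revision passed to
# _followfilter.match() becomes startrev; later calls report whether a
# revision is a descendant of it (when scanning forward) or an ancestor of it
# (when scanning backward), using the roots set to track membership.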
2192 def walkchangerevs(repo, match, opts, prepare):
2193 '''Iterate over files and the revs in which they changed.
2194
2195 Callers most commonly need to iterate backwards over the history
2196 in which they are interested. Doing so has awful (quadratic-looking)
2197 performance, so we use iterators in a "windowed" way.
2198
2199 We walk a window of revisions in the desired order. Within the
2200 window, we first walk forwards to gather data, then in the desired
2201 order (usually backwards) to display it.
2202
2203 This function returns an iterator yielding contexts. Before
2204 yielding each context, the iterator will first call the prepare
2205 function on each context in the window in forward order.'''
2206
2207 follow = opts.get('follow') or opts.get('follow_first')
2208 revs = _logrevs(repo, opts)
2209 if not revs:
2210 return []
2211 wanted = set()
2212 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2213 opts.get('removed'))
2214 fncache = {}
2215 change = repo.changectx
2216
2217 # First step is to fill wanted, the set of revisions that we want to yield.
2218 # When it does not induce extra cost, we also fill fncache for revisions in
2219 # wanted: a cache of filenames that were changed (ctx.files()) and that
2220 # match the file filtering conditions.
2221
2222 if match.always():
2223 # No files, no patterns. Display all revs.
2224 wanted = revs
2225 elif not slowpath:
2226 # We only have to read through the filelog to find wanted revisions
2227
2228 try:
2229 wanted = walkfilerevs(repo, match, follow, revs, fncache)
2230 except FileWalkError:
2231 slowpath = True
2232
2233 # We decided to fall back to the slowpath because at least one
2234 # of the paths was not a file. Check to see if at least one of them
2235 # existed in history, otherwise simply return
2236 for path in match.files():
2237 if path == '.' or path in repo.store:
2238 break
2239 else:
2240 return []
2241
2242 if slowpath:
2243 # We have to read the changelog to match filenames against
2244 # changed files
2245
2246 if follow:
2247 raise error.Abort(_('can only follow copies/renames for explicit '
2248 'filenames'))
2249
2250 # The slow path checks files modified in every changeset.
2251 # This is really slow on large repos, so compute the set lazily.
2252 class lazywantedset(object):
2253 def __init__(self):
2254 self.set = set()
2255 self.revs = set(revs)
2256
2257 # No need to worry about locality here because it will be accessed
2258 # in the same order as the increasing window below.
2259 def __contains__(self, value):
2260 if value in self.set:
2261 return True
2262 elif not value in self.revs:
2263 return False
2264 else:
2265 self.revs.discard(value)
2266 ctx = change(value)
2267 matches = filter(match, ctx.files())
2268 if matches:
2269 fncache[value] = matches
2270 self.set.add(value)
2271 return True
2272 return False
2273
2274 def discard(self, value):
2275 self.revs.discard(value)
2276 self.set.discard(value)
2277
2278 wanted = lazywantedset()
2279
2280 # it might be worthwhile to do this in the iterator if the rev range
2281 # is descending and the prune args are all within that range
2282 for rev in opts.get('prune', ()):
2283 rev = repo[rev].rev()
2284 ff = _followfilter(repo)
2285 stop = min(revs[0], revs[-1])
2286 for x in xrange(rev, stop - 1, -1):
2287 if ff.match(x):
2288 wanted = wanted - [x]
2289
2290 # Now that wanted is correctly initialized, we can iterate over the
2291 # revision range, yielding only revisions in wanted.
2292 def iterate():
2293 if follow and match.always():
2294 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
2295 def want(rev):
2296 return ff.match(rev) and rev in wanted
2297 else:
2298 def want(rev):
2299 return rev in wanted
2300
2301 it = iter(revs)
2302 stopiteration = False
2303 for windowsize in increasingwindows():
2304 nrevs = []
2305 for i in xrange(windowsize):
2306 rev = next(it, None)
2307 if rev is None:
2308 stopiteration = True
2309 break
2310 elif want(rev):
2311 nrevs.append(rev)
2312 for rev in sorted(nrevs):
2313 fns = fncache.get(rev)
2314 ctx = change(rev)
2315 if not fns:
2316 def fns_generator():
2317 for f in ctx.files():
2318 if match(f):
2319 yield f
2320 fns = fns_generator()
2321 prepare(ctx, fns)
2322 for rev in nrevs:
2323 yield change(rev)
2324
2325 if stopiteration:
2326 break
2327
2328 return iterate()
2329
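# Illustrative sketch (assumed usage, mirroring the call in finddate() above):
# a caller supplies a matcher, log-style opts and a prepare callback, then
# consumes the yielded contexts:
#
#   def prep(ctx, fns):
#       pass  # e.g. gather per-revision data before display
#   for ctx in walkchangerevs(repo, scmutil.matchall(repo), {'rev': None}, prep):
#       ui.status("%d\n" % ctx.rev())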
2330 def _makefollowlogfilematcher(repo, files, followfirst):
2331 # When displaying a revision with --patch --follow FILE, we have
2332 # to know which file of the revision must be diffed. With
2333 # --follow, we want the names of the ancestors of FILE in the
2334 # revision, stored in "fcache". "fcache" is populated by
2335 # reproducing the graph traversal already done by --follow revset
2336 # and relating revs to file names (which is not "correct" but
2337 # good enough).
2338 fcache = {}
2339 fcacheready = [False]
2340 pctx = repo['.']
2341
2342 def populate():
2343 for fn in files:
2344 fctx = pctx[fn]
2345 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2346 for c in fctx.ancestors(followfirst=followfirst):
2347 fcache.setdefault(c.rev(), set()).add(c.path())
2348
2349 def filematcher(rev):
2350 if not fcacheready[0]:
2351 # Lazy initialization
2352 fcacheready[0] = True
2353 populate()
2354 return scmutil.matchfiles(repo, fcache.get(rev, []))
2355
2356 return filematcher
2357
2358 def _makenofollowlogfilematcher(repo, pats, opts):
2359 '''hook for extensions to override the filematcher for non-follow cases'''
2360 return None
2361
2362 def _makelogrevset(repo, pats, opts, revs):
2363 """Return (expr, filematcher) where expr is a revset string built
2364 from log options and file patterns or None. If --stat or --patch
2365 are not passed filematcher is None. Otherwise it is a callable
2366 taking a revision number and returning a match object filtering
2367 the files to be detailed when displaying the revision.
2368 """
2369 opt2revset = {
2370 'no_merges': ('not merge()', None),
2371 'only_merges': ('merge()', None),
2372 '_ancestors': ('ancestors(%(val)s)', None),
2373 '_fancestors': ('_firstancestors(%(val)s)', None),
2374 '_descendants': ('descendants(%(val)s)', None),
2375 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2376 '_matchfiles': ('_matchfiles(%(val)s)', None),
2377 'date': ('date(%(val)r)', None),
2378 'branch': ('branch(%(val)r)', ' or '),
2379 '_patslog': ('filelog(%(val)r)', ' or '),
2380 '_patsfollow': ('follow(%(val)r)', ' or '),
2381 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2382 'keyword': ('keyword(%(val)r)', ' or '),
2383 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2384 'user': ('user(%(val)r)', ' or '),
2385 }
2386
2387 opts = dict(opts)
2388 # follow or not follow?
2389 follow = opts.get('follow') or opts.get('follow_first')
2390 if opts.get('follow_first'):
2391 followfirst = 1
2392 else:
2393 followfirst = 0
2394 # --follow with FILE behavior depends on revs...
2395 it = iter(revs)
2396 startrev = next(it)
2397 followdescendants = startrev < next(it, startrev)
2398
2399 # branch and only_branch are really aliases and must be handled at
2400 # the same time
2401 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2402 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2403 # pats/include/exclude are passed to match.match() directly in
2404 # _matchfiles() revset but walkchangerevs() builds its matcher with
2405 # scmutil.match(). The difference is input pats are globbed on
2406 # platforms without shell expansion (windows).
2407 wctx = repo[None]
2408 match, pats = scmutil.matchandpats(wctx, pats, opts)
2409 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2410 opts.get('removed'))
2411 if not slowpath:
2412 for f in match.files():
2413 if follow and f not in wctx:
2414 # If the file exists, it may be a directory, so let it
2415 # take the slow path.
2416 if os.path.exists(repo.wjoin(f)):
2417 slowpath = True
2418 continue
2419 else:
2420 raise error.Abort(_('cannot follow file not in parent '
2421 'revision: "%s"') % f)
2422 filelog = repo.file(f)
2423 if not filelog:
2424 # A zero count may be a directory or deleted file, so
2425 # try to find matching entries on the slow path.
2426 if follow:
2427 raise error.Abort(
2428 _('cannot follow nonexistent file: "%s"') % f)
2429 slowpath = True
2430
2431 # We decided to fall back to the slowpath because at least one
2432 # of the paths was not a file. Check to see if at least one of them
2433 # existed in history - in that case, we'll continue down the
2434 # slowpath; otherwise, we can turn off the slowpath
2435 if slowpath:
2436 for path in match.files():
2437 if path == '.' or path in repo.store:
2438 break
2439 else:
2440 slowpath = False
2441
2442 fpats = ('_patsfollow', '_patsfollowfirst')
2443 fnopats = (('_ancestors', '_fancestors'),
2444 ('_descendants', '_fdescendants'))
2445 if slowpath:
2446 # See walkchangerevs() slow path.
2447 #
2448 # pats/include/exclude cannot be represented as separate
2449 # revset expressions as their filtering logic applies at file
2450 # level. For instance "-I a -X a" matches a revision touching
2451 # "a" and "b" while "file(a) and not file(b)" does
2452 # not. Besides, filesets are evaluated against the working
2453 # directory.
2454 matchargs = ['r:', 'd:relpath']
2455 for p in pats:
2456 matchargs.append('p:' + p)
2457 for p in opts.get('include', []):
2458 matchargs.append('i:' + p)
2459 for p in opts.get('exclude', []):
2460 matchargs.append('x:' + p)
2461 matchargs = ','.join(('%r' % p) for p in matchargs)
2462 opts['_matchfiles'] = matchargs
2463 if follow:
2464 opts[fnopats[0][followfirst]] = '.'
2465 else:
2466 if follow:
2467 if pats:
2468 # follow() revset interprets its file argument as a
2469 # manifest entry, so use match.files(), not pats.
2470 opts[fpats[followfirst]] = list(match.files())
2471 else:
2472 op = fnopats[followdescendants][followfirst]
2473 opts[op] = 'rev(%d)' % startrev
2474 else:
2475 opts['_patslog'] = list(pats)
2476
2477 filematcher = None
2478 if opts.get('patch') or opts.get('stat'):
2479 # When following files, track renames via a special matcher.
2480 # If we're forced to take the slowpath it means we're following
2481 # at least one pattern/directory, so don't bother with rename tracking.
2482 if follow and not match.always() and not slowpath:
2483 # _makefollowlogfilematcher expects its files argument to be
2484 # relative to the repo root, so use match.files(), not pats.
2485 filematcher = _makefollowlogfilematcher(repo, match.files(),
2486 followfirst)
2487 else:
2488 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2489 if filematcher is None:
2490 filematcher = lambda rev: match
2491
2492 expr = []
2493 for op, val in sorted(opts.iteritems()):
2494 if not val:
2495 continue
2496 if op not in opt2revset:
2497 continue
2498 revop, andor = opt2revset[op]
2499 if '%(val)' not in revop:
2500 expr.append(revop)
2501 else:
2502 if not isinstance(val, list):
2503 e = revop % {'val': val}
2504 else:
2505 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2506 expr.append(e)
2507
2508 if expr:
2509 expr = '(' + ' and '.join(expr) + ')'
2510 else:
2511 expr = None
2512 return expr, filematcher
2513
2514 def _logrevs(repo, opts):
2515 # Default --rev value depends on --follow but --follow behavior
2516 # depends on revisions resolved from --rev...
2517 follow = opts.get('follow') or opts.get('follow_first')
2518 if opts.get('rev'):
2519 revs = scmutil.revrange(repo, opts['rev'])
2520 elif follow and repo.dirstate.p1() == nullid:
2521 revs = smartset.baseset()
2522 elif follow:
2523 revs = repo.revs('reverse(:.)')
2524 else:
2525 revs = smartset.spanset(repo)
2526 revs.reverse()
2527 return revs
2528
2529 def getgraphlogrevs(repo, pats, opts):
2530 """Return (revs, expr, filematcher) where revs is an iterable of
2531 revision numbers, expr is a revset string built from log options
2532 and file patterns or None, and used to filter 'revs'. If --stat or
2533 --patch are not passed filematcher is None. Otherwise it is a
2534 callable taking a revision number and returning a match object
2535 filtering the files to be detailed when displaying the revision.
2536 """
2537 limit = loglimit(opts)
2538 revs = _logrevs(repo, opts)
2539 if not revs:
2540 return smartset.baseset(), None, None
2541 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2542 if opts.get('rev'):
2543 # User-specified revs might be unsorted, but don't sort before
2544 # _makelogrevset because it might depend on the order of revs
2545 if not (revs.isdescending() or revs.istopo()):
2546 revs.sort(reverse=True)
2547 if expr:
2548 matcher = revset.match(repo.ui, expr)
2549 revs = matcher(repo, revs)
2550 if limit is not None:
2551 limitedrevs = []
2552 for idx, rev in enumerate(revs):
2553 if idx >= limit:
2554 break
2555 limitedrevs.append(rev)
2556 revs = smartset.baseset(limitedrevs)
2557
2558 return revs, expr, filematcher
2559
2560 def getlogrevs(repo, pats, opts):
2561 """Return (revs, expr, filematcher) where revs is an iterable of
2562 revision numbers, expr is a revset string built from log options
2563 and file patterns or None, and used to filter 'revs'. If --stat or
2564 --patch are not passed filematcher is None. Otherwise it is a
2565 callable taking a revision number and returning a match object
2566 filtering the files to be detailed when displaying the revision.
2567 """
2568 limit = loglimit(opts)
2569 revs = _logrevs(repo, opts)
2570 if not revs:
2571 return smartset.baseset([]), None, None
2572 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2573 if expr:
2574 matcher = revset.match(repo.ui, expr)
2575 revs = matcher(repo, revs)
2576 if limit is not None:
2577 limitedrevs = []
2578 for idx, r in enumerate(revs):
2579 if limit <= idx:
2580 break
2581 limitedrevs.append(r)
2582 revs = smartset.baseset(limitedrevs)
2583
2584 return revs, expr, filematcher
2585
2586 def _parselinerangelogopt(repo, opts):
2587 """Parse --line-range log option and return a list of tuples (filename,
2588 (fromline, toline)).
2589 """
2590 linerangebyfname = []
2591 for pat in opts.get('line_range', []):
2592 try:
2593 pat, linerange = pat.rsplit(',', 1)
2594 except ValueError:
2595 raise error.Abort(_('malformatted line-range pattern %s') % pat)
2596 try:
2597 fromline, toline = map(int, linerange.split(':'))
2598 except ValueError:
2599 raise error.Abort(_("invalid line range for %s") % pat)
2600 msg = _("line range pattern '%s' must match exactly one file") % pat
2601 fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
2602 linerangebyfname.append(
2603 (fname, util.processlinerange(fromline, toline)))
2604 return linerangebyfname
2605
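# Illustration (derived from the parsing above): each --line-range value has
# the form "FILE,FROMLINE:TOLINE", so an invocation along the lines of
#
#   hg log -L foo.c,10:25 --patch
#
# hands this helper the pattern 'foo.c,10:25', which becomes one
# (filename, linerange) entry after util.processlinerange() validates the
# bounds.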
2606 def getloglinerangerevs(repo, userrevs, opts):
2606 def getloglinerangerevs(repo, userrevs, opts):
2607 """Return (revs, filematcher, hunksfilter).
2607 """Return (revs, filematcher, hunksfilter).
2608
2608
2609 "revs" are revisions obtained by processing "line-range" log options and
2609 "revs" are revisions obtained by processing "line-range" log options and
2610 walking block ancestors of each specified file/line-range.
2610 walking block ancestors of each specified file/line-range.
2611
2611
2612 "filematcher(rev) -> match" is a factory function returning a match object
2612 "filematcher(rev) -> match" is a factory function returning a match object
2613 for a given revision for file patterns specified in --line-range option.
2613 for a given revision for file patterns specified in --line-range option.
2614 If neither --stat nor --patch options are passed, "filematcher" is None.
2614 If neither --stat nor --patch options are passed, "filematcher" is None.
2615
2615
2616 "hunksfilter(rev) -> filterfn(fctx, hunks)" is a factory function
2616 "hunksfilter(rev) -> filterfn(fctx, hunks)" is a factory function
2617 returning a hunks filtering function.
2617 returning a hunks filtering function.
2618 If neither --stat nor --patch options are passed, "filterhunks" is None.
2618 If neither --stat nor --patch options are passed, "filterhunks" is None.
2619 """
2619 """
2620 wctx = repo[None]
2620 wctx = repo[None]
2621
2621
2622 # Two-levels map of "rev -> file ctx -> [line range]".
2622 # Two-levels map of "rev -> file ctx -> [line range]".
2623 linerangesbyrev = {}
2623 linerangesbyrev = {}
2624 for fname, (fromline, toline) in _parselinerangelogopt(repo, opts):
2624 for fname, (fromline, toline) in _parselinerangelogopt(repo, opts):
2625 if fname not in wctx:
2625 if fname not in wctx:
2626 raise error.Abort(_('cannot follow file not in parent '
2626 raise error.Abort(_('cannot follow file not in parent '
2627 'revision: "%s"') % fname)
2627 'revision: "%s"') % fname)
2628 fctx = wctx.filectx(fname)
2628 fctx = wctx.filectx(fname)
2629 for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
2629 for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
2630 rev = fctx.introrev()
2630 rev = fctx.introrev()
2631 if rev not in userrevs:
2631 if rev not in userrevs:
2632 continue
2632 continue
2633 linerangesbyrev.setdefault(
2633 linerangesbyrev.setdefault(
2634 rev, {}).setdefault(
2634 rev, {}).setdefault(
2635 fctx.path(), []).append(linerange)
2635 fctx.path(), []).append(linerange)
2636
2636
2637 filematcher = None
2637 filematcher = None
2638 hunksfilter = None
2638 hunksfilter = None
2639 if opts.get('patch') or opts.get('stat'):
2639 if opts.get('patch') or opts.get('stat'):
2640
2640
2641 def nofilterhunksfn(fctx, hunks):
2641 def nofilterhunksfn(fctx, hunks):
2642 return hunks
2642 return hunks
2643
2643
2644 def hunksfilter(rev):
2644 def hunksfilter(rev):
2645 fctxlineranges = linerangesbyrev.get(rev)
2645 fctxlineranges = linerangesbyrev.get(rev)
2646 if fctxlineranges is None:
2646 if fctxlineranges is None:
2647 return nofilterhunksfn
2647 return nofilterhunksfn
2648
2648
2649 def filterfn(fctx, hunks):
2649 def filterfn(fctx, hunks):
2650 lineranges = fctxlineranges.get(fctx.path())
2650 lineranges = fctxlineranges.get(fctx.path())
2651 if lineranges is not None:
2651 if lineranges is not None:
2652 for hr, lines in hunks:
2652 for hr, lines in hunks:
2653 if hr is None: # binary
2653 if hr is None: # binary
2654 yield hr, lines
2654 yield hr, lines
2655 continue
2655 continue
2656 if any(mdiff.hunkinrange(hr[2:], lr)
2656 if any(mdiff.hunkinrange(hr[2:], lr)
2657 for lr in lineranges):
2657 for lr in lineranges):
2658 yield hr, lines
2658 yield hr, lines
2659 else:
2659 else:
2660 for hunk in hunks:
2660 for hunk in hunks:
2661 yield hunk
2661 yield hunk
2662
2662
2663 return filterfn
2663 return filterfn
2664
2664
2665 def filematcher(rev):
2665 def filematcher(rev):
2666 files = list(linerangesbyrev.get(rev, []))
2666 files = list(linerangesbyrev.get(rev, []))
2667 return scmutil.matchfiles(repo, files)
2667 return scmutil.matchfiles(repo, files)
2668
2668
2669 revs = sorted(linerangesbyrev, reverse=True)
2669 revs = sorted(linerangesbyrev, reverse=True)
2670
2670
2671 return revs, filematcher, hunksfilter
2671 return revs, filematcher, hunksfilter
2672
2672
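# A minimal sketch of how a log-style caller could consume the three values
# returned above (illustration only; "displayer" and the keyword argument
# names are placeholders rather than a transcript of the real log code path):
#
#   revs, filematcher, hunksfilter = getloglinerangerevs(repo, userrevs, opts)
#   for rev in revs:
#       ctx = repo[rev]
#       matchfn = filematcher(rev) if filematcher else None
#       hunksfilterfn = hunksfilter(rev) if hunksfilter else None
#       # displayer.show(ctx, matchfn=matchfn, hunksfilterfn=hunksfilterfn)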
2673 def _graphnodeformatter(ui, displayer):
2673 def _graphnodeformatter(ui, displayer):
2674 spec = ui.config('ui', 'graphnodetemplate')
2674 spec = ui.config('ui', 'graphnodetemplate')
2675 if not spec:
2675 if not spec:
2676 return templatekw.showgraphnode # fast path for "{graphnode}"
2676 return templatekw.showgraphnode # fast path for "{graphnode}"
2677
2677
2678 spec = templater.unquotestring(spec)
2678 spec = templater.unquotestring(spec)
2679 templ = formatter.maketemplater(ui, spec)
2679 templ = formatter.maketemplater(ui, spec)
2680 cache = {}
2680 cache = {}
2681 if isinstance(displayer, changeset_templater):
2681 if isinstance(displayer, changeset_templater):
2682 cache = displayer.cache # reuse cache of slow templates
2682 cache = displayer.cache # reuse cache of slow templates
2683 props = templatekw.keywords.copy()
2683 props = templatekw.keywords.copy()
2684 props['templ'] = templ
2684 props['templ'] = templ
2685 props['cache'] = cache
2685 props['cache'] = cache
2686 def formatnode(repo, ctx):
2686 def formatnode(repo, ctx):
2687 props['ctx'] = ctx
2687 props['ctx'] = ctx
2688 props['repo'] = repo
2688 props['repo'] = repo
2689 props['ui'] = repo.ui
2689 props['ui'] = repo.ui
2690 props['revcache'] = {}
2690 props['revcache'] = {}
2691 return templ.render(props)
2691 return templ.render(props)
2692 return formatnode
2692 return formatnode
2693
2693
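# Configuration sketch for the template handled above (the template text is
# only an example): with
#
#   [ui]
#   graphnodetemplate = {ifeq(phase, 'secret', '$', graphnode)}
#
# in an hgrc, formatnode() would render '$' for secret changesets and the
# usual {graphnode} character for everything else.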
2694 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2694 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2695 filematcher=None, props=None):
2695 filematcher=None, props=None):
2696 props = props or {}
2696 props = props or {}
2697 formatnode = _graphnodeformatter(ui, displayer)
2697 formatnode = _graphnodeformatter(ui, displayer)
2698 state = graphmod.asciistate()
2698 state = graphmod.asciistate()
2699 styles = state['styles']
2699 styles = state['styles']
2700
2700
2701 # only set graph styling if HGPLAIN is not set.
2701 # only set graph styling if HGPLAIN is not set.
2702 if ui.plain('graph'):
2702 if ui.plain('graph'):
2703 # set all edge styles to |, the default pre-3.8 behaviour
2703 # set all edge styles to |, the default pre-3.8 behaviour
2704 styles.update(dict.fromkeys(styles, '|'))
2704 styles.update(dict.fromkeys(styles, '|'))
2705 else:
2705 else:
2706 edgetypes = {
2706 edgetypes = {
2707 'parent': graphmod.PARENT,
2707 'parent': graphmod.PARENT,
2708 'grandparent': graphmod.GRANDPARENT,
2708 'grandparent': graphmod.GRANDPARENT,
2709 'missing': graphmod.MISSINGPARENT
2709 'missing': graphmod.MISSINGPARENT
2710 }
2710 }
2711 for name, key in edgetypes.items():
2711 for name, key in edgetypes.items():
2712 # experimental config: experimental.graphstyle.*
2712 # experimental config: experimental.graphstyle.*
2713 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2713 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2714 styles[key])
2714 styles[key])
2715 if not styles[key]:
2715 if not styles[key]:
2716 styles[key] = None
2716 styles[key] = None
2717
2717
2718 # experimental config: experimental.graphshorten
2718 # experimental config: experimental.graphshorten
2719 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2719 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2720
2720
2721 for rev, type, ctx, parents in dag:
2721 for rev, type, ctx, parents in dag:
2722 char = formatnode(repo, ctx)
2722 char = formatnode(repo, ctx)
2723 copies = None
2723 copies = None
2724 if getrenamed and ctx.rev():
2724 if getrenamed and ctx.rev():
2725 copies = []
2725 copies = []
2726 for fn in ctx.files():
2726 for fn in ctx.files():
2727 rename = getrenamed(fn, ctx.rev())
2727 rename = getrenamed(fn, ctx.rev())
2728 if rename:
2728 if rename:
2729 copies.append((fn, rename[0]))
2729 copies.append((fn, rename[0]))
2730 revmatchfn = None
2730 revmatchfn = None
2731 if filematcher is not None:
2731 if filematcher is not None:
2732 revmatchfn = filematcher(ctx.rev())
2732 revmatchfn = filematcher(ctx.rev())
2733 edges = edgefn(type, char, state, rev, parents)
2733 edges = edgefn(type, char, state, rev, parents)
2734 firstedge = next(edges)
2734 firstedge = next(edges)
2735 width = firstedge[2]
2735 width = firstedge[2]
2736 displayer.show(ctx, copies=copies, matchfn=revmatchfn,
2736 displayer.show(ctx, copies=copies, matchfn=revmatchfn,
2737 _graphwidth=width, **props)
2737 _graphwidth=width, **props)
2738 lines = displayer.hunk.pop(rev).split('\n')
2738 lines = displayer.hunk.pop(rev).split('\n')
2739 if not lines[-1]:
2739 if not lines[-1]:
2740 del lines[-1]
2740 del lines[-1]
2741 displayer.flush(ctx)
2741 displayer.flush(ctx)
2742 for type, char, width, coldata in itertools.chain([firstedge], edges):
2742 for type, char, width, coldata in itertools.chain([firstedge], edges):
2743 graphmod.ascii(ui, state, type, char, lines, coldata)
2743 graphmod.ascii(ui, state, type, char, lines, coldata)
2744 lines = []
2744 lines = []
2745 displayer.close()
2745 displayer.close()
2746
2746
2747 def graphlog(ui, repo, pats, opts):
2747 def graphlog(ui, repo, pats, opts):
2748 # Parameters are identical to log command ones
2748 # Parameters are identical to log command ones
2749 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2749 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2750 revdag = graphmod.dagwalker(repo, revs)
2750 revdag = graphmod.dagwalker(repo, revs)
2751
2751
2752 getrenamed = None
2752 getrenamed = None
2753 if opts.get('copies'):
2753 if opts.get('copies'):
2754 endrev = None
2754 endrev = None
2755 if opts.get('rev'):
2755 if opts.get('rev'):
2756 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2756 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2757 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2757 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2758
2758
2759 ui.pager('log')
2759 ui.pager('log')
2760 displayer = show_changeset(ui, repo, opts, buffered=True)
2760 displayer = show_changeset(ui, repo, opts, buffered=True)
2761 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2761 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2762 filematcher)
2762 filematcher)
2763
2763
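# For reference, this is the code path behind graph-style log invocations
# such as (illustrative command line): hg log -G --copies -r 'last(all(), 20)'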
2764 def checkunsupportedgraphflags(pats, opts):
2764 def checkunsupportedgraphflags(pats, opts):
2765 for op in ["newest_first"]:
2765 for op in ["newest_first"]:
2766 if op in opts and opts[op]:
2766 if op in opts and opts[op]:
2767 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2767 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2768 % op.replace("_", "-"))
2768 % op.replace("_", "-"))
2769
2769
2770 def graphrevs(repo, nodes, opts):
2770 def graphrevs(repo, nodes, opts):
2771 limit = loglimit(opts)
2771 limit = loglimit(opts)
2772 nodes.reverse()
2772 nodes.reverse()
2773 if limit is not None:
2773 if limit is not None:
2774 nodes = nodes[:limit]
2774 nodes = nodes[:limit]
2775 return graphmod.nodes(repo, nodes)
2775 return graphmod.nodes(repo, nodes)
2776
2776
2777 def add(ui, repo, match, prefix, explicitonly, **opts):
2777 def add(ui, repo, match, prefix, explicitonly, **opts):
2778 join = lambda f: os.path.join(prefix, f)
2778 join = lambda f: os.path.join(prefix, f)
2779 bad = []
2779 bad = []
2780
2780
2781 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2781 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2782 names = []
2782 names = []
2783 wctx = repo[None]
2783 wctx = repo[None]
2784 cca = None
2784 cca = None
2785 abort, warn = scmutil.checkportabilityalert(ui)
2785 abort, warn = scmutil.checkportabilityalert(ui)
2786 if abort or warn:
2786 if abort or warn:
2787 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2787 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2788
2788
2789 badmatch = matchmod.badmatch(match, badfn)
2789 badmatch = matchmod.badmatch(match, badfn)
2790 dirstate = repo.dirstate
2790 dirstate = repo.dirstate
2791 # We don't want to just call wctx.walk here, since it would return a lot of
2791 # We don't want to just call wctx.walk here, since it would return a lot of
2792 # clean files, which we aren't interested in, and doing so takes time.
2792 # clean files, which we aren't interested in, and doing so takes time.
2793 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
2793 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
2794 unknown=True, ignored=False, full=False)):
2794 unknown=True, ignored=False, full=False)):
2795 exact = match.exact(f)
2795 exact = match.exact(f)
2796 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2796 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2797 if cca:
2797 if cca:
2798 cca(f)
2798 cca(f)
2799 names.append(f)
2799 names.append(f)
2800 if ui.verbose or not exact:
2800 if ui.verbose or not exact:
2801 ui.status(_('adding %s\n') % match.rel(f))
2801 ui.status(_('adding %s\n') % match.rel(f))
2802
2802
2803 for subpath in sorted(wctx.substate):
2803 for subpath in sorted(wctx.substate):
2804 sub = wctx.sub(subpath)
2804 sub = wctx.sub(subpath)
2805 try:
2805 try:
2806 submatch = matchmod.subdirmatcher(subpath, match)
2806 submatch = matchmod.subdirmatcher(subpath, match)
2807 if opts.get(r'subrepos'):
2807 if opts.get(r'subrepos'):
2808 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2808 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2809 else:
2809 else:
2810 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2810 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2811 except error.LookupError:
2811 except error.LookupError:
2812 ui.status(_("skipping missing subrepository: %s\n")
2812 ui.status(_("skipping missing subrepository: %s\n")
2813 % join(subpath))
2813 % join(subpath))
2814
2814
2815 if not opts.get(r'dry_run'):
2815 if not opts.get(r'dry_run'):
2816 rejected = wctx.add(names, prefix)
2816 rejected = wctx.add(names, prefix)
2817 bad.extend(f for f in rejected if f in match.files())
2817 bad.extend(f for f in rejected if f in match.files())
2818 return bad
2818 return bad
2819
2819
2820 def addwebdirpath(repo, serverpath, webconf):
2820 def addwebdirpath(repo, serverpath, webconf):
2821 webconf[serverpath] = repo.root
2821 webconf[serverpath] = repo.root
2822 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2822 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2823
2823
2824 for r in repo.revs('filelog("path:.hgsub")'):
2824 for r in repo.revs('filelog("path:.hgsub")'):
2825 ctx = repo[r]
2825 ctx = repo[r]
2826 for subpath in ctx.substate:
2826 for subpath in ctx.substate:
2827 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2827 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2828
2828
2829 def forget(ui, repo, match, prefix, explicitonly):
2829 def forget(ui, repo, match, prefix, explicitonly):
2830 join = lambda f: os.path.join(prefix, f)
2830 join = lambda f: os.path.join(prefix, f)
2831 bad = []
2831 bad = []
2832 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2832 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2833 wctx = repo[None]
2833 wctx = repo[None]
2834 forgot = []
2834 forgot = []
2835
2835
2836 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2836 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2837 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2837 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2838 if explicitonly:
2838 if explicitonly:
2839 forget = [f for f in forget if match.exact(f)]
2839 forget = [f for f in forget if match.exact(f)]
2840
2840
2841 for subpath in sorted(wctx.substate):
2841 for subpath in sorted(wctx.substate):
2842 sub = wctx.sub(subpath)
2842 sub = wctx.sub(subpath)
2843 try:
2843 try:
2844 submatch = matchmod.subdirmatcher(subpath, match)
2844 submatch = matchmod.subdirmatcher(subpath, match)
2845 subbad, subforgot = sub.forget(submatch, prefix)
2845 subbad, subforgot = sub.forget(submatch, prefix)
2846 bad.extend([subpath + '/' + f for f in subbad])
2846 bad.extend([subpath + '/' + f for f in subbad])
2847 forgot.extend([subpath + '/' + f for f in subforgot])
2847 forgot.extend([subpath + '/' + f for f in subforgot])
2848 except error.LookupError:
2848 except error.LookupError:
2849 ui.status(_("skipping missing subrepository: %s\n")
2849 ui.status(_("skipping missing subrepository: %s\n")
2850 % join(subpath))
2850 % join(subpath))
2851
2851
2852 if not explicitonly:
2852 if not explicitonly:
2853 for f in match.files():
2853 for f in match.files():
2854 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2854 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2855 if f not in forgot:
2855 if f not in forgot:
2856 if repo.wvfs.exists(f):
2856 if repo.wvfs.exists(f):
2857 # Don't complain if the exact case match wasn't given.
2857 # Don't complain if the exact case match wasn't given.
2858 # But don't do this until after checking 'forgot', so
2858 # But don't do this until after checking 'forgot', so
2859 # that subrepo files aren't normalized, and this op is
2859 # that subrepo files aren't normalized, and this op is
2860 # purely from data cached by the status walk above.
2860 # purely from data cached by the status walk above.
2861 if repo.dirstate.normalize(f) in repo.dirstate:
2861 if repo.dirstate.normalize(f) in repo.dirstate:
2862 continue
2862 continue
2863 ui.warn(_('not removing %s: '
2863 ui.warn(_('not removing %s: '
2864 'file is already untracked\n')
2864 'file is already untracked\n')
2865 % match.rel(f))
2865 % match.rel(f))
2866 bad.append(f)
2866 bad.append(f)
2867
2867
2868 for f in forget:
2868 for f in forget:
2869 if ui.verbose or not match.exact(f):
2869 if ui.verbose or not match.exact(f):
2870 ui.status(_('removing %s\n') % match.rel(f))
2870 ui.status(_('removing %s\n') % match.rel(f))
2871
2871
2872 rejected = wctx.forget(forget, prefix)
2872 rejected = wctx.forget(forget, prefix)
2873 bad.extend(f for f in rejected if f in match.files())
2873 bad.extend(f for f in rejected if f in match.files())
2874 forgot.extend(f for f in forget if f not in rejected)
2874 forgot.extend(f for f in forget if f not in rejected)
2875 return bad, forgot
2875 return bad, forgot
2876
2876
2877 def files(ui, ctx, m, fm, fmt, subrepos):
2877 def files(ui, ctx, m, fm, fmt, subrepos):
2878 rev = ctx.rev()
2878 rev = ctx.rev()
2879 ret = 1
2879 ret = 1
2880 ds = ctx.repo().dirstate
2880 ds = ctx.repo().dirstate
2881
2881
2882 for f in ctx.matches(m):
2882 for f in ctx.matches(m):
2883 if rev is None and ds[f] == 'r':
2883 if rev is None and ds[f] == 'r':
2884 continue
2884 continue
2885 fm.startitem()
2885 fm.startitem()
2886 if ui.verbose:
2886 if ui.verbose:
2887 fc = ctx[f]
2887 fc = ctx[f]
2888 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2888 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2889 fm.data(abspath=f)
2889 fm.data(abspath=f)
2890 fm.write('path', fmt, m.rel(f))
2890 fm.write('path', fmt, m.rel(f))
2891 ret = 0
2891 ret = 0
2892
2892
2893 for subpath in sorted(ctx.substate):
2893 for subpath in sorted(ctx.substate):
2894 submatch = matchmod.subdirmatcher(subpath, m)
2894 submatch = matchmod.subdirmatcher(subpath, m)
2895 if (subrepos or m.exact(subpath) or any(submatch.files())):
2895 if (subrepos or m.exact(subpath) or any(submatch.files())):
2896 sub = ctx.sub(subpath)
2896 sub = ctx.sub(subpath)
2897 try:
2897 try:
2898 recurse = m.exact(subpath) or subrepos
2898 recurse = m.exact(subpath) or subrepos
2899 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2899 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2900 ret = 0
2900 ret = 0
2901 except error.LookupError:
2901 except error.LookupError:
2902 ui.status(_("skipping missing subrepository: %s\n")
2902 ui.status(_("skipping missing subrepository: %s\n")
2903 % m.abs(subpath))
2903 % m.abs(subpath))
2904
2904
2905 return ret
2905 return ret
2906
2906
2907 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2907 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2908 join = lambda f: os.path.join(prefix, f)
2908 join = lambda f: os.path.join(prefix, f)
2909 ret = 0
2909 ret = 0
2910 s = repo.status(match=m, clean=True)
2910 s = repo.status(match=m, clean=True)
2911 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2911 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2912
2912
2913 wctx = repo[None]
2913 wctx = repo[None]
2914
2914
2915 if warnings is None:
2915 if warnings is None:
2916 warnings = []
2916 warnings = []
2917 warn = True
2917 warn = True
2918 else:
2918 else:
2919 warn = False
2919 warn = False
2920
2920
2921 subs = sorted(wctx.substate)
2921 subs = sorted(wctx.substate)
2922 total = len(subs)
2922 total = len(subs)
2923 count = 0
2923 count = 0
2924 for subpath in subs:
2924 for subpath in subs:
2925 count += 1
2925 count += 1
2926 submatch = matchmod.subdirmatcher(subpath, m)
2926 submatch = matchmod.subdirmatcher(subpath, m)
2927 if subrepos or m.exact(subpath) or any(submatch.files()):
2927 if subrepos or m.exact(subpath) or any(submatch.files()):
2928 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2928 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2929 sub = wctx.sub(subpath)
2929 sub = wctx.sub(subpath)
2930 try:
2930 try:
2931 if sub.removefiles(submatch, prefix, after, force, subrepos,
2931 if sub.removefiles(submatch, prefix, after, force, subrepos,
2932 warnings):
2932 warnings):
2933 ret = 1
2933 ret = 1
2934 except error.LookupError:
2934 except error.LookupError:
2935 warnings.append(_("skipping missing subrepository: %s\n")
2935 warnings.append(_("skipping missing subrepository: %s\n")
2936 % join(subpath))
2936 % join(subpath))
2937 ui.progress(_('searching'), None)
2937 ui.progress(_('searching'), None)
2938
2938
2939 # warn about failure to delete explicit files/dirs
2939 # warn about failure to delete explicit files/dirs
2940 deleteddirs = util.dirs(deleted)
2940 deleteddirs = util.dirs(deleted)
2941 files = m.files()
2941 files = m.files()
2942 total = len(files)
2942 total = len(files)
2943 count = 0
2943 count = 0
2944 for f in files:
2944 for f in files:
2945 def insubrepo():
2945 def insubrepo():
2946 for subpath in wctx.substate:
2946 for subpath in wctx.substate:
2947 if f.startswith(subpath + '/'):
2947 if f.startswith(subpath + '/'):
2948 return True
2948 return True
2949 return False
2949 return False
2950
2950
2951 count += 1
2951 count += 1
2952 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2952 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2953 isdir = f in deleteddirs or wctx.hasdir(f)
2953 isdir = f in deleteddirs or wctx.hasdir(f)
2954 if (f in repo.dirstate or isdir or f == '.'
2954 if (f in repo.dirstate or isdir or f == '.'
2955 or insubrepo() or f in subs):
2955 or insubrepo() or f in subs):
2956 continue
2956 continue
2957
2957
2958 if repo.wvfs.exists(f):
2958 if repo.wvfs.exists(f):
2959 if repo.wvfs.isdir(f):
2959 if repo.wvfs.isdir(f):
2960 warnings.append(_('not removing %s: no tracked files\n')
2960 warnings.append(_('not removing %s: no tracked files\n')
2961 % m.rel(f))
2961 % m.rel(f))
2962 else:
2962 else:
2963 warnings.append(_('not removing %s: file is untracked\n')
2963 warnings.append(_('not removing %s: file is untracked\n')
2964 % m.rel(f))
2964 % m.rel(f))
2965 # missing files will generate a warning elsewhere
2965 # missing files will generate a warning elsewhere
2966 ret = 1
2966 ret = 1
2967 ui.progress(_('deleting'), None)
2967 ui.progress(_('deleting'), None)
2968
2968
2969 if force:
2969 if force:
2970 list = modified + deleted + clean + added
2970 list = modified + deleted + clean + added
2971 elif after:
2971 elif after:
2972 list = deleted
2972 list = deleted
2973 remaining = modified + added + clean
2973 remaining = modified + added + clean
2974 total = len(remaining)
2974 total = len(remaining)
2975 count = 0
2975 count = 0
2976 for f in remaining:
2976 for f in remaining:
2977 count += 1
2977 count += 1
2978 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2978 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2979 warnings.append(_('not removing %s: file still exists\n')
2979 warnings.append(_('not removing %s: file still exists\n')
2980 % m.rel(f))
2980 % m.rel(f))
2981 ret = 1
2981 ret = 1
2982 ui.progress(_('skipping'), None)
2982 ui.progress(_('skipping'), None)
2983 else:
2983 else:
2984 list = deleted + clean
2984 list = deleted + clean
2985 total = len(modified) + len(added)
2985 total = len(modified) + len(added)
2986 count = 0
2986 count = 0
2987 for f in modified:
2987 for f in modified:
2988 count += 1
2988 count += 1
2989 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2989 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2990 warnings.append(_('not removing %s: file is modified (use -f'
2990 warnings.append(_('not removing %s: file is modified (use -f'
2991 ' to force removal)\n') % m.rel(f))
2991 ' to force removal)\n') % m.rel(f))
2992 ret = 1
2992 ret = 1
2993 for f in added:
2993 for f in added:
2994 count += 1
2994 count += 1
2995 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2995 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2996 warnings.append(_("not removing %s: file has been marked for add"
2996 warnings.append(_("not removing %s: file has been marked for add"
2997 " (use 'hg forget' to undo add)\n") % m.rel(f))
2997 " (use 'hg forget' to undo add)\n") % m.rel(f))
2998 ret = 1
2998 ret = 1
2999 ui.progress(_('skipping'), None)
2999 ui.progress(_('skipping'), None)
3000
3000
3001 list = sorted(list)
3001 list = sorted(list)
3002 total = len(list)
3002 total = len(list)
3003 count = 0
3003 count = 0
3004 for f in list:
3004 for f in list:
3005 count += 1
3005 count += 1
3006 if ui.verbose or not m.exact(f):
3006 if ui.verbose or not m.exact(f):
3007 ui.progress(_('deleting'), count, total=total, unit=_('files'))
3007 ui.progress(_('deleting'), count, total=total, unit=_('files'))
3008 ui.status(_('removing %s\n') % m.rel(f))
3008 ui.status(_('removing %s\n') % m.rel(f))
3009 ui.progress(_('deleting'), None)
3009 ui.progress(_('deleting'), None)
3010
3010
3011 with repo.wlock():
3011 with repo.wlock():
3012 if not after:
3012 if not after:
3013 for f in list:
3013 for f in list:
3014 if f in added:
3014 if f in added:
3015 continue # we never unlink added files on remove
3015 continue # we never unlink added files on remove
3016 repo.wvfs.unlinkpath(f, ignoremissing=True)
3016 repo.wvfs.unlinkpath(f, ignoremissing=True)
3017 repo[None].forget(list)
3017 repo[None].forget(list)
3018
3018
3019 if warn:
3019 if warn:
3020 for warning in warnings:
3020 for warning in warnings:
3021 ui.warn(warning)
3021 ui.warn(warning)
3022
3022
3023 return ret
3023 return ret
3024
3024
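# Selection summary for remove() above (informal paraphrase of the branches):
# with --force every matched file is dropped from tracking; with --after only
# files already missing from disk are removed, and still-present ones trigger
# a "file still exists" warning; by default, modified or added files are kept
# and reported with a hint, while clean and deleted files are removed.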
3025 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
3025 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
3026 err = 1
3026 err = 1
3027
3027
3028 def write(path):
3028 def write(path):
3029 filename = None
3029 filename = None
3030 if fntemplate:
3030 if fntemplate:
3031 filename = makefilename(repo, fntemplate, ctx.node(),
3031 filename = makefilename(repo, fntemplate, ctx.node(),
3032 pathname=os.path.join(prefix, path))
3032 pathname=os.path.join(prefix, path))
3033 with formatter.maybereopen(basefm, filename, opts) as fm:
3033 with formatter.maybereopen(basefm, filename, opts) as fm:
3034 data = ctx[path].data()
3034 data = ctx[path].data()
3035 if opts.get('decode'):
3035 if opts.get('decode'):
3036 data = repo.wwritedata(path, data)
3036 data = repo.wwritedata(path, data)
3037 fm.startitem()
3037 fm.startitem()
3038 fm.write('data', '%s', data)
3038 fm.write('data', '%s', data)
3039 fm.data(abspath=path, path=matcher.rel(path))
3039 fm.data(abspath=path, path=matcher.rel(path))
3040
3040
3041 # Automation often uses hg cat on single files, so special case it
3041 # Automation often uses hg cat on single files, so special case it
3042 # for performance to avoid the cost of parsing the manifest.
3042 # for performance to avoid the cost of parsing the manifest.
3043 if len(matcher.files()) == 1 and not matcher.anypats():
3043 if len(matcher.files()) == 1 and not matcher.anypats():
3044 file = matcher.files()[0]
3044 file = matcher.files()[0]
3045 mfl = repo.manifestlog
3045 mfl = repo.manifestlog
3046 mfnode = ctx.manifestnode()
3046 mfnode = ctx.manifestnode()
3047 try:
3047 try:
3048 if mfnode and mfl[mfnode].find(file)[0]:
3048 if mfnode and mfl[mfnode].find(file)[0]:
3049 write(file)
3049 write(file)
3050 return 0
3050 return 0
3051 except KeyError:
3051 except KeyError:
3052 pass
3052 pass
3053
3053
3054 for abs in ctx.walk(matcher):
3054 for abs in ctx.walk(matcher):
3055 write(abs)
3055 write(abs)
3056 err = 0
3056 err = 0
3057
3057
3058 for subpath in sorted(ctx.substate):
3058 for subpath in sorted(ctx.substate):
3059 sub = ctx.sub(subpath)
3059 sub = ctx.sub(subpath)
3060 try:
3060 try:
3061 submatch = matchmod.subdirmatcher(subpath, matcher)
3061 submatch = matchmod.subdirmatcher(subpath, matcher)
3062
3062
3063 if not sub.cat(submatch, basefm, fntemplate,
3063 if not sub.cat(submatch, basefm, fntemplate,
3064 os.path.join(prefix, sub._path), **opts):
3064 os.path.join(prefix, sub._path), **opts):
3065 err = 0
3065 err = 0
3066 except error.RepoLookupError:
3066 except error.RepoLookupError:
3067 ui.status(_("skipping missing subrepository: %s\n")
3067 ui.status(_("skipping missing subrepository: %s\n")
3068 % os.path.join(prefix, subpath))
3068 % os.path.join(prefix, subpath))
3069
3069
3070 return err
3070 return err
3071
3071
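# The single-file fast path above corresponds to invocations such as
# (illustrative): hg cat -r . setup.py
# where one literal path and no patterns allow a direct manifest find()
# instead of the generic ctx.walk() loop.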
3072 def commit(ui, repo, commitfunc, pats, opts):
3072 def commit(ui, repo, commitfunc, pats, opts):
3073 '''commit the specified files or all outstanding changes'''
3073 '''commit the specified files or all outstanding changes'''
3074 date = opts.get('date')
3074 date = opts.get('date')
3075 if date:
3075 if date:
3076 opts['date'] = util.parsedate(date)
3076 opts['date'] = util.parsedate(date)
3077 message = logmessage(ui, opts)
3077 message = logmessage(ui, opts)
3078 matcher = scmutil.match(repo[None], pats, opts)
3078 matcher = scmutil.match(repo[None], pats, opts)
3079
3079
3080 dsguard = None
3080 dsguard = None
3081 # extract addremove carefully -- this function can be called from a command
3081 # extract addremove carefully -- this function can be called from a command
3082 # that doesn't support addremove
3082 # that doesn't support addremove
3083 if opts.get('addremove'):
3083 if opts.get('addremove'):
3084 dsguard = dirstateguard.dirstateguard(repo, 'commit')
3084 dsguard = dirstateguard.dirstateguard(repo, 'commit')
3085 with dsguard or util.nullcontextmanager():
3085 with dsguard or util.nullcontextmanager():
3086 if dsguard:
3086 if dsguard:
3087 if scmutil.addremove(repo, matcher, "", opts) != 0:
3087 if scmutil.addremove(repo, matcher, "", opts) != 0:
3088 raise error.Abort(
3088 raise error.Abort(
3089 _("failed to mark all new/missing files as added/removed"))
3089 _("failed to mark all new/missing files as added/removed"))
3090
3090
3091 return commitfunc(ui, repo, message, matcher, opts)
3091 return commitfunc(ui, repo, message, matcher, opts)
3092
3092
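# Sketch of the guard idiom used in commit() above (standalone illustration;
# "make_guard" and "needs_guard" are placeholder names):
#
#   guard = make_guard() if needs_guard else None
#   with guard or util.nullcontextmanager():
#       pass  # runs inside the guard when one exists, and bare otherwise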
3093 def samefile(f, ctx1, ctx2):
3093 def samefile(f, ctx1, ctx2):
3094 if f in ctx1.manifest():
3094 if f in ctx1.manifest():
3095 a = ctx1.filectx(f)
3095 a = ctx1.filectx(f)
3096 if f in ctx2.manifest():
3096 if f in ctx2.manifest():
3097 b = ctx2.filectx(f)
3097 b = ctx2.filectx(f)
3098 return (not a.cmp(b)
3098 return (not a.cmp(b)
3099 and a.flags() == b.flags())
3099 and a.flags() == b.flags())
3100 else:
3100 else:
3101 return False
3101 return False
3102 else:
3102 else:
3103 return f not in ctx2.manifest()
3103 return f not in ctx2.manifest()
3104
3104
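# Behaviour of samefile() above, summarised for quick reference: it is True
# when the file has identical data and flags in both manifests, or is absent
# from both; any other combination (present in exactly one side, or differing
# content/flags) yields False.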
3105 def amend(ui, repo, old, extra, pats, opts):
3105 def amend(ui, repo, old, extra, pats, opts):
3106 # avoid cycle context -> subrepo -> cmdutil
3106 # avoid cycle context -> subrepo -> cmdutil
3107 from . import context
3107 from . import context
3108
3108
3109 # amend will reuse the existing user if not specified, but the obsolete
3109 # amend will reuse the existing user if not specified, but the obsolete
3110 # marker creation requires that the current user's name is specified.
3110 # marker creation requires that the current user's name is specified.
3111 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3111 if obsolete.isenabled(repo, obsolete.createmarkersopt):
3112 ui.username() # raise exception if username not set
3112 ui.username() # raise exception if username not set
3113
3113
3114 ui.note(_('amending changeset %s\n') % old)
3114 ui.note(_('amending changeset %s\n') % old)
3115 base = old.p1()
3115 base = old.p1()
3116
3116
3117 with repo.wlock(), repo.lock(), repo.transaction('amend'):
3117 with repo.wlock(), repo.lock(), repo.transaction('amend'):
3118 # Participating changesets:
3118 # Participating changesets:
3119 #
3119 #
3120 # wctx o - workingctx that contains changes from working copy
3120 # wctx o - workingctx that contains changes from working copy
3121 # | to go into amending commit
3121 # | to go into amending commit
3122 # |
3122 # |
3123 # old o - changeset to amend
3123 # old o - changeset to amend
3124 # |
3124 # |
3125 # base o - first parent of the changeset to amend
3125 # base o - first parent of the changeset to amend
3126 wctx = repo[None]
3126 wctx = repo[None]
3127
3127
3128 # Update extra dict from amended commit (e.g. to preserve graft
3128 # Update extra dict from amended commit (e.g. to preserve graft
3129 # source)
3129 # source)
3130 extra.update(old.extra())
3130 extra.update(old.extra())
3131
3131
3132 # Also update it from the wctx
3132 # Also update it from the wctx
3133 extra.update(wctx.extra())
3133 extra.update(wctx.extra())
3134
3134
3135 user = opts.get('user') or old.user()
3135 user = opts.get('user') or old.user()
3136 date = opts.get('date') or old.date()
3136 date = opts.get('date') or old.date()
3137
3137
3138 # Parse the date to allow comparison between date and old.date()
3138 # Parse the date to allow comparison between date and old.date()
3139 date = util.parsedate(date)
3139 date = util.parsedate(date)
3140
3140
3141 if len(old.parents()) > 1:
3141 if len(old.parents()) > 1:
3142 # ctx.files() isn't reliable for merges, so fall back to the
3142 # ctx.files() isn't reliable for merges, so fall back to the
3143 # slower repo.status() method
3143 # slower repo.status() method
3144 files = set([fn for st in repo.status(base, old)[:3]
3144 files = set([fn for st in repo.status(base, old)[:3]
3145 for fn in st])
3145 for fn in st])
3146 else:
3146 else:
3147 files = set(old.files())
3147 files = set(old.files())
3148
3148
3149 # add/remove the files to the working copy if the "addremove" option
3149 # add/remove the files to the working copy if the "addremove" option
3150 # was specified.
3150 # was specified.
3151 matcher = scmutil.match(wctx, pats, opts)
3151 matcher = scmutil.match(wctx, pats, opts)
3152 if (opts.get('addremove')
3152 if (opts.get('addremove')
3153 and scmutil.addremove(repo, matcher, "", opts)):
3153 and scmutil.addremove(repo, matcher, "", opts)):
3154 raise error.Abort(
3154 raise error.Abort(
3155 _("failed to mark all new/missing files as added/removed"))
3155 _("failed to mark all new/missing files as added/removed"))
3156
3156
3157 filestoamend = set(f for f in wctx.files() if matcher(f))
3157 filestoamend = set(f for f in wctx.files() if matcher(f))
3158
3158
3159 changes = (len(filestoamend) > 0)
3159 changes = (len(filestoamend) > 0)
3160 if changes:
3160 if changes:
3161 # Recompute copies (avoid recording a -> b -> a)
3161 # Recompute copies (avoid recording a -> b -> a)
3162 copied = copies.pathcopies(base, wctx, matcher)
3162 copied = copies.pathcopies(base, wctx, matcher)
3163 if old.p2():
3163 if old.p2():
3164 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3164 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
3165
3165
3166 # Prune files which were reverted by the updates: if old
3166 # Prune files which were reverted by the updates: if old
3167 # introduced file X and the file was renamed in the working
3167 # introduced file X and the file was renamed in the working
3168 # copy, then those two files are the same and
3168 # copy, then those two files are the same and
3169 # we can discard X from our list of files. Likewise if X
3169 # we can discard X from our list of files. Likewise if X
3170 # was deleted, it's no longer relevant
3170 # was deleted, it's no longer relevant
3171 files.update(filestoamend)
3171 files.update(filestoamend)
3172 files = [f for f in files if not samefile(f, wctx, base)]
3172 files = [f for f in files if not samefile(f, wctx, base)]
3173
3173
3174 def filectxfn(repo, ctx_, path):
3174 def filectxfn(repo, ctx_, path):
3175 try:
3175 try:
3176 # If the file being considered is not amongst the files
3176 # If the file being considered is not amongst the files
3177 # to be amended, we should return the file context from the
3177 # to be amended, we should return the file context from the
3178 # old changeset. This avoids issues when only some files in
3178 # old changeset. This avoids issues when only some files in
3179 # the working copy are being amended but there are also
3179 # the working copy are being amended but there are also
3180 # changes to other files from the old changeset.
3180 # changes to other files from the old changeset.
3181 if path not in filestoamend:
3181 if path not in filestoamend:
3182 return old.filectx(path)
3182 return old.filectx(path)
3183
3183
3184 fctx = wctx[path]
3184 fctx = wctx[path]
3185
3185
3186 # Return None for removed files.
3186 # Return None for removed files.
3187 if not fctx.exists():
3187 if not fctx.exists():
3188 return None
3188 return None
3189
3189
3190 flags = fctx.flags()
3190 flags = fctx.flags()
3191 mctx = context.memfilectx(repo,
3191 mctx = context.memfilectx(repo,
3192 fctx.path(), fctx.data(),
3192 fctx.path(), fctx.data(),
3193 islink='l' in flags,
3193 islink='l' in flags,
3194 isexec='x' in flags,
3194 isexec='x' in flags,
3195 copied=copied.get(path))
3195 copied=copied.get(path))
3196 return mctx
3196 return mctx
3197 except KeyError:
3197 except KeyError:
3198 return None
3198 return None
3199 else:
3199 else:
3200 ui.note(_('copying changeset %s to %s\n') % (old, base))
3200 ui.note(_('copying changeset %s to %s\n') % (old, base))
3201
3201
3202 # Use version of files as in the old cset
3202 # Use version of files as in the old cset
3203 def filectxfn(repo, ctx_, path):
3203 def filectxfn(repo, ctx_, path):
3204 try:
3204 try:
3205 return old.filectx(path)
3205 return old.filectx(path)
3206 except KeyError:
3206 except KeyError:
3207 return None
3207 return None
3208
3208
3209 # See if we got a message from -m or -l, if not, open the editor with
3209 # See if we got a message from -m or -l, if not, open the editor with
3210 # the message of the changeset to amend.
3210 # the message of the changeset to amend.
3211 message = logmessage(ui, opts)
3211 message = logmessage(ui, opts)
3212
3212
3213 editform = mergeeditform(old, 'commit.amend')
3213 editform = mergeeditform(old, 'commit.amend')
3214 editor = getcommiteditor(editform=editform,
3214 editor = getcommiteditor(editform=editform,
3215 **pycompat.strkwargs(opts))
3215 **pycompat.strkwargs(opts))
3216
3216
3217 if not message:
3217 if not message:
3218 editor = getcommiteditor(edit=True, editform=editform)
3218 editor = getcommiteditor(edit=True, editform=editform)
3219 message = old.description()
3219 message = old.description()
3220
3220
3221 pureextra = extra.copy()
3221 pureextra = extra.copy()
3222 extra['amend_source'] = old.hex()
3222 extra['amend_source'] = old.hex()
3223
3223
3224 new = context.memctx(repo,
3224 new = context.memctx(repo,
3225 parents=[base.node(), old.p2().node()],
3225 parents=[base.node(), old.p2().node()],
3226 text=message,
3226 text=message,
3227 files=files,
3227 files=files,
3228 filectxfn=filectxfn,
3228 filectxfn=filectxfn,
3229 user=user,
3229 user=user,
3230 date=date,
3230 date=date,
3231 extra=extra,
3231 extra=extra,
3232 editor=editor)
3232 editor=editor)
3233
3233
3234 newdesc = changelog.stripdesc(new.description())
3234 newdesc = changelog.stripdesc(new.description())
3235 if ((not changes)
3235 if ((not changes)
3236 and newdesc == old.description()
3236 and newdesc == old.description()
3237 and user == old.user()
3237 and user == old.user()
3238 and date == old.date()
3238 and date == old.date()
3239 and pureextra == old.extra()):
3239 and pureextra == old.extra()):
3240 # nothing changed. continuing here would create a new node
3240 # nothing changed. continuing here would create a new node
3241 # anyway because of the amend_source noise.
3241 # anyway because of the amend_source noise.
3242 #
3242 #
3243 # This is not what we expect from amend.
3243 # This is not what we expect from amend.
3244 return old.node()
3244 return old.node()
3245
3245
3246 if opts.get('secret'):
3246 if opts.get('secret'):
3247 commitphase = 'secret'
3247 commitphase = 'secret'
3248 else:
3248 else:
3249 commitphase = old.phase()
3249 commitphase = old.phase()
3250 overrides = {('phases', 'new-commit'): commitphase}
3250 overrides = {('phases', 'new-commit'): commitphase}
3251 with ui.configoverride(overrides, 'amend'):
3251 with ui.configoverride(overrides, 'amend'):
3252 newid = repo.commitctx(new)
3252 newid = repo.commitctx(new)
3253
3253
3254 # Reroute the working copy parent to the new changeset
3254 # Reroute the working copy parent to the new changeset
3255 repo.setparents(newid, nullid)
3255 repo.setparents(newid, nullid)
3256 mapping = {old.node(): (newid,)}
3256 mapping = {old.node(): (newid,)}
3257 obsmetadata = None
3257 obsmetadata = None
3258 if opts.get('note'):
3258 if opts.get('note'):
3259 obsmetadata = {'note': opts['note']}
3259 obsmetadata = {'note': opts['note']}
3260 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)
3260 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata)
3261
3261
3262 # Fixing the dirstate because localrepo.commitctx does not update
3262 # Fixing the dirstate because localrepo.commitctx does not update
3263 # it. This is rather convenient because we did not need to update
3263 # it. This is rather convenient because we did not need to update
3264 # the dirstate for all the files in the new commit which commitctx
3264 # the dirstate for all the files in the new commit which commitctx
3265 # could have done if it updated the dirstate. Now, we can
3265 # could have done if it updated the dirstate. Now, we can
3266 # selectively update the dirstate only for the amended files.
3266 # selectively update the dirstate only for the amended files.
3267 dirstate = repo.dirstate
3267 dirstate = repo.dirstate
3268
3268
3269 # Update the state of the files which were added and
3269 # Update the state of the files which were added and
3270 # modified in the amend to "normal" in the dirstate.
3270 # modified in the amend to "normal" in the dirstate.
3271 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3271 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
3272 for f in normalfiles:
3272 for f in normalfiles:
3273 dirstate.normal(f)
3273 dirstate.normal(f)
3274
3274
3275 # Update the state of files which were removed in the amend
3275 # Update the state of files which were removed in the amend
3276 # to "removed" in the dirstate.
3276 # to "removed" in the dirstate.
3277 removedfiles = set(wctx.removed()) & filestoamend
3277 removedfiles = set(wctx.removed()) & filestoamend
3278 for f in removedfiles:
3278 for f in removedfiles:
3279 dirstate.drop(f)
3279 dirstate.drop(f)
3280
3280
3281 return newid
3281 return newid
3282
3282
3283 def commiteditor(repo, ctx, subs, editform=''):
3283 def commiteditor(repo, ctx, subs, editform=''):
3284 if ctx.description():
3284 if ctx.description():
3285 return ctx.description()
3285 return ctx.description()
3286 return commitforceeditor(repo, ctx, subs, editform=editform,
3286 return commitforceeditor(repo, ctx, subs, editform=editform,
3287 unchangedmessagedetection=True)
3287 unchangedmessagedetection=True)
3288
3288
3289 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
3289 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
3290 editform='', unchangedmessagedetection=False):
3290 editform='', unchangedmessagedetection=False):
3291 if not extramsg:
3291 if not extramsg:
3292 extramsg = _("Leave message empty to abort commit.")
3292 extramsg = _("Leave message empty to abort commit.")
3293
3293
3294 forms = [e for e in editform.split('.') if e]
3294 forms = [e for e in editform.split('.') if e]
3295 forms.insert(0, 'changeset')
3295 forms.insert(0, 'changeset')
3296 templatetext = None
3296 templatetext = None
3297 while forms:
3297 while forms:
3298 ref = '.'.join(forms)
3298 ref = '.'.join(forms)
3299 if repo.ui.config('committemplate', ref):
3299 if repo.ui.config('committemplate', ref):
3300 templatetext = committext = buildcommittemplate(
3300 templatetext = committext = buildcommittemplate(
3301 repo, ctx, subs, extramsg, ref)
3301 repo, ctx, subs, extramsg, ref)
3302 break
3302 break
3303 forms.pop()
3303 forms.pop()
3304 else:
3304 else:
3305 committext = buildcommittext(repo, ctx, subs, extramsg)
3305 committext = buildcommittext(repo, ctx, subs, extramsg)
3306
3306
3307 # run editor in the repository root
3307 # run editor in the repository root
3308 olddir = pycompat.getcwd()
3308 olddir = pycompat.getcwd()
3309 os.chdir(repo.root)
3309 os.chdir(repo.root)
3310
3310
3311 # make in-memory changes visible to external process
3311 # make in-memory changes visible to external process
3312 tr = repo.currenttransaction()
3312 tr = repo.currenttransaction()
3313 repo.dirstate.write(tr)
3313 repo.dirstate.write(tr)
3314 pending = tr and tr.writepending() and repo.root
3314 pending = tr and tr.writepending() and repo.root
3315
3315
3316 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
3316 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
3317 editform=editform, pending=pending,
3317 editform=editform, pending=pending,
3318 repopath=repo.path, action='commit')
3318 repopath=repo.path, action='commit')
3319 text = editortext
3319 text = editortext
3320
3320
3321 # strip away anything below this special string (used for editors that want
3321 # strip away anything below this special string (used for editors that want
3322 # to display the diff)
3322 # to display the diff)
3323 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3323 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
3324 if stripbelow:
3324 if stripbelow:
3325 text = text[:stripbelow.start()]
3325 text = text[:stripbelow.start()]
3326
3326
3327 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
3327 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
3328 os.chdir(olddir)
3328 os.chdir(olddir)
3329
3329
3330 if finishdesc:
3330 if finishdesc:
3331 text = finishdesc(text)
3331 text = finishdesc(text)
3332 if not text.strip():
3332 if not text.strip():
3333 raise error.Abort(_("empty commit message"))
3333 raise error.Abort(_("empty commit message"))
3334 if unchangedmessagedetection and editortext == templatetext:
3334 if unchangedmessagedetection and editortext == templatetext:
3335 raise error.Abort(_("commit message unchanged"))
3335 raise error.Abort(_("commit message unchanged"))
3336
3336
3337 return text
3337 return text
3338
3338
3339 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3339 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
3340 ui = repo.ui
3340 ui = repo.ui
3341 spec = formatter.templatespec(ref, None, None)
3341 spec = formatter.templatespec(ref, None, None)
3342 t = changeset_templater(ui, repo, spec, None, {}, False)
3342 t = changeset_templater(ui, repo, spec, None, {}, False)
3343 t.t.cache.update((k, templater.unquotestring(v))
3343 t.t.cache.update((k, templater.unquotestring(v))
3344 for k, v in repo.ui.configitems('committemplate'))
3344 for k, v in repo.ui.configitems('committemplate'))
3345
3345
3346 if not extramsg:
3346 if not extramsg:
3347 extramsg = '' # ensure that extramsg is string
3347 extramsg = '' # ensure that extramsg is string
3348
3348
3349 ui.pushbuffer()
3349 ui.pushbuffer()
3350 t.show(ctx, extramsg=extramsg)
3350 t.show(ctx, extramsg=extramsg)
3351 return ui.popbuffer()
3351 return ui.popbuffer()
3352
3352
3353 def hgprefix(msg):
3353 def hgprefix(msg):
3354 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
3354 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
3355
3355
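# For illustration, the helper above prefixes each non-empty line with "HG:"
# and drops empty ones:
#
#   hgprefix("user: alice\n\nbranch default")
#   # -> "HG: user: alice\nHG: branch default"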
3356 def buildcommittext(repo, ctx, subs, extramsg):
3356 def buildcommittext(repo, ctx, subs, extramsg):
3357 edittext = []
3357 edittext = []
3358 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3358 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
3359 if ctx.description():
3359 if ctx.description():
3360 edittext.append(ctx.description())
3360 edittext.append(ctx.description())
3361 edittext.append("")
3361 edittext.append("")
3362 edittext.append("") # Empty line between message and comments.
3362 edittext.append("") # Empty line between message and comments.
3363 edittext.append(hgprefix(_("Enter commit message."
3363 edittext.append(hgprefix(_("Enter commit message."
3364 " Lines beginning with 'HG:' are removed.")))
3364 " Lines beginning with 'HG:' are removed.")))
3365 edittext.append(hgprefix(extramsg))
3365 edittext.append(hgprefix(extramsg))
3366 edittext.append("HG: --")
3366 edittext.append("HG: --")
3367 edittext.append(hgprefix(_("user: %s") % ctx.user()))
3367 edittext.append(hgprefix(_("user: %s") % ctx.user()))
3368 if ctx.p2():
3368 if ctx.p2():
3369 edittext.append(hgprefix(_("branch merge")))
3369 edittext.append(hgprefix(_("branch merge")))
3370 if ctx.branch():
3370 if ctx.branch():
3371 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
3371 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
3372 if bookmarks.isactivewdirparent(repo):
3372 if bookmarks.isactivewdirparent(repo):
3373 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
3373 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
3374 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
3374 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
3375 edittext.extend([hgprefix(_("added %s") % f) for f in added])
3375 edittext.extend([hgprefix(_("added %s") % f) for f in added])
3376 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
3376 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
3377 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
3377 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
3378 if not added and not modified and not removed:
3378 if not added and not modified and not removed:
3379 edittext.append(hgprefix(_("no files changed")))
3379 edittext.append(hgprefix(_("no files changed")))
3380 edittext.append("")
3380 edittext.append("")
3381
3381
3382 return "\n".join(edittext)
3382 return "\n".join(edittext)
3383
3383
3384 def commitstatus(repo, node, branch, bheads=None, opts=None):
3384 def commitstatus(repo, node, branch, bheads=None, opts=None):
3385 if opts is None:
3385 if opts is None:
3386 opts = {}
3386 opts = {}
3387 ctx = repo[node]
3387 ctx = repo[node]
3388 parents = ctx.parents()
3388 parents = ctx.parents()
3389
3389
3390 if (not opts.get('amend') and bheads and node not in bheads and not
3390 if (not opts.get('amend') and bheads and node not in bheads and not
3391 [x for x in parents if x.node() in bheads and x.branch() == branch]):
3391 [x for x in parents if x.node() in bheads and x.branch() == branch]):
3392 repo.ui.status(_('created new head\n'))
3392 repo.ui.status(_('created new head\n'))
3393 # The message is not printed for initial roots. For the other
3393 # The message is not printed for initial roots. For the other
3394 # changesets, it is printed in the following situations:
3394 # changesets, it is printed in the following situations:
3395 #
3395 #
3396 # Par column: for the 2 parents with ...
3396 # Par column: for the 2 parents with ...
3397 # N: null or no parent
3397 # N: null or no parent
3398 # B: parent is on another named branch
3398 # B: parent is on another named branch
3399 # C: parent is a regular non head changeset
3399 # C: parent is a regular non head changeset
3400 # H: parent was a branch head of the current branch
3400 # H: parent was a branch head of the current branch
3401 # Msg column: whether we print "created new head" message
3401 # Msg column: whether we print "created new head" message
3402 # In the following, it is assumed that there already exists some
3402 # In the following, it is assumed that there already exists some
3403 # initial branch heads of the current branch, otherwise nothing is
3403 # initial branch heads of the current branch, otherwise nothing is
3404 # printed anyway.
3404 # printed anyway.
3405 #
3405 #
3406 # Par Msg Comment
3406 # Par Msg Comment
3407 # N N y additional topo root
3407 # N N y additional topo root
3408 #
3408 #
3409 # B N y additional branch root
3409 # B N y additional branch root
3410 # C N y additional topo head
3410 # C N y additional topo head
3411 # H N n usual case
3411 # H N n usual case
3412 #
3412 #
3413 # B B y weird additional branch root
3413 # B B y weird additional branch root
3414 # C B y branch merge
3414 # C B y branch merge
3415 # H B n merge with named branch
3415 # H B n merge with named branch
3416 #
3416 #
3417 # C C y additional head from merge
3417 # C C y additional head from merge
3418 # C H n merge with a head
3418 # C H n merge with a head
3419 #
3419 #
3420 # H H n head merge: head count decreases
3420 # H H n head merge: head count decreases
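# For instance (reading the table above): committing on top of an older
# changeset of the current branch gives a 'C' parent and an additional
# topological head, so the message is printed; committing on top of the
# current branch head gives an 'H' parent and stays silent.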
3421
3421
3422 if not opts.get('close_branch'):
3422 if not opts.get('close_branch'):
3423 for r in parents:
3423 for r in parents:
3424 if r.closesbranch() and r.branch() == branch:
3424 if r.closesbranch() and r.branch() == branch:
3425 repo.ui.status(_('reopening closed branch head %d\n') % r)
3425 repo.ui.status(_('reopening closed branch head %d\n') % r)
3426
3426
3427 if repo.ui.debugflag:
3427 if repo.ui.debugflag:
3428 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
3428 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
3429 elif repo.ui.verbose:
3429 elif repo.ui.verbose:
3430 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3430 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
3431
3431
3432 def postcommitstatus(repo, pats, opts):
3432 def postcommitstatus(repo, pats, opts):
3433 return repo.status(match=scmutil.match(repo[None], pats, opts))
3433 return repo.status(match=scmutil.match(repo[None], pats, opts))
3434
3434
3435 def revert(ui, repo, ctx, parents, *pats, **opts):
3435 def revert(ui, repo, ctx, parents, *pats, **opts):
3436 parent, p2 = parents
3436 parent, p2 = parents
3437 node = ctx.node()
3437 node = ctx.node()
3438
3438
3439 mf = ctx.manifest()
3439 mf = ctx.manifest()
3440 if node == p2:
3440 if node == p2:
3441 parent = p2
3441 parent = p2
3442
3442
3443 # need all matching names in dirstate and manifest of target rev,
3443 # need all matching names in dirstate and manifest of target rev,
3444 # so have to walk both. do not print errors if files exist in one
3444 # so have to walk both. do not print errors if files exist in one
3445 # but not the other. in both cases, filesets should be evaluated against
3445 # but not the other. in both cases, filesets should be evaluated against
3446 # workingctx to get consistent result (issue4497). this means 'set:**'
3446 # workingctx to get consistent result (issue4497). this means 'set:**'
3447 # cannot be used to select missing files from target rev.
3447 # cannot be used to select missing files from target rev.
3448
3448
3449 # `names` is a mapping for all elements in working copy and target revision
3449 # `names` is a mapping for all elements in working copy and target revision
3450 # The mapping is in the form:
3450 # The mapping is in the form:
3451 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
3451 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
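# e.g. when revert is invoked from the repository root with the explicit
# pattern 'dir/file.py', the entry would be (illustrative values):
#   names['dir/file.py'] = ('dir/file.py', True)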
3452 names = {}
3452 names = {}
3453
3453
3454 with repo.wlock():
3454 with repo.wlock():
3455 ## filling of the `names` mapping
3455 ## filling of the `names` mapping
3456 # walk dirstate to fill `names`
3456 # walk dirstate to fill `names`
3457
3457
3458 interactive = opts.get('interactive', False)
3458 interactive = opts.get('interactive', False)
3459 wctx = repo[None]
3459 wctx = repo[None]
3460 m = scmutil.match(wctx, pats, opts)
3460 m = scmutil.match(wctx, pats, opts)
3461
3461
3462 # we'll need this later
3462 # we'll need this later
3463 targetsubs = sorted(s for s in wctx.substate if m(s))
3463 targetsubs = sorted(s for s in wctx.substate if m(s))
3464
3464
3465 if not m.always():
3465 if not m.always():
3466 matcher = matchmod.badmatch(m, lambda x, y: False)
3466 matcher = matchmod.badmatch(m, lambda x, y: False)
3467 for abs in wctx.walk(matcher):
3467 for abs in wctx.walk(matcher):
3468 names[abs] = m.rel(abs), m.exact(abs)
3468 names[abs] = m.rel(abs), m.exact(abs)
3469
3469
3470 # walk target manifest to fill `names`
3470 # walk target manifest to fill `names`
3471
3471
3472 def badfn(path, msg):
3472 def badfn(path, msg):
3473 if path in names:
3473 if path in names:
3474 return
3474 return
3475 if path in ctx.substate:
3475 if path in ctx.substate:
3476 return
3476 return
3477 path_ = path + '/'
3477 path_ = path + '/'
3478 for f in names:
3478 for f in names:
3479 if f.startswith(path_):
3479 if f.startswith(path_):
3480 return
3480 return
3481 ui.warn("%s: %s\n" % (m.rel(path), msg))
3481 ui.warn("%s: %s\n" % (m.rel(path), msg))
3482
3482
3483 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3483 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3484 if abs not in names:
3484 if abs not in names:
3485 names[abs] = m.rel(abs), m.exact(abs)
3485 names[abs] = m.rel(abs), m.exact(abs)
3486
3486
3487 # Find status of all files in `names`.
3487 # Find status of all files in `names`.
3488 m = scmutil.matchfiles(repo, names)
3488 m = scmutil.matchfiles(repo, names)
3489
3489
3490 changes = repo.status(node1=node, match=m,
3490 changes = repo.status(node1=node, match=m,
3491 unknown=True, ignored=True, clean=True)
3491 unknown=True, ignored=True, clean=True)
3492 else:
3492 else:
3493 changes = repo.status(node1=node, match=m)
3493 changes = repo.status(node1=node, match=m)
3494 for kind in changes:
3494 for kind in changes:
3495 for abs in kind:
3495 for abs in kind:
3496 names[abs] = m.rel(abs), m.exact(abs)
3496 names[abs] = m.rel(abs), m.exact(abs)
3497
3497
3498 m = scmutil.matchfiles(repo, names)
3498 m = scmutil.matchfiles(repo, names)
3499
3499
3500 modified = set(changes.modified)
3500 modified = set(changes.modified)
3501 added = set(changes.added)
3501 added = set(changes.added)
3502 removed = set(changes.removed)
3502 removed = set(changes.removed)
3503 _deleted = set(changes.deleted)
3503 _deleted = set(changes.deleted)
3504 unknown = set(changes.unknown)
3504 unknown = set(changes.unknown)
3505 unknown.update(changes.ignored)
3505 unknown.update(changes.ignored)
3506 clean = set(changes.clean)
3506 clean = set(changes.clean)
3507 modadded = set()
3507 modadded = set()
3508
3508
3509 # We need to account for the state of the file in the dirstate,
3509 # We need to account for the state of the file in the dirstate,
3510 # even when we revert against something other than the parent. This will
3510 # even when we revert against something other than the parent. This will
3511 # slightly alter the behavior of revert (backing up or not, deleting
3511 # slightly alter the behavior of revert (backing up or not, deleting
3512 # or just forgetting, etc.).
3512 # or just forgetting, etc.).
3513 if parent == node:
3513 if parent == node:
3514 dsmodified = modified
3514 dsmodified = modified
3515 dsadded = added
3515 dsadded = added
3516 dsremoved = removed
3516 dsremoved = removed
3517 # store all local modifications, useful later for rename detection
3517 # store all local modifications, useful later for rename detection
3518 localchanges = dsmodified | dsadded
3518 localchanges = dsmodified | dsadded
3519 modified, added, removed = set(), set(), set()
3519 modified, added, removed = set(), set(), set()
3520 else:
3520 else:
3521 changes = repo.status(node1=parent, match=m)
3521 changes = repo.status(node1=parent, match=m)
3522 dsmodified = set(changes.modified)
3522 dsmodified = set(changes.modified)
3523 dsadded = set(changes.added)
3523 dsadded = set(changes.added)
3524 dsremoved = set(changes.removed)
3524 dsremoved = set(changes.removed)
3525 # store all local modifications, useful later for rename detection
3525 # store all local modifications, useful later for rename detection
3526 localchanges = dsmodified | dsadded
3526 localchanges = dsmodified | dsadded
3527
3527
3528 # only take removes between wc and target into account
3528 # only take removes between wc and target into account
3529 clean |= dsremoved - removed
3529 clean |= dsremoved - removed
3530 dsremoved &= removed
3530 dsremoved &= removed
3531 # distinguish between dirstate removes and the others
3531 # distinguish between dirstate removes and the others
3532 removed -= dsremoved
3532 removed -= dsremoved
3533
3533
3534 modadded = added & dsmodified
3534 modadded = added & dsmodified
3535 added -= modadded
3535 added -= modadded
3536
3536
3537 # tell newly modified files apart.
3537 # tell newly modified files apart.
3538 dsmodified &= modified
3538 dsmodified &= modified
3539 dsmodified |= modified & dsadded # dirstate added may need backup
3539 dsmodified |= modified & dsadded # dirstate added may need backup
3540 modified -= dsmodified
3540 modified -= dsmodified
3541
3541
3542 # We need to wait for some post-processing to update this set
3542 # We need to wait for some post-processing to update this set
3543 # before making the distinction. The dirstate will be used for
3543 # before making the distinction. The dirstate will be used for
3544 # that purpose.
3544 # that purpose.
3545 dsadded = added
3545 dsadded = added
3546
3546
3547 # in case of merge, files that are actually added can be reported as
3547 # in case of merge, files that are actually added can be reported as
3548 # modified; we need to post-process the result
3548 # modified; we need to post-process the result
3549 if p2 != nullid:
3549 if p2 != nullid:
3550 mergeadd = set(dsmodified)
3550 mergeadd = set(dsmodified)
3551 for path in dsmodified:
3551 for path in dsmodified:
3552 if path in mf:
3552 if path in mf:
3553 mergeadd.remove(path)
3553 mergeadd.remove(path)
3554 dsadded |= mergeadd
3554 dsadded |= mergeadd
3555 dsmodified -= mergeadd
3555 dsmodified -= mergeadd
3556
3556
3557 # if f is a rename, update `names` to also revert the source
3557 # if f is a rename, update `names` to also revert the source
3558 cwd = repo.getcwd()
3558 cwd = repo.getcwd()
3559 for f in localchanges:
3559 for f in localchanges:
3560 src = repo.dirstate.copied(f)
3560 src = repo.dirstate.copied(f)
3561 # XXX should we check for rename down to target node?
3561 # XXX should we check for rename down to target node?
3562 if src and src not in names and repo.dirstate[src] == 'r':
3562 if src and src not in names and repo.dirstate[src] == 'r':
3563 dsremoved.add(src)
3563 dsremoved.add(src)
3564 names[src] = (repo.pathto(src, cwd), True)
3564 names[src] = (repo.pathto(src, cwd), True)
3565
3565
3566 # determine the exact nature of the deleted files
3566 # determine the exact nature of the deleted files
3567 deladded = set(_deleted)
3567 deladded = set(_deleted)
3568 for path in _deleted:
3568 for path in _deleted:
3569 if path in mf:
3569 if path in mf:
3570 deladded.remove(path)
3570 deladded.remove(path)
3571 deleted = _deleted - deladded
3571 deleted = _deleted - deladded
3572
3572
3573 # distinguish between files to forget and the others
3573 # distinguish between files to forget and the others
3574 added = set()
3574 added = set()
3575 for abs in dsadded:
3575 for abs in dsadded:
3576 if repo.dirstate[abs] != 'a':
3576 if repo.dirstate[abs] != 'a':
3577 added.add(abs)
3577 added.add(abs)
3578 dsadded -= added
3578 dsadded -= added
3579
3579
3580 for abs in deladded:
3580 for abs in deladded:
3581 if repo.dirstate[abs] == 'a':
3581 if repo.dirstate[abs] == 'a':
3582 dsadded.add(abs)
3582 dsadded.add(abs)
3583 deladded -= dsadded
3583 deladded -= dsadded
3584
3584
3585 # For files marked as removed, we check if an unknown file is present at
3585 # For files marked as removed, we check if an unknown file is present at
3586 # the same path. If such a file exists it may need to be backed up.
3586 # the same path. If such a file exists it may need to be backed up.
3587 # Making the distinction at this stage keeps the backup
3587 # Making the distinction at this stage keeps the backup
3588 # logic simpler.
3588 # logic simpler.
3589 removunk = set()
3589 removunk = set()
3590 for abs in removed:
3590 for abs in removed:
3591 target = repo.wjoin(abs)
3591 target = repo.wjoin(abs)
3592 if os.path.lexists(target):
3592 if os.path.lexists(target):
3593 removunk.add(abs)
3593 removunk.add(abs)
3594 removed -= removunk
3594 removed -= removunk
3595
3595
3596 dsremovunk = set()
3596 dsremovunk = set()
3597 for abs in dsremoved:
3597 for abs in dsremoved:
3598 target = repo.wjoin(abs)
3598 target = repo.wjoin(abs)
3599 if os.path.lexists(target):
3599 if os.path.lexists(target):
3600 dsremovunk.add(abs)
3600 dsremovunk.add(abs)
3601 dsremoved -= dsremovunk
3601 dsremoved -= dsremovunk
3602
3602
3603 # actions to be actually performed by revert
3603 # actions to be actually performed by revert
3604 # (<list of files>, <message>) tuple
3604 # (<list of files>, <message>) tuple
3605 actions = {'revert': ([], _('reverting %s\n')),
3605 actions = {'revert': ([], _('reverting %s\n')),
3606 'add': ([], _('adding %s\n')),
3606 'add': ([], _('adding %s\n')),
3607 'remove': ([], _('removing %s\n')),
3607 'remove': ([], _('removing %s\n')),
3608 'drop': ([], _('removing %s\n')),
3608 'drop': ([], _('removing %s\n')),
3609 'forget': ([], _('forgetting %s\n')),
3609 'forget': ([], _('forgetting %s\n')),
3610 'undelete': ([], _('undeleting %s\n')),
3610 'undelete': ([], _('undeleting %s\n')),
3611 'noop': (None, _('no changes needed to %s\n')),
3611 'noop': (None, _('no changes needed to %s\n')),
3612 'unknown': (None, _('file not managed: %s\n')),
3612 'unknown': (None, _('file not managed: %s\n')),
3613 }
3613 }
3614
3614
3615 # "constant" that convey the backup strategy.
3615 # "constant" that convey the backup strategy.
3616 # All set to `discard` if `no-backup` is set do avoid checking
3616 # All set to `discard` if `no-backup` is set do avoid checking
3617 # no_backup lower in the code.
3617 # no_backup lower in the code.
3618 # These values are ordered for comparison purposes
3618 # These values are ordered for comparison purposes
3619 backupinteractive = 3 # do backup if interactively modified
3619 backupinteractive = 3 # do backup if interactively modified
3620 backup = 2 # unconditionally do backup
3620 backup = 2 # unconditionally do backup
3621 check = 1 # check if the existing file differs from target
3621 check = 1 # check if the existing file differs from target
3622 discard = 0 # never do backup
3622 discard = 0 # never do backup
3623 if opts.get('no_backup'):
3623 if opts.get('no_backup'):
3624 backupinteractive = backup = check = discard
3624 backupinteractive = backup = check = discard
3625 if interactive:
3625 if interactive:
3626 dsmodifiedbackup = backupinteractive
3626 dsmodifiedbackup = backupinteractive
3627 else:
3627 else:
3628 dsmodifiedbackup = backup
3628 dsmodifiedbackup = backup
3629 tobackup = set()
3629 tobackup = set()
3630
3630
3631 backupanddel = actions['remove']
3631 backupanddel = actions['remove']
3632 if not opts.get('no_backup'):
3632 if not opts.get('no_backup'):
3633 backupanddel = actions['drop']
3633 backupanddel = actions['drop']
3634
3634
3635 disptable = (
3635 disptable = (
3636 # dispatch table:
3636 # dispatch table:
3637 # file state
3637 # file state
3638 # action
3638 # action
3639 # make backup
3639 # make backup
3640
3640
3641 ## Sets that will result in changes to files on disk
3641 ## Sets that will result in changes to files on disk
3642 # Modified compared to target, no local change
3642 # Modified compared to target, no local change
3643 (modified, actions['revert'], discard),
3643 (modified, actions['revert'], discard),
3644 # Modified compared to target, but local file is deleted
3644 # Modified compared to target, but local file is deleted
3645 (deleted, actions['revert'], discard),
3645 (deleted, actions['revert'], discard),
3646 # Modified compared to target, local change
3646 # Modified compared to target, local change
3647 (dsmodified, actions['revert'], dsmodifiedbackup),
3647 (dsmodified, actions['revert'], dsmodifiedbackup),
3648 # Added since target
3648 # Added since target
3649 (added, actions['remove'], discard),
3649 (added, actions['remove'], discard),
3650 # Added in working directory
3650 # Added in working directory
3651 (dsadded, actions['forget'], discard),
3651 (dsadded, actions['forget'], discard),
3652 # Added since target, have local modification
3652 # Added since target, have local modification
3653 (modadded, backupanddel, backup),
3653 (modadded, backupanddel, backup),
3654 # Added since target but file is missing in working directory
3654 # Added since target but file is missing in working directory
3655 (deladded, actions['drop'], discard),
3655 (deladded, actions['drop'], discard),
3656 # Removed since target, before working copy parent
3656 # Removed since target, before working copy parent
3657 (removed, actions['add'], discard),
3657 (removed, actions['add'], discard),
3658 # Same as `removed` but an unknown file exists at the same path
3658 # Same as `removed` but an unknown file exists at the same path
3659 (removunk, actions['add'], check),
3659 (removunk, actions['add'], check),
3660 # Removed since target, marked as such in working copy parent
3660 # Removed since target, marked as such in working copy parent
3661 (dsremoved, actions['undelete'], discard),
3661 (dsremoved, actions['undelete'], discard),
3662 # Same as `dsremoved` but an unknown file exists at the same path
3662 # Same as `dsremoved` but an unknown file exists at the same path
3663 (dsremovunk, actions['undelete'], check),
3663 (dsremovunk, actions['undelete'], check),
3664 ## the following sets do not result in any file changes
3664 ## the following sets do not result in any file changes
3665 # File with no modification
3665 # File with no modification
3666 (clean, actions['noop'], discard),
3666 (clean, actions['noop'], discard),
3667 # Existing file, not tracked anywhere
3667 # Existing file, not tracked anywhere
3668 (unknown, actions['unknown'], discard),
3668 (unknown, actions['unknown'], discard),
3669 )
3669 )
3670
3670
3671 for abs, (rel, exact) in sorted(names.items()):
3671 for abs, (rel, exact) in sorted(names.items()):
3672 # target file to be touched on disk (relative to cwd)
3672 # target file to be touched on disk (relative to cwd)
3673 target = repo.wjoin(abs)
3673 target = repo.wjoin(abs)
3674 # search the entry in the dispatch table.
3674 # search the entry in the dispatch table.
3675 # if the file is in any of these sets, it was touched in the working
3675 # if the file is in any of these sets, it was touched in the working
3676 # directory parent and we are sure it needs to be reverted.
3676 # directory parent and we are sure it needs to be reverted.
3677 for table, (xlist, msg), dobackup in disptable:
3677 for table, (xlist, msg), dobackup in disptable:
3678 if abs not in table:
3678 if abs not in table:
3679 continue
3679 continue
3680 if xlist is not None:
3680 if xlist is not None:
3681 xlist.append(abs)
3681 xlist.append(abs)
3682 if dobackup:
3682 if dobackup:
3683 # If in interactive mode, don't automatically create
3683 # If in interactive mode, don't automatically create
3684 # .orig files (issue4793)
3684 # .orig files (issue4793)
3685 if dobackup == backupinteractive:
3685 if dobackup == backupinteractive:
3686 tobackup.add(abs)
3686 tobackup.add(abs)
3687 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3687 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3688 bakname = scmutil.origpath(ui, repo, rel)
3688 bakname = scmutil.origpath(ui, repo, rel)
3689 ui.note(_('saving current version of %s as %s\n') %
3689 ui.note(_('saving current version of %s as %s\n') %
3690 (rel, bakname))
3690 (rel, bakname))
3691 if not opts.get('dry_run'):
3691 if not opts.get('dry_run'):
3692 if interactive:
3692 if interactive:
3693 util.copyfile(target, bakname)
3693 util.copyfile(target, bakname)
3694 else:
3694 else:
3695 util.rename(target, bakname)
3695 util.rename(target, bakname)
3696 if ui.verbose or not exact:
3696 if ui.verbose or not exact:
3697 if not isinstance(msg, basestring):
3697 if not isinstance(msg, basestring):
3698 msg = msg(abs)
3698 msg = msg(abs)
3699 ui.status(msg % rel)
3699 ui.status(msg % rel)
3700 elif exact:
3700 elif exact:
3701 ui.warn(msg % rel)
3701 ui.warn(msg % rel)
3702 break
3702 break
3703
3703
3704 if not opts.get('dry_run'):
3704 if not opts.get('dry_run'):
3705 needdata = ('revert', 'add', 'undelete')
3705 needdata = ('revert', 'add', 'undelete')
3706 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3706 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3707 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3707 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3708
3708
3709 if targetsubs:
3709 if targetsubs:
3710 # Revert the subrepos on the revert list
3710 # Revert the subrepos on the revert list
3711 for sub in targetsubs:
3711 for sub in targetsubs:
3712 try:
3712 try:
3713 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3713 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3714 except KeyError:
3714 except KeyError:
3715 raise error.Abort("subrepository '%s' does not exist in %s!"
3715 raise error.Abort("subrepository '%s' does not exist in %s!"
3716 % (sub, short(ctx.node())))
3716 % (sub, short(ctx.node())))
3717
3717
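The dispatch table above pairs a file-state set with an action and a backup level; the first set that contains a given file decides what happens to it. A minimal standalone sketch of that lookup pattern, using toy sets and plain strings rather than Mercurial objects:

    def dispatch(path, table):
        # the first entry whose set contains the path wins
        for fileset, action, dobackup in table:
            if path in fileset:
                return action, dobackup
        return None, None

    table = (
        ({'a.txt'}, 'revert', 0),   # modified relative to target: revert, no backup
        ({'b.txt'}, 'remove', 2),   # added with local changes: remove, always backup
    )
    assert dispatch('b.txt', table) == ('remove', 2)
    assert dispatch('c.txt', table) == (None, None)   # file not in any set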
3718 def _revertprefetch(repo, ctx, *files):
3718 def _revertprefetch(repo, ctx, *files):
3719 """Let extension changing the storage layer prefetch content"""
3719 """Let extension changing the storage layer prefetch content"""
3720
3720
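_revertprefetch above is an empty hook meant to be overridden. A hedged sketch of how an extension that changes the storage layer might wrap it with extensions.wrapfunction(); the wrapper name and behavior are illustrative, not an existing extension:

    from mercurial import cmdutil, extensions

    def _prefetchingrevert(orig, repo, ctx, *files):
        # touch the data of every file about to be reverted so a remote or
        # lazy store can fetch it before revert starts writing to disk
        for filelist in files:
            for f in filelist or []:
                ctx[f].data()
        return orig(repo, ctx, *files)

    def extsetup(ui):
        extensions.wrapfunction(cmdutil, '_revertprefetch', _prefetchingrevert)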
3721 def _performrevert(repo, parents, ctx, actions, interactive=False,
3721 def _performrevert(repo, parents, ctx, actions, interactive=False,
3722 tobackup=None):
3722 tobackup=None):
3723 """function that actually perform all the actions computed for revert
3723 """function that actually perform all the actions computed for revert
3724
3724
3725 This is an independent function to let extensions plug in and react to
3725 This is an independent function to let extensions plug in and react to
3726 the imminent revert.
3726 the imminent revert.
3727
3727
3728 Make sure you have the working directory locked when calling this function.
3728 Make sure you have the working directory locked when calling this function.
3729 """
3729 """
3730 parent, p2 = parents
3730 parent, p2 = parents
3731 node = ctx.node()
3731 node = ctx.node()
3732 excluded_files = []
3732 excluded_files = []
3733 matcher_opts = {"exclude": excluded_files}
3733 matcher_opts = {"exclude": excluded_files}
3734
3734
3735 def checkout(f):
3735 def checkout(f):
3736 fc = ctx[f]
3736 fc = ctx[f]
3737 repo.wwrite(f, fc.data(), fc.flags())
3737 repo.wwrite(f, fc.data(), fc.flags())
3738
3738
3739 def doremove(f):
3739 def doremove(f):
3740 try:
3740 try:
3741 repo.wvfs.unlinkpath(f)
3741 repo.wvfs.unlinkpath(f)
3742 except OSError:
3742 except OSError:
3743 pass
3743 pass
3744 repo.dirstate.remove(f)
3744 repo.dirstate.remove(f)
3745
3745
3746 audit_path = pathutil.pathauditor(repo.root, cached=True)
3746 audit_path = pathutil.pathauditor(repo.root, cached=True)
3747 for f in actions['forget'][0]:
3747 for f in actions['forget'][0]:
3748 if interactive:
3748 if interactive:
3749 choice = repo.ui.promptchoice(
3749 choice = repo.ui.promptchoice(
3750 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3750 _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
3751 if choice == 0:
3751 if choice == 0:
3752 repo.dirstate.drop(f)
3752 repo.dirstate.drop(f)
3753 else:
3753 else:
3754 excluded_files.append(repo.wjoin(f))
3754 excluded_files.append(repo.wjoin(f))
3755 else:
3755 else:
3756 repo.dirstate.drop(f)
3756 repo.dirstate.drop(f)
3757 for f in actions['remove'][0]:
3757 for f in actions['remove'][0]:
3758 audit_path(f)
3758 audit_path(f)
3759 if interactive:
3759 if interactive:
3760 choice = repo.ui.promptchoice(
3760 choice = repo.ui.promptchoice(
3761 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3761 _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
3762 if choice == 0:
3762 if choice == 0:
3763 doremove(f)
3763 doremove(f)
3764 else:
3764 else:
3765 excluded_files.append(repo.wjoin(f))
3765 excluded_files.append(repo.wjoin(f))
3766 else:
3766 else:
3767 doremove(f)
3767 doremove(f)
3768 for f in actions['drop'][0]:
3768 for f in actions['drop'][0]:
3769 audit_path(f)
3769 audit_path(f)
3770 repo.dirstate.remove(f)
3770 repo.dirstate.remove(f)
3771
3771
3772 normal = None
3772 normal = None
3773 if node == parent:
3773 if node == parent:
3774 # We're reverting to our parent. If possible, we'd like status
3774 # We're reverting to our parent. If possible, we'd like status
3775 # to report the file as clean. We have to use normallookup for
3775 # to report the file as clean. We have to use normallookup for
3776 # merges to avoid losing information about merged/dirty files.
3776 # merges to avoid losing information about merged/dirty files.
3777 if p2 != nullid:
3777 if p2 != nullid:
3778 normal = repo.dirstate.normallookup
3778 normal = repo.dirstate.normallookup
3779 else:
3779 else:
3780 normal = repo.dirstate.normal
3780 normal = repo.dirstate.normal
3781
3781
3782 newlyaddedandmodifiedfiles = set()
3782 newlyaddedandmodifiedfiles = set()
3783 if interactive:
3783 if interactive:
3784 # Prompt the user for changes to revert
3784 # Prompt the user for changes to revert
3785 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3785 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3786 m = scmutil.match(ctx, torevert, matcher_opts)
3786 m = scmutil.match(ctx, torevert, matcher_opts)
3787 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3787 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3788 diffopts.nodates = True
3788 diffopts.nodates = True
3789 diffopts.git = True
3789 diffopts.git = True
3790 operation = 'discard'
3790 operation = 'discard'
3791 reversehunks = True
3791 reversehunks = True
3792 if node != parent:
3792 if node != parent:
3793 operation = 'revert'
3793 operation = 'apply'
3794 reversehunks = repo.ui.configbool('experimental',
3794 reversehunks = False
3795 'revertalternateinteractivemode')
3796 if reversehunks:
3795 if reversehunks:
3797 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3796 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3798 else:
3797 else:
3799 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3798 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3800 originalchunks = patch.parsepatch(diff)
3799 originalchunks = patch.parsepatch(diff)
3801
3800
3802 try:
3801 try:
3803
3802
3804 chunks, opts = recordfilter(repo.ui, originalchunks,
3803 chunks, opts = recordfilter(repo.ui, originalchunks,
3805 operation=operation)
3804 operation=operation)
3806 if reversehunks:
3805 if reversehunks:
3807 chunks = patch.reversehunks(chunks)
3806 chunks = patch.reversehunks(chunks)
3808
3807
3809 except error.PatchError as err:
3808 except error.PatchError as err:
3810 raise error.Abort(_('error parsing patch: %s') % err)
3809 raise error.Abort(_('error parsing patch: %s') % err)
3811
3810
3812 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3811 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3813 if tobackup is None:
3812 if tobackup is None:
3814 tobackup = set()
3813 tobackup = set()
3815 # Apply changes
3814 # Apply changes
3816 fp = stringio()
3815 fp = stringio()
3817 for c in chunks:
3816 for c in chunks:
3818 # Create a backup file only if this hunk should be backed up
3817 # Create a backup file only if this hunk should be backed up
3819 if ishunk(c) and c.header.filename() in tobackup:
3818 if ishunk(c) and c.header.filename() in tobackup:
3820 abs = c.header.filename()
3819 abs = c.header.filename()
3821 target = repo.wjoin(abs)
3820 target = repo.wjoin(abs)
3822 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3821 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3823 util.copyfile(target, bakname)
3822 util.copyfile(target, bakname)
3824 tobackup.remove(abs)
3823 tobackup.remove(abs)
3825 c.write(fp)
3824 c.write(fp)
3826 dopatch = fp.tell()
3825 dopatch = fp.tell()
3827 fp.seek(0)
3826 fp.seek(0)
3828 if dopatch:
3827 if dopatch:
3829 try:
3828 try:
3830 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3829 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3831 except error.PatchError as err:
3830 except error.PatchError as err:
3832 raise error.Abort(str(err))
3831 raise error.Abort(str(err))
3833 del fp
3832 del fp
3834 else:
3833 else:
3835 for f in actions['revert'][0]:
3834 for f in actions['revert'][0]:
3836 checkout(f)
3835 checkout(f)
3837 if normal:
3836 if normal:
3838 normal(f)
3837 normal(f)
3839
3838
3840 for f in actions['add'][0]:
3839 for f in actions['add'][0]:
3841 # Don't checkout modified files, they are already created by the diff
3840 # Don't checkout modified files, they are already created by the diff
3842 if f not in newlyaddedandmodifiedfiles:
3841 if f not in newlyaddedandmodifiedfiles:
3843 checkout(f)
3842 checkout(f)
3844 repo.dirstate.add(f)
3843 repo.dirstate.add(f)
3845
3844
3846 normal = repo.dirstate.normallookup
3845 normal = repo.dirstate.normallookup
3847 if node == parent and p2 == nullid:
3846 if node == parent and p2 == nullid:
3848 normal = repo.dirstate.normal
3847 normal = repo.dirstate.normal
3849 for f in actions['undelete'][0]:
3848 for f in actions['undelete'][0]:
3850 checkout(f)
3849 checkout(f)
3851 normal(f)
3850 normal(f)
3852
3851
3853 copied = copies.pathcopies(repo[parent], ctx)
3852 copied = copies.pathcopies(repo[parent], ctx)
3854
3853
3855 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3854 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3856 if f in copied:
3855 if f in copied:
3857 repo.dirstate.copy(copied[f], f)
3856 repo.dirstate.copy(copied[f], f)
3858
3857
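The interactive branch of _performrevert above chooses between two diff directions: when reverting to the dirstate parent it takes a target-to-working diff and reverses the selected hunks, otherwise it takes a working-to-target diff and applies the selected hunks as-is. A tiny standalone restatement of that decision (toy values, no Mercurial APIs):

    def choosedirection(node, parent):
        # returns (prompt operation, whether selected hunks must be reversed)
        if node == parent:
            return 'discard', True    # diff(target, working), reverse kept hunks
        return 'apply', False         # diff(working, target), apply kept hunks

    assert choosedirection('abc', 'abc') == ('discard', True)
    assert choosedirection('abc', 'def') == ('apply', False)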
3859 class command(registrar.command):
3858 class command(registrar.command):
3860 def _doregister(self, func, name, *args, **kwargs):
3859 def _doregister(self, func, name, *args, **kwargs):
3861 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3860 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3862 return super(command, self)._doregister(func, name, *args, **kwargs)
3861 return super(command, self)._doregister(func, name, *args, **kwargs)
3863
3862
3864 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3863 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3865 # commands.outgoing. "missing" is the "missing" attribute of the result of
3864 # commands.outgoing. "missing" is the "missing" attribute of the result of
3866 # "findcommonoutgoing()"
3865 # "findcommonoutgoing()"
3867 outgoinghooks = util.hooks()
3866 outgoinghooks = util.hooks()
3868
3867
3869 # a list of (ui, repo) functions called by commands.summary
3868 # a list of (ui, repo) functions called by commands.summary
3870 summaryhooks = util.hooks()
3869 summaryhooks = util.hooks()
3871
3870
3872 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3871 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3873 #
3872 #
3874 # functions should return tuple of booleans below, if 'changes' is None:
3873 # functions should return tuple of booleans below, if 'changes' is None:
3875 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3874 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3876 #
3875 #
3877 # otherwise, 'changes' is a tuple of tuples below:
3876 # otherwise, 'changes' is a tuple of tuples below:
3878 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3877 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3879 # - (desturl, destbranch, destpeer, outgoing)
3878 # - (desturl, destbranch, destpeer, outgoing)
3880 summaryremotehooks = util.hooks()
3879 summaryremotehooks = util.hooks()
3881
3880
3882 # A list of state files kept by multistep operations like graft.
3881 # A list of state files kept by multistep operations like graft.
3883 # Since graft cannot be aborted, it is considered 'clearable' by update.
3882 # Since graft cannot be aborted, it is considered 'clearable' by update.
3884 # note: bisect is intentionally excluded
3883 # note: bisect is intentionally excluded
3885 # (state file, clearable, allowcommit, error, hint)
3884 # (state file, clearable, allowcommit, error, hint)
3886 unfinishedstates = [
3885 unfinishedstates = [
3887 ('graftstate', True, False, _('graft in progress'),
3886 ('graftstate', True, False, _('graft in progress'),
3888 _("use 'hg graft --continue' or 'hg update' to abort")),
3887 _("use 'hg graft --continue' or 'hg update' to abort")),
3889 ('updatestate', True, False, _('last update was interrupted'),
3888 ('updatestate', True, False, _('last update was interrupted'),
3890 _("use 'hg update' to get a consistent checkout"))
3889 _("use 'hg update' to get a consistent checkout"))
3891 ]
3890 ]
3892
3891
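unfinishedstates above is a plain list, so extensions can register their own state files using the same (state file, clearable, allowcommit, error, hint) layout. A hedged sketch of such a registration; the 'rebasestate' entry is shown for illustration and may not match the real extension's wording:

    from mercurial import cmdutil
    from mercurial.i18n import _

    cmdutil.unfinishedstates.append(
        ('rebasestate', False, False, _('rebase in progress'),
         _("use 'hg rebase --continue' or 'hg rebase --abort'")))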
3893 def checkunfinished(repo, commit=False):
3892 def checkunfinished(repo, commit=False):
3894 '''Look for an unfinished multistep operation, like graft, and abort
3893 '''Look for an unfinished multistep operation, like graft, and abort
3895 if found. It's probably good to check this right before
3894 if found. It's probably good to check this right before
3896 bailifchanged().
3895 bailifchanged().
3897 '''
3896 '''
3898 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3897 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3899 if commit and allowcommit:
3898 if commit and allowcommit:
3900 continue
3899 continue
3901 if repo.vfs.exists(f):
3900 if repo.vfs.exists(f):
3902 raise error.Abort(msg, hint=hint)
3901 raise error.Abort(msg, hint=hint)
3903
3902
3904 def clearunfinished(repo):
3903 def clearunfinished(repo):
3905 '''Check for unfinished operations (as above), and clear the ones
3904 '''Check for unfinished operations (as above), and clear the ones
3906 that are clearable.
3905 that are clearable.
3907 '''
3906 '''
3908 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3907 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3909 if not clearable and repo.vfs.exists(f):
3908 if not clearable and repo.vfs.exists(f):
3910 raise error.Abort(msg, hint=hint)
3909 raise error.Abort(msg, hint=hint)
3911 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3910 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3912 if clearable and repo.vfs.exists(f):
3911 if clearable and repo.vfs.exists(f):
3913 util.unlink(repo.vfs.join(f))
3912 util.unlink(repo.vfs.join(f))
3914
3913
3915 afterresolvedstates = [
3914 afterresolvedstates = [
3916 ('graftstate',
3915 ('graftstate',
3917 _('hg graft --continue')),
3916 _('hg graft --continue')),
3918 ]
3917 ]
3919
3918
3920 def howtocontinue(repo):
3919 def howtocontinue(repo):
3921 '''Check for an unfinished operation and return the command to finish
3920 '''Check for an unfinished operation and return the command to finish
3922 it.
3921 it.
3923
3922
3924 afterresolvedstates tuples define a .hg/{file} and the corresponding
3923 afterresolvedstates tuples define a .hg/{file} and the corresponding
3925 command needed to finish it.
3924 command needed to finish it.
3926
3925
3927 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3926 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3928 a boolean.
3927 a boolean.
3929 '''
3928 '''
3930 contmsg = _("continue: %s")
3929 contmsg = _("continue: %s")
3931 for f, msg in afterresolvedstates:
3930 for f, msg in afterresolvedstates:
3932 if repo.vfs.exists(f):
3931 if repo.vfs.exists(f):
3933 return contmsg % msg, True
3932 return contmsg % msg, True
3934 if repo[None].dirty(missing=True, merge=False, branch=False):
3933 if repo[None].dirty(missing=True, merge=False, branch=False):
3935 return contmsg % _("hg commit"), False
3934 return contmsg % _("hg commit"), False
3936 return None, None
3935 return None, None
3937
3936
3938 def checkafterresolved(repo):
3937 def checkafterresolved(repo):
3939 '''Inform the user about the next action after completing hg resolve
3938 '''Inform the user about the next action after completing hg resolve
3940
3939
3941 If there's a matching afterresolvedstates, howtocontinue will yield
3940 If there's a matching afterresolvedstates, howtocontinue will yield
3942 repo.ui.warn as the reporter.
3941 repo.ui.warn as the reporter.
3943
3942
3944 Otherwise, it will yield repo.ui.note.
3943 Otherwise, it will yield repo.ui.note.
3945 '''
3944 '''
3946 msg, warning = howtocontinue(repo)
3945 msg, warning = howtocontinue(repo)
3947 if msg is not None:
3946 if msg is not None:
3948 if warning:
3947 if warning:
3949 repo.ui.warn("%s\n" % msg)
3948 repo.ui.warn("%s\n" % msg)
3950 else:
3949 else:
3951 repo.ui.note("%s\n" % msg)
3950 repo.ui.note("%s\n" % msg)
3952
3951
3953 def wrongtooltocontinue(repo, task):
3952 def wrongtooltocontinue(repo, task):
3954 '''Raise an abort suggesting how to properly continue if there is an
3953 '''Raise an abort suggesting how to properly continue if there is an
3955 active task.
3954 active task.
3956
3955
3957 Uses howtocontinue() to find the active task.
3956 Uses howtocontinue() to find the active task.
3958
3957
3959 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3958 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3960 a hint.
3959 a hint.
3961 '''
3960 '''
3962 after = howtocontinue(repo)
3961 after = howtocontinue(repo)
3963 hint = None
3962 hint = None
3964 if after[1]:
3963 if after[1]:
3965 hint = after[0]
3964 hint = after[0]
3966 raise error.Abort(_('no %s in progress') % task, hint=hint)
3965 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,1146 +1,1143 b''
1 # configitems.py - centralized declaration of configuration option
1 # configitems.py - centralized declaration of configuration option
2 #
2 #
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
3 # Copyright 2017 Pierre-Yves David <pierre-yves.david@octobus.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import functools
10 import functools
11 import re
11 import re
12
12
13 from . import (
13 from . import (
14 encoding,
14 encoding,
15 error,
15 error,
16 )
16 )
17
17
18 def loadconfigtable(ui, extname, configtable):
18 def loadconfigtable(ui, extname, configtable):
19 """update config item known to the ui with the extension ones"""
19 """update config item known to the ui with the extension ones"""
20 for section, items in configtable.items():
20 for section, items in configtable.items():
21 knownitems = ui._knownconfig.setdefault(section, itemregister())
21 knownitems = ui._knownconfig.setdefault(section, itemregister())
22 knownkeys = set(knownitems)
22 knownkeys = set(knownitems)
23 newkeys = set(items)
23 newkeys = set(items)
24 for key in sorted(knownkeys & newkeys):
24 for key in sorted(knownkeys & newkeys):
25 msg = "extension '%s' overwrite config item '%s.%s'"
25 msg = "extension '%s' overwrite config item '%s.%s'"
26 msg %= (extname, section, key)
26 msg %= (extname, section, key)
27 ui.develwarn(msg, config='warn-config')
27 ui.develwarn(msg, config='warn-config')
28
28
29 knownitems.update(items)
29 knownitems.update(items)
30
30
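loadconfigtable() above consumes the configtable that an extension builds up, typically through registrar.configitem. A hedged sketch of that extension-side pattern; the section and option names are made up for illustration:

    from mercurial import registrar

    configtable = {}
    configitem = registrar.configitem(configtable)

    # hypothetical option: declaring a default lets later lookups fall back to it
    configitem('myext', 'retries',
               default=3,
               )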
31 class configitem(object):
31 class configitem(object):
32 """represent a known config item
32 """represent a known config item
33
33
34 :section: the official config section where to find this item,
34 :section: the official config section where to find this item,
35 :name: the official name within the section,
35 :name: the official name within the section,
36 :default: default value for this item,
36 :default: default value for this item,
37 :alias: optional list of tuples as alternatives,
37 :alias: optional list of tuples as alternatives,
38 :generic: this is a generic definition; the name is matched using a regular expression.
38 :generic: this is a generic definition; the name is matched using a regular expression.
39 """
39 """
40
40
41 def __init__(self, section, name, default=None, alias=(),
41 def __init__(self, section, name, default=None, alias=(),
42 generic=False, priority=0):
42 generic=False, priority=0):
43 self.section = section
43 self.section = section
44 self.name = name
44 self.name = name
45 self.default = default
45 self.default = default
46 self.alias = list(alias)
46 self.alias = list(alias)
47 self.generic = generic
47 self.generic = generic
48 self.priority = priority
48 self.priority = priority
49 self._re = None
49 self._re = None
50 if generic:
50 if generic:
51 self._re = re.compile(self.name)
51 self._re = re.compile(self.name)
52
52
53 class itemregister(dict):
53 class itemregister(dict):
54 """A specialized dictionary that can handle wild-card selection"""
54 """A specialized dictionary that can handle wild-card selection"""
55
55
56 def __init__(self):
56 def __init__(self):
57 super(itemregister, self).__init__()
57 super(itemregister, self).__init__()
58 self._generics = set()
58 self._generics = set()
59
59
60 def update(self, other):
60 def update(self, other):
61 super(itemregister, self).update(other)
61 super(itemregister, self).update(other)
62 self._generics.update(other._generics)
62 self._generics.update(other._generics)
63
63
64 def __setitem__(self, key, item):
64 def __setitem__(self, key, item):
65 super(itemregister, self).__setitem__(key, item)
65 super(itemregister, self).__setitem__(key, item)
66 if item.generic:
66 if item.generic:
67 self._generics.add(item)
67 self._generics.add(item)
68
68
69 def get(self, key):
69 def get(self, key):
70 baseitem = super(itemregister, self).get(key)
70 baseitem = super(itemregister, self).get(key)
71 if baseitem is not None and not baseitem.generic:
71 if baseitem is not None and not baseitem.generic:
72 return baseitem
72 return baseitem
73
73
74 # search for a matching generic item
74 # search for a matching generic item
75 generics = sorted(self._generics, key=(lambda x: (x.priority, x.name)))
75 generics = sorted(self._generics, key=(lambda x: (x.priority, x.name)))
76 for item in generics:
76 for item in generics:
77 # we use 'match' instead of 'search' to make the matching simpler
77 # we use 'match' instead of 'search' to make the matching simpler
78 # for people unfamiliar with regular expressions. Having the match
78 # for people unfamiliar with regular expressions. Having the match
79 # rooted to the start of the string will produce less surprising
79 # rooted to the start of the string will produce less surprising
80 # results for users writing a simple regex for a sub-attribute.
80 # results for users writing a simple regex for a sub-attribute.
81 #
81 #
82 # For example using "color\..*" match produces an unsurprising
82 # For example using "color\..*" match produces an unsurprising
83 # result, while using search could suddenly match apparently
83 # result, while using search could suddenly match apparently
84 # unrelated configuration that happens to contain "color."
84 # unrelated configuration that happens to contain "color."
85 # anywhere. This is a tradeoff where we favor requiring ".*" on
85 # anywhere. This is a tradeoff where we favor requiring ".*" on
86 # some matches to avoid the need to prefix most patterns with "^".
86 # some matches to avoid the need to prefix most patterns with "^".
87 # The "^" seems more error prone.
87 # The "^" seems more error prone.
88 if item._re.match(key):
88 if item._re.match(key):
89 return item
89 return item
90
90
91 return None
91 return None
92
92
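A runnable restatement of the match-versus-search point made in the comment above: a generic item registered as "color\..*" should only catch keys that start with "color.", not keys that merely contain it somewhere in the middle:

    import re

    pattern = re.compile(r'color\..*')
    assert pattern.match('color.diff.inserted')            # anchored at the start: hit
    assert pattern.match('ui.color.something') is None     # not at the start: miss
    assert re.search(r'color\..*', 'ui.color.something')   # search would have matched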
93 coreitems = {}
93 coreitems = {}
94
94
95 def _register(configtable, *args, **kwargs):
95 def _register(configtable, *args, **kwargs):
96 item = configitem(*args, **kwargs)
96 item = configitem(*args, **kwargs)
97 section = configtable.setdefault(item.section, itemregister())
97 section = configtable.setdefault(item.section, itemregister())
98 if item.name in section:
98 if item.name in section:
99 msg = "duplicated config item registration for '%s.%s'"
99 msg = "duplicated config item registration for '%s.%s'"
100 raise error.ProgrammingError(msg % (item.section, item.name))
100 raise error.ProgrammingError(msg % (item.section, item.name))
101 section[item.name] = item
101 section[item.name] = item
102
102
103 # special value for case where the default is derived from other values
103 # special value for case where the default is derived from other values
104 dynamicdefault = object()
104 dynamicdefault = object()
105
105
106 # Registering actual config items
106 # Registering actual config items
107
107
108 def getitemregister(configtable):
108 def getitemregister(configtable):
109 f = functools.partial(_register, configtable)
109 f = functools.partial(_register, configtable)
110 # export pseudo enum as configitem.*
110 # export pseudo enum as configitem.*
111 f.dynamicdefault = dynamicdefault
111 f.dynamicdefault = dynamicdefault
112 return f
112 return f
113
113
114 coreconfigitem = getitemregister(coreitems)
114 coreconfigitem = getitemregister(coreitems)
115
115
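Once an item is registered below, config lookups fall back to its declared default. A hedged usage sketch; it assumes a plain ui instance with no user configuration overriding the value:

    from mercurial import ui as uimod

    ui = uimod.ui.load()
    # 'annotate.nodates' is registered below with default=False
    assert ui.configbool('annotate', 'nodates') is False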
116 coreconfigitem('alias', '.*',
116 coreconfigitem('alias', '.*',
117 default=None,
117 default=None,
118 generic=True,
118 generic=True,
119 )
119 )
120 coreconfigitem('annotate', 'nodates',
120 coreconfigitem('annotate', 'nodates',
121 default=False,
121 default=False,
122 )
122 )
123 coreconfigitem('annotate', 'showfunc',
123 coreconfigitem('annotate', 'showfunc',
124 default=False,
124 default=False,
125 )
125 )
126 coreconfigitem('annotate', 'unified',
126 coreconfigitem('annotate', 'unified',
127 default=None,
127 default=None,
128 )
128 )
129 coreconfigitem('annotate', 'git',
129 coreconfigitem('annotate', 'git',
130 default=False,
130 default=False,
131 )
131 )
132 coreconfigitem('annotate', 'ignorews',
132 coreconfigitem('annotate', 'ignorews',
133 default=False,
133 default=False,
134 )
134 )
135 coreconfigitem('annotate', 'ignorewsamount',
135 coreconfigitem('annotate', 'ignorewsamount',
136 default=False,
136 default=False,
137 )
137 )
138 coreconfigitem('annotate', 'ignoreblanklines',
138 coreconfigitem('annotate', 'ignoreblanklines',
139 default=False,
139 default=False,
140 )
140 )
141 coreconfigitem('annotate', 'ignorewseol',
141 coreconfigitem('annotate', 'ignorewseol',
142 default=False,
142 default=False,
143 )
143 )
144 coreconfigitem('annotate', 'nobinary',
144 coreconfigitem('annotate', 'nobinary',
145 default=False,
145 default=False,
146 )
146 )
147 coreconfigitem('annotate', 'noprefix',
147 coreconfigitem('annotate', 'noprefix',
148 default=False,
148 default=False,
149 )
149 )
150 coreconfigitem('auth', 'cookiefile',
150 coreconfigitem('auth', 'cookiefile',
151 default=None,
151 default=None,
152 )
152 )
153 # bookmarks.pushing: internal hack for discovery
153 # bookmarks.pushing: internal hack for discovery
154 coreconfigitem('bookmarks', 'pushing',
154 coreconfigitem('bookmarks', 'pushing',
155 default=list,
155 default=list,
156 )
156 )
157 # bundle.mainreporoot: internal hack for bundlerepo
157 # bundle.mainreporoot: internal hack for bundlerepo
158 coreconfigitem('bundle', 'mainreporoot',
158 coreconfigitem('bundle', 'mainreporoot',
159 default='',
159 default='',
160 )
160 )
161 # bundle.reorder: experimental config
161 # bundle.reorder: experimental config
162 coreconfigitem('bundle', 'reorder',
162 coreconfigitem('bundle', 'reorder',
163 default='auto',
163 default='auto',
164 )
164 )
165 coreconfigitem('censor', 'policy',
165 coreconfigitem('censor', 'policy',
166 default='abort',
166 default='abort',
167 )
167 )
168 coreconfigitem('chgserver', 'idletimeout',
168 coreconfigitem('chgserver', 'idletimeout',
169 default=3600,
169 default=3600,
170 )
170 )
171 coreconfigitem('chgserver', 'skiphash',
171 coreconfigitem('chgserver', 'skiphash',
172 default=False,
172 default=False,
173 )
173 )
174 coreconfigitem('cmdserver', 'log',
174 coreconfigitem('cmdserver', 'log',
175 default=None,
175 default=None,
176 )
176 )
177 coreconfigitem('color', '.*',
177 coreconfigitem('color', '.*',
178 default=None,
178 default=None,
179 generic=True,
179 generic=True,
180 )
180 )
181 coreconfigitem('color', 'mode',
181 coreconfigitem('color', 'mode',
182 default='auto',
182 default='auto',
183 )
183 )
184 coreconfigitem('color', 'pagermode',
184 coreconfigitem('color', 'pagermode',
185 default=dynamicdefault,
185 default=dynamicdefault,
186 )
186 )
187 coreconfigitem('commands', 'show.aliasprefix',
187 coreconfigitem('commands', 'show.aliasprefix',
188 default=list,
188 default=list,
189 )
189 )
190 coreconfigitem('commands', 'status.relative',
190 coreconfigitem('commands', 'status.relative',
191 default=False,
191 default=False,
192 )
192 )
193 coreconfigitem('commands', 'status.skipstates',
193 coreconfigitem('commands', 'status.skipstates',
194 default=[],
194 default=[],
195 )
195 )
196 coreconfigitem('commands', 'status.verbose',
196 coreconfigitem('commands', 'status.verbose',
197 default=False,
197 default=False,
198 )
198 )
199 coreconfigitem('commands', 'update.check',
199 coreconfigitem('commands', 'update.check',
200 default=None,
200 default=None,
201 # Deprecated, remove after 4.4 release
201 # Deprecated, remove after 4.4 release
202 alias=[('experimental', 'updatecheck')]
202 alias=[('experimental', 'updatecheck')]
203 )
203 )
204 coreconfigitem('commands', 'update.requiredest',
204 coreconfigitem('commands', 'update.requiredest',
205 default=False,
205 default=False,
206 )
206 )
207 coreconfigitem('committemplate', '.*',
207 coreconfigitem('committemplate', '.*',
208 default=None,
208 default=None,
209 generic=True,
209 generic=True,
210 )
210 )
211 coreconfigitem('debug', 'dirstate.delaywrite',
211 coreconfigitem('debug', 'dirstate.delaywrite',
212 default=0,
212 default=0,
213 )
213 )
214 coreconfigitem('defaults', '.*',
214 coreconfigitem('defaults', '.*',
215 default=None,
215 default=None,
216 generic=True,
216 generic=True,
217 )
217 )
218 coreconfigitem('devel', 'all-warnings',
218 coreconfigitem('devel', 'all-warnings',
219 default=False,
219 default=False,
220 )
220 )
221 coreconfigitem('devel', 'bundle2.debug',
221 coreconfigitem('devel', 'bundle2.debug',
222 default=False,
222 default=False,
223 )
223 )
224 coreconfigitem('devel', 'cache-vfs',
224 coreconfigitem('devel', 'cache-vfs',
225 default=None,
225 default=None,
226 )
226 )
227 coreconfigitem('devel', 'check-locks',
227 coreconfigitem('devel', 'check-locks',
228 default=False,
228 default=False,
229 )
229 )
230 coreconfigitem('devel', 'check-relroot',
230 coreconfigitem('devel', 'check-relroot',
231 default=False,
231 default=False,
232 )
232 )
233 coreconfigitem('devel', 'default-date',
233 coreconfigitem('devel', 'default-date',
234 default=None,
234 default=None,
235 )
235 )
236 coreconfigitem('devel', 'deprec-warn',
236 coreconfigitem('devel', 'deprec-warn',
237 default=False,
237 default=False,
238 )
238 )
239 coreconfigitem('devel', 'disableloaddefaultcerts',
239 coreconfigitem('devel', 'disableloaddefaultcerts',
240 default=False,
240 default=False,
241 )
241 )
242 coreconfigitem('devel', 'warn-empty-changegroup',
242 coreconfigitem('devel', 'warn-empty-changegroup',
243 default=False,
243 default=False,
244 )
244 )
245 coreconfigitem('devel', 'legacy.exchange',
245 coreconfigitem('devel', 'legacy.exchange',
246 default=list,
246 default=list,
247 )
247 )
248 coreconfigitem('devel', 'servercafile',
248 coreconfigitem('devel', 'servercafile',
249 default='',
249 default='',
250 )
250 )
251 coreconfigitem('devel', 'serverexactprotocol',
251 coreconfigitem('devel', 'serverexactprotocol',
252 default='',
252 default='',
253 )
253 )
254 coreconfigitem('devel', 'serverrequirecert',
254 coreconfigitem('devel', 'serverrequirecert',
255 default=False,
255 default=False,
256 )
256 )
257 coreconfigitem('devel', 'strip-obsmarkers',
257 coreconfigitem('devel', 'strip-obsmarkers',
258 default=True,
258 default=True,
259 )
259 )
260 coreconfigitem('devel', 'warn-config',
260 coreconfigitem('devel', 'warn-config',
261 default=None,
261 default=None,
262 )
262 )
263 coreconfigitem('devel', 'warn-config-default',
263 coreconfigitem('devel', 'warn-config-default',
264 default=None,
264 default=None,
265 )
265 )
266 coreconfigitem('devel', 'user.obsmarker',
266 coreconfigitem('devel', 'user.obsmarker',
267 default=None,
267 default=None,
268 )
268 )
269 coreconfigitem('devel', 'warn-config-unknown',
269 coreconfigitem('devel', 'warn-config-unknown',
270 default=None,
270 default=None,
271 )
271 )
272 coreconfigitem('diff', 'nodates',
272 coreconfigitem('diff', 'nodates',
273 default=False,
273 default=False,
274 )
274 )
275 coreconfigitem('diff', 'showfunc',
275 coreconfigitem('diff', 'showfunc',
276 default=False,
276 default=False,
277 )
277 )
278 coreconfigitem('diff', 'unified',
278 coreconfigitem('diff', 'unified',
279 default=None,
279 default=None,
280 )
280 )
281 coreconfigitem('diff', 'git',
281 coreconfigitem('diff', 'git',
282 default=False,
282 default=False,
283 )
283 )
284 coreconfigitem('diff', 'ignorews',
284 coreconfigitem('diff', 'ignorews',
285 default=False,
285 default=False,
286 )
286 )
287 coreconfigitem('diff', 'ignorewsamount',
287 coreconfigitem('diff', 'ignorewsamount',
288 default=False,
288 default=False,
289 )
289 )
290 coreconfigitem('diff', 'ignoreblanklines',
290 coreconfigitem('diff', 'ignoreblanklines',
291 default=False,
291 default=False,
292 )
292 )
293 coreconfigitem('diff', 'ignorewseol',
293 coreconfigitem('diff', 'ignorewseol',
294 default=False,
294 default=False,
295 )
295 )
296 coreconfigitem('diff', 'nobinary',
296 coreconfigitem('diff', 'nobinary',
297 default=False,
297 default=False,
298 )
298 )
299 coreconfigitem('diff', 'noprefix',
299 coreconfigitem('diff', 'noprefix',
300 default=False,
300 default=False,
301 )
301 )
302 coreconfigitem('email', 'bcc',
302 coreconfigitem('email', 'bcc',
303 default=None,
303 default=None,
304 )
304 )
305 coreconfigitem('email', 'cc',
305 coreconfigitem('email', 'cc',
306 default=None,
306 default=None,
307 )
307 )
308 coreconfigitem('email', 'charsets',
308 coreconfigitem('email', 'charsets',
309 default=list,
309 default=list,
310 )
310 )
311 coreconfigitem('email', 'from',
311 coreconfigitem('email', 'from',
312 default=None,
312 default=None,
313 )
313 )
314 coreconfigitem('email', 'method',
314 coreconfigitem('email', 'method',
315 default='smtp',
315 default='smtp',
316 )
316 )
317 coreconfigitem('email', 'reply-to',
317 coreconfigitem('email', 'reply-to',
318 default=None,
318 default=None,
319 )
319 )
320 coreconfigitem('email', 'to',
320 coreconfigitem('email', 'to',
321 default=None,
321 default=None,
322 )
322 )
323 coreconfigitem('experimental', 'archivemetatemplate',
323 coreconfigitem('experimental', 'archivemetatemplate',
324 default=dynamicdefault,
324 default=dynamicdefault,
325 )
325 )
326 coreconfigitem('experimental', 'bundle-phases',
326 coreconfigitem('experimental', 'bundle-phases',
327 default=False,
327 default=False,
328 )
328 )
329 coreconfigitem('experimental', 'bundle2-advertise',
329 coreconfigitem('experimental', 'bundle2-advertise',
330 default=True,
330 default=True,
331 )
331 )
332 coreconfigitem('experimental', 'bundle2-output-capture',
332 coreconfigitem('experimental', 'bundle2-output-capture',
333 default=False,
333 default=False,
334 )
334 )
335 coreconfigitem('experimental', 'bundle2.pushback',
335 coreconfigitem('experimental', 'bundle2.pushback',
336 default=False,
336 default=False,
337 )
337 )
338 coreconfigitem('experimental', 'bundle2lazylocking',
338 coreconfigitem('experimental', 'bundle2lazylocking',
339 default=False,
339 default=False,
340 )
340 )
341 coreconfigitem('experimental', 'bundlecomplevel',
341 coreconfigitem('experimental', 'bundlecomplevel',
342 default=None,
342 default=None,
343 )
343 )
344 coreconfigitem('experimental', 'changegroup3',
344 coreconfigitem('experimental', 'changegroup3',
345 default=False,
345 default=False,
346 )
346 )
347 coreconfigitem('experimental', 'clientcompressionengines',
347 coreconfigitem('experimental', 'clientcompressionengines',
348 default=list,
348 default=list,
349 )
349 )
350 coreconfigitem('experimental', 'copytrace',
350 coreconfigitem('experimental', 'copytrace',
351 default='on',
351 default='on',
352 )
352 )
353 coreconfigitem('experimental', 'copytrace.movecandidateslimit',
353 coreconfigitem('experimental', 'copytrace.movecandidateslimit',
354 default=100,
354 default=100,
355 )
355 )
356 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
356 coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
357 default=100,
357 default=100,
358 )
358 )
359 coreconfigitem('experimental', 'crecordtest',
359 coreconfigitem('experimental', 'crecordtest',
360 default=None,
360 default=None,
361 )
361 )
362 coreconfigitem('experimental', 'editortmpinhg',
362 coreconfigitem('experimental', 'editortmpinhg',
363 default=False,
363 default=False,
364 )
364 )
365 coreconfigitem('experimental', 'evolution',
365 coreconfigitem('experimental', 'evolution',
366 default=list,
366 default=list,
367 )
367 )
368 coreconfigitem('experimental', 'evolution.allowdivergence',
368 coreconfigitem('experimental', 'evolution.allowdivergence',
369 default=False,
369 default=False,
370 alias=[('experimental', 'allowdivergence')]
370 alias=[('experimental', 'allowdivergence')]
371 )
371 )
372 coreconfigitem('experimental', 'evolution.allowunstable',
372 coreconfigitem('experimental', 'evolution.allowunstable',
373 default=None,
373 default=None,
374 )
374 )
375 coreconfigitem('experimental', 'evolution.createmarkers',
375 coreconfigitem('experimental', 'evolution.createmarkers',
376 default=None,
376 default=None,
377 )
377 )
378 coreconfigitem('experimental', 'evolution.effect-flags',
378 coreconfigitem('experimental', 'evolution.effect-flags',
379 default=True,
379 default=True,
380 alias=[('experimental', 'effect-flags')]
380 alias=[('experimental', 'effect-flags')]
381 )
381 )
382 coreconfigitem('experimental', 'evolution.exchange',
382 coreconfigitem('experimental', 'evolution.exchange',
383 default=None,
383 default=None,
384 )
384 )
385 coreconfigitem('experimental', 'evolution.bundle-obsmarker',
385 coreconfigitem('experimental', 'evolution.bundle-obsmarker',
386 default=False,
386 default=False,
387 )
387 )
388 coreconfigitem('experimental', 'evolution.track-operation',
388 coreconfigitem('experimental', 'evolution.track-operation',
389 default=True,
389 default=True,
390 )
390 )
391 coreconfigitem('experimental', 'maxdeltachainspan',
391 coreconfigitem('experimental', 'maxdeltachainspan',
392 default=-1,
392 default=-1,
393 )
393 )
394 coreconfigitem('experimental', 'mmapindexthreshold',
394 coreconfigitem('experimental', 'mmapindexthreshold',
395 default=None,
395 default=None,
396 )
396 )
397 coreconfigitem('experimental', 'nonnormalparanoidcheck',
397 coreconfigitem('experimental', 'nonnormalparanoidcheck',
398 default=False,
398 default=False,
399 )
399 )
400 coreconfigitem('experimental', 'exportableenviron',
400 coreconfigitem('experimental', 'exportableenviron',
401 default=list,
401 default=list,
402 )
402 )
403 coreconfigitem('experimental', 'extendedheader.index',
403 coreconfigitem('experimental', 'extendedheader.index',
404 default=None,
404 default=None,
405 )
405 )
406 coreconfigitem('experimental', 'extendedheader.similarity',
406 coreconfigitem('experimental', 'extendedheader.similarity',
407 default=False,
407 default=False,
408 )
408 )
409 coreconfigitem('experimental', 'format.compression',
409 coreconfigitem('experimental', 'format.compression',
410 default='zlib',
410 default='zlib',
411 )
411 )
412 coreconfigitem('experimental', 'graphshorten',
412 coreconfigitem('experimental', 'graphshorten',
413 default=False,
413 default=False,
414 )
414 )
415 coreconfigitem('experimental', 'graphstyle.parent',
415 coreconfigitem('experimental', 'graphstyle.parent',
416 default=dynamicdefault,
416 default=dynamicdefault,
417 )
417 )
418 coreconfigitem('experimental', 'graphstyle.missing',
418 coreconfigitem('experimental', 'graphstyle.missing',
419 default=dynamicdefault,
419 default=dynamicdefault,
420 )
420 )
421 coreconfigitem('experimental', 'graphstyle.grandparent',
421 coreconfigitem('experimental', 'graphstyle.grandparent',
422 default=dynamicdefault,
422 default=dynamicdefault,
423 )
423 )
424 coreconfigitem('experimental', 'hook-track-tags',
424 coreconfigitem('experimental', 'hook-track-tags',
425 default=False,
425 default=False,
426 )
426 )
427 coreconfigitem('experimental', 'httppostargs',
427 coreconfigitem('experimental', 'httppostargs',
428 default=False,
428 default=False,
429 )
429 )
430 coreconfigitem('experimental', 'manifestv2',
430 coreconfigitem('experimental', 'manifestv2',
431 default=False,
431 default=False,
432 )
432 )
433 coreconfigitem('experimental', 'mergedriver',
433 coreconfigitem('experimental', 'mergedriver',
434 default=None,
434 default=None,
435 )
435 )
436 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
436 coreconfigitem('experimental', 'obsmarkers-exchange-debug',
437 default=False,
437 default=False,
438 )
438 )
439 coreconfigitem('experimental', 'rebase.multidest',
439 coreconfigitem('experimental', 'rebase.multidest',
440 default=False,
440 default=False,
441 )
441 )
442 coreconfigitem('experimental', 'revertalternateinteractivemode',
443 default=True,
444 )
445 coreconfigitem('experimental', 'revlogv2',
442 coreconfigitem('experimental', 'revlogv2',
446 default=None,
443 default=None,
447 )
444 )
448 coreconfigitem('experimental', 'spacemovesdown',
445 coreconfigitem('experimental', 'spacemovesdown',
449 default=False,
446 default=False,
450 )
447 )
451 coreconfigitem('experimental', 'sparse-read',
448 coreconfigitem('experimental', 'sparse-read',
452 default=False,
449 default=False,
453 )
450 )
454 coreconfigitem('experimental', 'sparse-read.density-threshold',
451 coreconfigitem('experimental', 'sparse-read.density-threshold',
455 default=0.25,
452 default=0.25,
456 )
453 )
457 coreconfigitem('experimental', 'sparse-read.min-gap-size',
454 coreconfigitem('experimental', 'sparse-read.min-gap-size',
458 default='256K',
455 default='256K',
459 )
456 )
460 coreconfigitem('experimental', 'treemanifest',
457 coreconfigitem('experimental', 'treemanifest',
461 default=False,
458 default=False,
462 )
459 )
463 coreconfigitem('extensions', '.*',
460 coreconfigitem('extensions', '.*',
464 default=None,
461 default=None,
465 generic=True,
462 generic=True,
466 )
463 )
467 coreconfigitem('extdata', '.*',
464 coreconfigitem('extdata', '.*',
468 default=None,
465 default=None,
469 generic=True,
466 generic=True,
470 )
467 )
471 coreconfigitem('format', 'aggressivemergedeltas',
468 coreconfigitem('format', 'aggressivemergedeltas',
472 default=False,
469 default=False,
473 )
470 )
474 coreconfigitem('format', 'chunkcachesize',
471 coreconfigitem('format', 'chunkcachesize',
475 default=None,
472 default=None,
476 )
473 )
477 coreconfigitem('format', 'dotencode',
474 coreconfigitem('format', 'dotencode',
478 default=True,
475 default=True,
479 )
476 )
480 coreconfigitem('format', 'generaldelta',
477 coreconfigitem('format', 'generaldelta',
481 default=False,
478 default=False,
482 )
479 )
483 coreconfigitem('format', 'manifestcachesize',
480 coreconfigitem('format', 'manifestcachesize',
484 default=None,
481 default=None,
485 )
482 )
486 coreconfigitem('format', 'maxchainlen',
483 coreconfigitem('format', 'maxchainlen',
487 default=None,
484 default=None,
488 )
485 )
489 coreconfigitem('format', 'obsstore-version',
486 coreconfigitem('format', 'obsstore-version',
490 default=None,
487 default=None,
491 )
488 )
492 coreconfigitem('format', 'usefncache',
489 coreconfigitem('format', 'usefncache',
493 default=True,
490 default=True,
494 )
491 )
495 coreconfigitem('format', 'usegeneraldelta',
492 coreconfigitem('format', 'usegeneraldelta',
496 default=True,
493 default=True,
497 )
494 )
498 coreconfigitem('format', 'usestore',
495 coreconfigitem('format', 'usestore',
499 default=True,
496 default=True,
500 )
497 )
501 coreconfigitem('fsmonitor', 'warn_when_unused',
498 coreconfigitem('fsmonitor', 'warn_when_unused',
502 default=True,
499 default=True,
503 )
500 )
504 coreconfigitem('fsmonitor', 'warn_update_file_count',
501 coreconfigitem('fsmonitor', 'warn_update_file_count',
505 default=50000,
502 default=50000,
506 )
503 )
507 coreconfigitem('hooks', '.*',
504 coreconfigitem('hooks', '.*',
508 default=dynamicdefault,
505 default=dynamicdefault,
509 generic=True,
506 generic=True,
510 )
507 )
511 coreconfigitem('hgweb-paths', '.*',
508 coreconfigitem('hgweb-paths', '.*',
512 default=list,
509 default=list,
513 generic=True,
510 generic=True,
514 )
511 )
515 coreconfigitem('hostfingerprints', '.*',
512 coreconfigitem('hostfingerprints', '.*',
516 default=list,
513 default=list,
517 generic=True,
514 generic=True,
518 )
515 )
519 coreconfigitem('hostsecurity', 'ciphers',
516 coreconfigitem('hostsecurity', 'ciphers',
520 default=None,
517 default=None,
521 )
518 )
522 coreconfigitem('hostsecurity', 'disabletls10warning',
519 coreconfigitem('hostsecurity', 'disabletls10warning',
523 default=False,
520 default=False,
524 )
521 )
525 coreconfigitem('hostsecurity', 'minimumprotocol',
522 coreconfigitem('hostsecurity', 'minimumprotocol',
526 default=dynamicdefault,
523 default=dynamicdefault,
527 )
524 )
528 coreconfigitem('hostsecurity', '.*:minimumprotocol$',
525 coreconfigitem('hostsecurity', '.*:minimumprotocol$',
529 default=dynamicdefault,
526 default=dynamicdefault,
530 generic=True,
527 generic=True,
531 )
528 )
532 coreconfigitem('hostsecurity', '.*:ciphers$',
529 coreconfigitem('hostsecurity', '.*:ciphers$',
533 default=dynamicdefault,
530 default=dynamicdefault,
534 generic=True,
531 generic=True,
535 )
532 )
536 coreconfigitem('hostsecurity', '.*:fingerprints$',
533 coreconfigitem('hostsecurity', '.*:fingerprints$',
537 default=list,
534 default=list,
538 generic=True,
535 generic=True,
539 )
536 )
540 coreconfigitem('hostsecurity', '.*:verifycertsfile$',
537 coreconfigitem('hostsecurity', '.*:verifycertsfile$',
541 default=None,
538 default=None,
542 generic=True,
539 generic=True,
543 )
540 )
544
541
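The four hostsecurity items just above are registered under regular-expression names (generic=True), so per-host keys such as 'example.com:minimumprotocol' are recognized as known settings. A minimal hedged sketch of setting and reading such a per-host key back (the host name and value are invented for the example, not part of this changeset):

from mercurial import ui as uimod

u = uimod.ui.load()
# set a hypothetical per-host value, as an hgrc [hostsecurity] section would
u.setconfig('hostsecurity', 'example.com:minimumprotocol', 'tls1.2', 'example')
print(u.config('hostsecurity', 'example.com:minimumprotocol'))  # 'tls1.2'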
545 coreconfigitem('http_proxy', 'always',
542 coreconfigitem('http_proxy', 'always',
546 default=False,
543 default=False,
547 )
544 )
548 coreconfigitem('http_proxy', 'host',
545 coreconfigitem('http_proxy', 'host',
549 default=None,
546 default=None,
550 )
547 )
551 coreconfigitem('http_proxy', 'no',
548 coreconfigitem('http_proxy', 'no',
552 default=list,
549 default=list,
553 )
550 )
554 coreconfigitem('http_proxy', 'passwd',
551 coreconfigitem('http_proxy', 'passwd',
555 default=None,
552 default=None,
556 )
553 )
557 coreconfigitem('http_proxy', 'user',
554 coreconfigitem('http_proxy', 'user',
558 default=None,
555 default=None,
559 )
556 )
560 coreconfigitem('logtoprocess', 'commandexception',
557 coreconfigitem('logtoprocess', 'commandexception',
561 default=None,
558 default=None,
562 )
559 )
563 coreconfigitem('logtoprocess', 'commandfinish',
560 coreconfigitem('logtoprocess', 'commandfinish',
564 default=None,
561 default=None,
565 )
562 )
566 coreconfigitem('logtoprocess', 'command',
563 coreconfigitem('logtoprocess', 'command',
567 default=None,
564 default=None,
568 )
565 )
569 coreconfigitem('logtoprocess', 'develwarn',
566 coreconfigitem('logtoprocess', 'develwarn',
570 default=None,
567 default=None,
571 )
568 )
572 coreconfigitem('logtoprocess', 'uiblocked',
569 coreconfigitem('logtoprocess', 'uiblocked',
573 default=None,
570 default=None,
574 )
571 )
575 coreconfigitem('merge', 'checkunknown',
572 coreconfigitem('merge', 'checkunknown',
576 default='abort',
573 default='abort',
577 )
574 )
578 coreconfigitem('merge', 'checkignored',
575 coreconfigitem('merge', 'checkignored',
579 default='abort',
576 default='abort',
580 )
577 )
581 coreconfigitem('experimental', 'merge.checkpathconflicts',
578 coreconfigitem('experimental', 'merge.checkpathconflicts',
582 default=False,
579 default=False,
583 )
580 )
584 coreconfigitem('merge', 'followcopies',
581 coreconfigitem('merge', 'followcopies',
585 default=True,
582 default=True,
586 )
583 )
587 coreconfigitem('merge', 'on-failure',
584 coreconfigitem('merge', 'on-failure',
588 default='continue',
585 default='continue',
589 )
586 )
590 coreconfigitem('merge', 'preferancestor',
587 coreconfigitem('merge', 'preferancestor',
591 default=lambda: ['*'],
588 default=lambda: ['*'],
592 )
589 )
593 coreconfigitem('merge-tools', '.*',
590 coreconfigitem('merge-tools', '.*',
594 default=None,
591 default=None,
595 generic=True,
592 generic=True,
596 )
593 )
597 coreconfigitem('merge-tools', br'.*\.args$',
594 coreconfigitem('merge-tools', br'.*\.args$',
598 default="$local $base $other",
595 default="$local $base $other",
599 generic=True,
596 generic=True,
600 priority=-1,
597 priority=-1,
601 )
598 )
602 coreconfigitem('merge-tools', br'.*\.binary$',
599 coreconfigitem('merge-tools', br'.*\.binary$',
603 default=False,
600 default=False,
604 generic=True,
601 generic=True,
605 priority=-1,
602 priority=-1,
606 )
603 )
607 coreconfigitem('merge-tools', br'.*\.check$',
604 coreconfigitem('merge-tools', br'.*\.check$',
608 default=list,
605 default=list,
609 generic=True,
606 generic=True,
610 priority=-1,
607 priority=-1,
611 )
608 )
612 coreconfigitem('merge-tools', br'.*\.checkchanged$',
609 coreconfigitem('merge-tools', br'.*\.checkchanged$',
613 default=False,
610 default=False,
614 generic=True,
611 generic=True,
615 priority=-1,
612 priority=-1,
616 )
613 )
617 coreconfigitem('merge-tools', br'.*\.executable$',
614 coreconfigitem('merge-tools', br'.*\.executable$',
618 default=dynamicdefault,
615 default=dynamicdefault,
619 generic=True,
616 generic=True,
620 priority=-1,
617 priority=-1,
621 )
618 )
622 coreconfigitem('merge-tools', br'.*\.fixeol$',
619 coreconfigitem('merge-tools', br'.*\.fixeol$',
623 default=False,
620 default=False,
624 generic=True,
621 generic=True,
625 priority=-1,
622 priority=-1,
626 )
623 )
627 coreconfigitem('merge-tools', br'.*\.gui$',
624 coreconfigitem('merge-tools', br'.*\.gui$',
628 default=False,
625 default=False,
629 generic=True,
626 generic=True,
630 priority=-1,
627 priority=-1,
631 )
628 )
632 coreconfigitem('merge-tools', br'.*\.priority$',
629 coreconfigitem('merge-tools', br'.*\.priority$',
633 default=0,
630 default=0,
634 generic=True,
631 generic=True,
635 priority=-1,
632 priority=-1,
636 )
633 )
637 coreconfigitem('merge-tools', br'.*\.premerge$',
634 coreconfigitem('merge-tools', br'.*\.premerge$',
638 default=dynamicdefault,
635 default=dynamicdefault,
639 generic=True,
636 generic=True,
640 priority=-1,
637 priority=-1,
641 )
638 )
642 coreconfigitem('merge-tools', br'.*\.symlink$',
639 coreconfigitem('merge-tools', br'.*\.symlink$',
643 default=False,
640 default=False,
644 generic=True,
641 generic=True,
645 priority=-1,
642 priority=-1,
646 )
643 )
647 coreconfigitem('pager', 'attend-.*',
644 coreconfigitem('pager', 'attend-.*',
648 default=dynamicdefault,
645 default=dynamicdefault,
649 generic=True,
646 generic=True,
650 )
647 )
651 coreconfigitem('pager', 'ignore',
648 coreconfigitem('pager', 'ignore',
652 default=list,
649 default=list,
653 )
650 )
654 coreconfigitem('pager', 'pager',
651 coreconfigitem('pager', 'pager',
655 default=dynamicdefault,
652 default=dynamicdefault,
656 )
653 )
657 coreconfigitem('patch', 'eol',
654 coreconfigitem('patch', 'eol',
658 default='strict',
655 default='strict',
659 )
656 )
660 coreconfigitem('patch', 'fuzz',
657 coreconfigitem('patch', 'fuzz',
661 default=2,
658 default=2,
662 )
659 )
663 coreconfigitem('paths', 'default',
660 coreconfigitem('paths', 'default',
664 default=None,
661 default=None,
665 )
662 )
666 coreconfigitem('paths', 'default-push',
663 coreconfigitem('paths', 'default-push',
667 default=None,
664 default=None,
668 )
665 )
669 coreconfigitem('paths', '.*',
666 coreconfigitem('paths', '.*',
670 default=None,
667 default=None,
671 generic=True,
668 generic=True,
672 )
669 )
673 coreconfigitem('phases', 'checksubrepos',
670 coreconfigitem('phases', 'checksubrepos',
674 default='follow',
671 default='follow',
675 )
672 )
676 coreconfigitem('phases', 'new-commit',
673 coreconfigitem('phases', 'new-commit',
677 default='draft',
674 default='draft',
678 )
675 )
679 coreconfigitem('phases', 'publish',
676 coreconfigitem('phases', 'publish',
680 default=True,
677 default=True,
681 )
678 )
682 coreconfigitem('profiling', 'enabled',
679 coreconfigitem('profiling', 'enabled',
683 default=False,
680 default=False,
684 )
681 )
685 coreconfigitem('profiling', 'format',
682 coreconfigitem('profiling', 'format',
686 default='text',
683 default='text',
687 )
684 )
688 coreconfigitem('profiling', 'freq',
685 coreconfigitem('profiling', 'freq',
689 default=1000,
686 default=1000,
690 )
687 )
691 coreconfigitem('profiling', 'limit',
688 coreconfigitem('profiling', 'limit',
692 default=30,
689 default=30,
693 )
690 )
694 coreconfigitem('profiling', 'nested',
691 coreconfigitem('profiling', 'nested',
695 default=0,
692 default=0,
696 )
693 )
697 coreconfigitem('profiling', 'output',
694 coreconfigitem('profiling', 'output',
698 default=None,
695 default=None,
699 )
696 )
700 coreconfigitem('profiling', 'showmax',
697 coreconfigitem('profiling', 'showmax',
701 default=0.999,
698 default=0.999,
702 )
699 )
703 coreconfigitem('profiling', 'showmin',
700 coreconfigitem('profiling', 'showmin',
704 default=dynamicdefault,
701 default=dynamicdefault,
705 )
702 )
706 coreconfigitem('profiling', 'sort',
703 coreconfigitem('profiling', 'sort',
707 default='inlinetime',
704 default='inlinetime',
708 )
705 )
709 coreconfigitem('profiling', 'statformat',
706 coreconfigitem('profiling', 'statformat',
710 default='hotpath',
707 default='hotpath',
711 )
708 )
712 coreconfigitem('profiling', 'type',
709 coreconfigitem('profiling', 'type',
713 default='stat',
710 default='stat',
714 )
711 )
715 coreconfigitem('progress', 'assume-tty',
712 coreconfigitem('progress', 'assume-tty',
716 default=False,
713 default=False,
717 )
714 )
718 coreconfigitem('progress', 'changedelay',
715 coreconfigitem('progress', 'changedelay',
719 default=1,
716 default=1,
720 )
717 )
721 coreconfigitem('progress', 'clear-complete',
718 coreconfigitem('progress', 'clear-complete',
722 default=True,
719 default=True,
723 )
720 )
724 coreconfigitem('progress', 'debug',
721 coreconfigitem('progress', 'debug',
725 default=False,
722 default=False,
726 )
723 )
727 coreconfigitem('progress', 'delay',
724 coreconfigitem('progress', 'delay',
728 default=3,
725 default=3,
729 )
726 )
730 coreconfigitem('progress', 'disable',
727 coreconfigitem('progress', 'disable',
731 default=False,
728 default=False,
732 )
729 )
733 coreconfigitem('progress', 'estimateinterval',
730 coreconfigitem('progress', 'estimateinterval',
734 default=60.0,
731 default=60.0,
735 )
732 )
736 coreconfigitem('progress', 'format',
733 coreconfigitem('progress', 'format',
737 default=lambda: ['topic', 'bar', 'number', 'estimate'],
734 default=lambda: ['topic', 'bar', 'number', 'estimate'],
738 )
735 )
739 coreconfigitem('progress', 'refresh',
736 coreconfigitem('progress', 'refresh',
740 default=0.1,
737 default=0.1,
741 )
738 )
742 coreconfigitem('progress', 'width',
739 coreconfigitem('progress', 'width',
743 default=dynamicdefault,
740 default=dynamicdefault,
744 )
741 )
745 coreconfigitem('push', 'pushvars.server',
742 coreconfigitem('push', 'pushvars.server',
746 default=False,
743 default=False,
747 )
744 )
748 coreconfigitem('server', 'bundle1',
745 coreconfigitem('server', 'bundle1',
749 default=True,
746 default=True,
750 )
747 )
751 coreconfigitem('server', 'bundle1gd',
748 coreconfigitem('server', 'bundle1gd',
752 default=None,
749 default=None,
753 )
750 )
754 coreconfigitem('server', 'bundle1.pull',
751 coreconfigitem('server', 'bundle1.pull',
755 default=None,
752 default=None,
756 )
753 )
757 coreconfigitem('server', 'bundle1gd.pull',
754 coreconfigitem('server', 'bundle1gd.pull',
758 default=None,
755 default=None,
759 )
756 )
760 coreconfigitem('server', 'bundle1.push',
757 coreconfigitem('server', 'bundle1.push',
761 default=None,
758 default=None,
762 )
759 )
763 coreconfigitem('server', 'bundle1gd.push',
760 coreconfigitem('server', 'bundle1gd.push',
764 default=None,
761 default=None,
765 )
762 )
766 coreconfigitem('server', 'compressionengines',
763 coreconfigitem('server', 'compressionengines',
767 default=list,
764 default=list,
768 )
765 )
769 coreconfigitem('server', 'concurrent-push-mode',
766 coreconfigitem('server', 'concurrent-push-mode',
770 default='strict',
767 default='strict',
771 )
768 )
772 coreconfigitem('server', 'disablefullbundle',
769 coreconfigitem('server', 'disablefullbundle',
773 default=False,
770 default=False,
774 )
771 )
775 coreconfigitem('server', 'maxhttpheaderlen',
772 coreconfigitem('server', 'maxhttpheaderlen',
776 default=1024,
773 default=1024,
777 )
774 )
778 coreconfigitem('server', 'preferuncompressed',
775 coreconfigitem('server', 'preferuncompressed',
779 default=False,
776 default=False,
780 )
777 )
781 coreconfigitem('server', 'uncompressed',
778 coreconfigitem('server', 'uncompressed',
782 default=True,
779 default=True,
783 )
780 )
784 coreconfigitem('server', 'uncompressedallowsecret',
781 coreconfigitem('server', 'uncompressedallowsecret',
785 default=False,
782 default=False,
786 )
783 )
787 coreconfigitem('server', 'validate',
784 coreconfigitem('server', 'validate',
788 default=False,
785 default=False,
789 )
786 )
790 coreconfigitem('server', 'zliblevel',
787 coreconfigitem('server', 'zliblevel',
791 default=-1,
788 default=-1,
792 )
789 )
793 coreconfigitem('smtp', 'host',
790 coreconfigitem('smtp', 'host',
794 default=None,
791 default=None,
795 )
792 )
796 coreconfigitem('smtp', 'local_hostname',
793 coreconfigitem('smtp', 'local_hostname',
797 default=None,
794 default=None,
798 )
795 )
799 coreconfigitem('smtp', 'password',
796 coreconfigitem('smtp', 'password',
800 default=None,
797 default=None,
801 )
798 )
802 coreconfigitem('smtp', 'port',
799 coreconfigitem('smtp', 'port',
803 default=dynamicdefault,
800 default=dynamicdefault,
804 )
801 )
805 coreconfigitem('smtp', 'tls',
802 coreconfigitem('smtp', 'tls',
806 default='none',
803 default='none',
807 )
804 )
808 coreconfigitem('smtp', 'username',
805 coreconfigitem('smtp', 'username',
809 default=None,
806 default=None,
810 )
807 )
811 coreconfigitem('sparse', 'missingwarning',
808 coreconfigitem('sparse', 'missingwarning',
812 default=True,
809 default=True,
813 )
810 )
814 coreconfigitem('templates', '.*',
811 coreconfigitem('templates', '.*',
815 default=None,
812 default=None,
816 generic=True,
813 generic=True,
817 )
814 )
818 coreconfigitem('trusted', 'groups',
815 coreconfigitem('trusted', 'groups',
819 default=list,
816 default=list,
820 )
817 )
821 coreconfigitem('trusted', 'users',
818 coreconfigitem('trusted', 'users',
822 default=list,
819 default=list,
823 )
820 )
824 coreconfigitem('ui', '_usedassubrepo',
821 coreconfigitem('ui', '_usedassubrepo',
825 default=False,
822 default=False,
826 )
823 )
827 coreconfigitem('ui', 'allowemptycommit',
824 coreconfigitem('ui', 'allowemptycommit',
828 default=False,
825 default=False,
829 )
826 )
830 coreconfigitem('ui', 'archivemeta',
827 coreconfigitem('ui', 'archivemeta',
831 default=True,
828 default=True,
832 )
829 )
833 coreconfigitem('ui', 'askusername',
830 coreconfigitem('ui', 'askusername',
834 default=False,
831 default=False,
835 )
832 )
836 coreconfigitem('ui', 'clonebundlefallback',
833 coreconfigitem('ui', 'clonebundlefallback',
837 default=False,
834 default=False,
838 )
835 )
839 coreconfigitem('ui', 'clonebundleprefers',
836 coreconfigitem('ui', 'clonebundleprefers',
840 default=list,
837 default=list,
841 )
838 )
842 coreconfigitem('ui', 'clonebundles',
839 coreconfigitem('ui', 'clonebundles',
843 default=True,
840 default=True,
844 )
841 )
845 coreconfigitem('ui', 'color',
842 coreconfigitem('ui', 'color',
846 default='auto',
843 default='auto',
847 )
844 )
848 coreconfigitem('ui', 'commitsubrepos',
845 coreconfigitem('ui', 'commitsubrepos',
849 default=False,
846 default=False,
850 )
847 )
851 coreconfigitem('ui', 'debug',
848 coreconfigitem('ui', 'debug',
852 default=False,
849 default=False,
853 )
850 )
854 coreconfigitem('ui', 'debugger',
851 coreconfigitem('ui', 'debugger',
855 default=None,
852 default=None,
856 )
853 )
857 coreconfigitem('ui', 'editor',
854 coreconfigitem('ui', 'editor',
858 default=dynamicdefault,
855 default=dynamicdefault,
859 )
856 )
860 coreconfigitem('ui', 'fallbackencoding',
857 coreconfigitem('ui', 'fallbackencoding',
861 default=None,
858 default=None,
862 )
859 )
863 coreconfigitem('ui', 'forcecwd',
860 coreconfigitem('ui', 'forcecwd',
864 default=None,
861 default=None,
865 )
862 )
866 coreconfigitem('ui', 'forcemerge',
863 coreconfigitem('ui', 'forcemerge',
867 default=None,
864 default=None,
868 )
865 )
869 coreconfigitem('ui', 'formatdebug',
866 coreconfigitem('ui', 'formatdebug',
870 default=False,
867 default=False,
871 )
868 )
872 coreconfigitem('ui', 'formatjson',
869 coreconfigitem('ui', 'formatjson',
873 default=False,
870 default=False,
874 )
871 )
875 coreconfigitem('ui', 'formatted',
872 coreconfigitem('ui', 'formatted',
876 default=None,
873 default=None,
877 )
874 )
878 coreconfigitem('ui', 'graphnodetemplate',
875 coreconfigitem('ui', 'graphnodetemplate',
879 default=None,
876 default=None,
880 )
877 )
881 coreconfigitem('ui', 'http2debuglevel',
878 coreconfigitem('ui', 'http2debuglevel',
882 default=None,
879 default=None,
883 )
880 )
884 coreconfigitem('ui', 'interactive',
881 coreconfigitem('ui', 'interactive',
885 default=None,
882 default=None,
886 )
883 )
887 coreconfigitem('ui', 'interface',
884 coreconfigitem('ui', 'interface',
888 default=None,
885 default=None,
889 )
886 )
890 coreconfigitem('ui', 'interface.chunkselector',
887 coreconfigitem('ui', 'interface.chunkselector',
891 default=None,
888 default=None,
892 )
889 )
893 coreconfigitem('ui', 'logblockedtimes',
890 coreconfigitem('ui', 'logblockedtimes',
894 default=False,
891 default=False,
895 )
892 )
896 coreconfigitem('ui', 'logtemplate',
893 coreconfigitem('ui', 'logtemplate',
897 default=None,
894 default=None,
898 )
895 )
899 coreconfigitem('ui', 'merge',
896 coreconfigitem('ui', 'merge',
900 default=None,
897 default=None,
901 )
898 )
902 coreconfigitem('ui', 'mergemarkers',
899 coreconfigitem('ui', 'mergemarkers',
903 default='basic',
900 default='basic',
904 )
901 )
905 coreconfigitem('ui', 'mergemarkertemplate',
902 coreconfigitem('ui', 'mergemarkertemplate',
906 default=('{node|short} '
903 default=('{node|short} '
907 '{ifeq(tags, "tip", "", '
904 '{ifeq(tags, "tip", "", '
908 'ifeq(tags, "", "", "{tags} "))}'
905 'ifeq(tags, "", "", "{tags} "))}'
909 '{if(bookmarks, "{bookmarks} ")}'
906 '{if(bookmarks, "{bookmarks} ")}'
910 '{ifeq(branch, "default", "", "{branch} ")}'
907 '{ifeq(branch, "default", "", "{branch} ")}'
911 '- {author|user}: {desc|firstline}')
908 '- {author|user}: {desc|firstline}')
912 )
909 )
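For illustration only (hypothetical changeset values, not part of this change), the default ui.mergemarkertemplate registered above annotates a conflict marker roughly like:

a1b2c3d4e5f6 my-bookmark stable - alice: fix off-by-one in hunk parsing

i.e. short node, bookmarks, non-default branch, then author and the first line of the description; tags are shown only when present and not 'tip'.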
913 coreconfigitem('ui', 'nontty',
910 coreconfigitem('ui', 'nontty',
914 default=False,
911 default=False,
915 )
912 )
916 coreconfigitem('ui', 'origbackuppath',
913 coreconfigitem('ui', 'origbackuppath',
917 default=None,
914 default=None,
918 )
915 )
919 coreconfigitem('ui', 'paginate',
916 coreconfigitem('ui', 'paginate',
920 default=True,
917 default=True,
921 )
918 )
922 coreconfigitem('ui', 'patch',
919 coreconfigitem('ui', 'patch',
923 default=None,
920 default=None,
924 )
921 )
925 coreconfigitem('ui', 'portablefilenames',
922 coreconfigitem('ui', 'portablefilenames',
926 default='warn',
923 default='warn',
927 )
924 )
928 coreconfigitem('ui', 'promptecho',
925 coreconfigitem('ui', 'promptecho',
929 default=False,
926 default=False,
930 )
927 )
931 coreconfigitem('ui', 'quiet',
928 coreconfigitem('ui', 'quiet',
932 default=False,
929 default=False,
933 )
930 )
934 coreconfigitem('ui', 'quietbookmarkmove',
931 coreconfigitem('ui', 'quietbookmarkmove',
935 default=False,
932 default=False,
936 )
933 )
937 coreconfigitem('ui', 'remotecmd',
934 coreconfigitem('ui', 'remotecmd',
938 default='hg',
935 default='hg',
939 )
936 )
940 coreconfigitem('ui', 'report_untrusted',
937 coreconfigitem('ui', 'report_untrusted',
941 default=True,
938 default=True,
942 )
939 )
943 coreconfigitem('ui', 'rollback',
940 coreconfigitem('ui', 'rollback',
944 default=True,
941 default=True,
945 )
942 )
946 coreconfigitem('ui', 'slash',
943 coreconfigitem('ui', 'slash',
947 default=False,
944 default=False,
948 )
945 )
949 coreconfigitem('ui', 'ssh',
946 coreconfigitem('ui', 'ssh',
950 default='ssh',
947 default='ssh',
951 )
948 )
952 coreconfigitem('ui', 'statuscopies',
949 coreconfigitem('ui', 'statuscopies',
953 default=False,
950 default=False,
954 )
951 )
955 coreconfigitem('ui', 'strict',
952 coreconfigitem('ui', 'strict',
956 default=False,
953 default=False,
957 )
954 )
958 coreconfigitem('ui', 'style',
955 coreconfigitem('ui', 'style',
959 default='',
956 default='',
960 )
957 )
961 coreconfigitem('ui', 'supportcontact',
958 coreconfigitem('ui', 'supportcontact',
962 default=None,
959 default=None,
963 )
960 )
964 coreconfigitem('ui', 'textwidth',
961 coreconfigitem('ui', 'textwidth',
965 default=78,
962 default=78,
966 )
963 )
967 coreconfigitem('ui', 'timeout',
964 coreconfigitem('ui', 'timeout',
968 default='600',
965 default='600',
969 )
966 )
970 coreconfigitem('ui', 'traceback',
967 coreconfigitem('ui', 'traceback',
971 default=False,
968 default=False,
972 )
969 )
973 coreconfigitem('ui', 'tweakdefaults',
970 coreconfigitem('ui', 'tweakdefaults',
974 default=False,
971 default=False,
975 )
972 )
976 coreconfigitem('ui', 'usehttp2',
973 coreconfigitem('ui', 'usehttp2',
977 default=False,
974 default=False,
978 )
975 )
979 coreconfigitem('ui', 'username',
976 coreconfigitem('ui', 'username',
980 alias=[('ui', 'user')]
977 alias=[('ui', 'user')]
981 )
978 )
982 coreconfigitem('ui', 'verbose',
979 coreconfigitem('ui', 'verbose',
983 default=False,
980 default=False,
984 )
981 )
985 coreconfigitem('verify', 'skipflags',
982 coreconfigitem('verify', 'skipflags',
986 default=None,
983 default=None,
987 )
984 )
988 coreconfigitem('web', 'allowbz2',
985 coreconfigitem('web', 'allowbz2',
989 default=False,
986 default=False,
990 )
987 )
991 coreconfigitem('web', 'allowgz',
988 coreconfigitem('web', 'allowgz',
992 default=False,
989 default=False,
993 )
990 )
994 coreconfigitem('web', 'allowpull',
991 coreconfigitem('web', 'allowpull',
995 default=True,
992 default=True,
996 )
993 )
997 coreconfigitem('web', 'allow_push',
994 coreconfigitem('web', 'allow_push',
998 default=list,
995 default=list,
999 )
996 )
1000 coreconfigitem('web', 'allowzip',
997 coreconfigitem('web', 'allowzip',
1001 default=False,
998 default=False,
1002 )
999 )
1003 coreconfigitem('web', 'archivesubrepos',
1000 coreconfigitem('web', 'archivesubrepos',
1004 default=False,
1001 default=False,
1005 )
1002 )
1006 coreconfigitem('web', 'cache',
1003 coreconfigitem('web', 'cache',
1007 default=True,
1004 default=True,
1008 )
1005 )
1009 coreconfigitem('web', 'contact',
1006 coreconfigitem('web', 'contact',
1010 default=None,
1007 default=None,
1011 )
1008 )
1012 coreconfigitem('web', 'deny_push',
1009 coreconfigitem('web', 'deny_push',
1013 default=list,
1010 default=list,
1014 )
1011 )
1015 coreconfigitem('web', 'guessmime',
1012 coreconfigitem('web', 'guessmime',
1016 default=False,
1013 default=False,
1017 )
1014 )
1018 coreconfigitem('web', 'hidden',
1015 coreconfigitem('web', 'hidden',
1019 default=False,
1016 default=False,
1020 )
1017 )
1021 coreconfigitem('web', 'labels',
1018 coreconfigitem('web', 'labels',
1022 default=list,
1019 default=list,
1023 )
1020 )
1024 coreconfigitem('web', 'logoimg',
1021 coreconfigitem('web', 'logoimg',
1025 default='hglogo.png',
1022 default='hglogo.png',
1026 )
1023 )
1027 coreconfigitem('web', 'logourl',
1024 coreconfigitem('web', 'logourl',
1028 default='https://mercurial-scm.org/',
1025 default='https://mercurial-scm.org/',
1029 )
1026 )
1030 coreconfigitem('web', 'accesslog',
1027 coreconfigitem('web', 'accesslog',
1031 default='-',
1028 default='-',
1032 )
1029 )
1033 coreconfigitem('web', 'address',
1030 coreconfigitem('web', 'address',
1034 default='',
1031 default='',
1035 )
1032 )
1036 coreconfigitem('web', 'allow_archive',
1033 coreconfigitem('web', 'allow_archive',
1037 default=list,
1034 default=list,
1038 )
1035 )
1039 coreconfigitem('web', 'allow_read',
1036 coreconfigitem('web', 'allow_read',
1040 default=list,
1037 default=list,
1041 )
1038 )
1042 coreconfigitem('web', 'baseurl',
1039 coreconfigitem('web', 'baseurl',
1043 default=None,
1040 default=None,
1044 )
1041 )
1045 coreconfigitem('web', 'cacerts',
1042 coreconfigitem('web', 'cacerts',
1046 default=None,
1043 default=None,
1047 )
1044 )
1048 coreconfigitem('web', 'certificate',
1045 coreconfigitem('web', 'certificate',
1049 default=None,
1046 default=None,
1050 )
1047 )
1051 coreconfigitem('web', 'collapse',
1048 coreconfigitem('web', 'collapse',
1052 default=False,
1049 default=False,
1053 )
1050 )
1054 coreconfigitem('web', 'csp',
1051 coreconfigitem('web', 'csp',
1055 default=None,
1052 default=None,
1056 )
1053 )
1057 coreconfigitem('web', 'deny_read',
1054 coreconfigitem('web', 'deny_read',
1058 default=list,
1055 default=list,
1059 )
1056 )
1060 coreconfigitem('web', 'descend',
1057 coreconfigitem('web', 'descend',
1061 default=True,
1058 default=True,
1062 )
1059 )
1063 coreconfigitem('web', 'description',
1060 coreconfigitem('web', 'description',
1064 default="",
1061 default="",
1065 )
1062 )
1066 coreconfigitem('web', 'encoding',
1063 coreconfigitem('web', 'encoding',
1067 default=lambda: encoding.encoding,
1064 default=lambda: encoding.encoding,
1068 )
1065 )
1069 coreconfigitem('web', 'errorlog',
1066 coreconfigitem('web', 'errorlog',
1070 default='-',
1067 default='-',
1071 )
1068 )
1072 coreconfigitem('web', 'ipv6',
1069 coreconfigitem('web', 'ipv6',
1073 default=False,
1070 default=False,
1074 )
1071 )
1075 coreconfigitem('web', 'maxchanges',
1072 coreconfigitem('web', 'maxchanges',
1076 default=10,
1073 default=10,
1077 )
1074 )
1078 coreconfigitem('web', 'maxfiles',
1075 coreconfigitem('web', 'maxfiles',
1079 default=10,
1076 default=10,
1080 )
1077 )
1081 coreconfigitem('web', 'maxshortchanges',
1078 coreconfigitem('web', 'maxshortchanges',
1082 default=60,
1079 default=60,
1083 )
1080 )
1084 coreconfigitem('web', 'motd',
1081 coreconfigitem('web', 'motd',
1085 default='',
1082 default='',
1086 )
1083 )
1087 coreconfigitem('web', 'name',
1084 coreconfigitem('web', 'name',
1088 default=dynamicdefault,
1085 default=dynamicdefault,
1089 )
1086 )
1090 coreconfigitem('web', 'port',
1087 coreconfigitem('web', 'port',
1091 default=8000,
1088 default=8000,
1092 )
1089 )
1093 coreconfigitem('web', 'prefix',
1090 coreconfigitem('web', 'prefix',
1094 default='',
1091 default='',
1095 )
1092 )
1096 coreconfigitem('web', 'push_ssl',
1093 coreconfigitem('web', 'push_ssl',
1097 default=True,
1094 default=True,
1098 )
1095 )
1099 coreconfigitem('web', 'refreshinterval',
1096 coreconfigitem('web', 'refreshinterval',
1100 default=20,
1097 default=20,
1101 )
1098 )
1102 coreconfigitem('web', 'staticurl',
1099 coreconfigitem('web', 'staticurl',
1103 default=None,
1100 default=None,
1104 )
1101 )
1105 coreconfigitem('web', 'stripes',
1102 coreconfigitem('web', 'stripes',
1106 default=1,
1103 default=1,
1107 )
1104 )
1108 coreconfigitem('web', 'style',
1105 coreconfigitem('web', 'style',
1109 default='paper',
1106 default='paper',
1110 )
1107 )
1111 coreconfigitem('web', 'templates',
1108 coreconfigitem('web', 'templates',
1112 default=None,
1109 default=None,
1113 )
1110 )
1114 coreconfigitem('web', 'view',
1111 coreconfigitem('web', 'view',
1115 default='served',
1112 default='served',
1116 )
1113 )
1117 coreconfigitem('worker', 'backgroundclose',
1114 coreconfigitem('worker', 'backgroundclose',
1118 default=dynamicdefault,
1115 default=dynamicdefault,
1119 )
1116 )
1120 # Windows defaults to a limit of 512 open files. A buffer of 128
1117 # Windows defaults to a limit of 512 open files. A buffer of 128
1121 # should give us enough headway (hence the default of 512 - 128 = 384 below).
1118 # should give us enough headway (hence the default of 512 - 128 = 384 below).
1122 coreconfigitem('worker', 'backgroundclosemaxqueue',
1119 coreconfigitem('worker', 'backgroundclosemaxqueue',
1123 default=384,
1120 default=384,
1124 )
1121 )
1125 coreconfigitem('worker', 'backgroundcloseminfilecount',
1122 coreconfigitem('worker', 'backgroundcloseminfilecount',
1126 default=2048,
1123 default=2048,
1127 )
1124 )
1128 coreconfigitem('worker', 'backgroundclosethreadcount',
1125 coreconfigitem('worker', 'backgroundclosethreadcount',
1129 default=4,
1126 default=4,
1130 )
1127 )
1131 coreconfigitem('worker', 'numcpus',
1128 coreconfigitem('worker', 'numcpus',
1132 default=None,
1129 default=None,
1133 )
1130 )
1134
1131
1135 # Rebase-related configuration moved to core because other extensions are doing
1132 # Rebase-related configuration moved to core because other extensions are doing
1136 # strange things. For example, shelve imports the extension to reuse some bits
1133 # strange things. For example, shelve imports the extension to reuse some bits
1137 # without formally loading it.
1134 # without formally loading it.
1138 coreconfigitem('commands', 'rebase.requiredest',
1135 coreconfigitem('commands', 'rebase.requiredest',
1139 default=False,
1136 default=False,
1140 )
1137 )
1141 coreconfigitem('experimental', 'rebaseskipobsolete',
1138 coreconfigitem('experimental', 'rebaseskipobsolete',
1142 default=True,
1139 default=True,
1143 )
1140 )
1144 coreconfigitem('rebase', 'singletransaction',
1141 coreconfigitem('rebase', 'singletransaction',
1145 default=False,
1142 default=False,
1146 )
1143 )
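The registrations above give every core setting a compiled-in default. A minimal sketch, assuming a stock Mercurial installation and no overriding hgrc values, of how those defaults surface through the usual ui accessors (illustration only, not part of this changeset):

from mercurial import ui as uimod

u = uimod.ui.load()
# with nothing set in any hgrc, the registered defaults are returned
print(u.configbool('experimental', 'evolution.track-operation'))  # True
print(u.configint('experimental', 'maxdeltachainspan'))           # -1
print(u.config('web', 'style'))                                    # 'paper'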
@@ -1,2810 +1,2810 b''
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import, print_function
9 from __future__ import absolute_import, print_function
10
10
11 import collections
11 import collections
12 import copy
12 import copy
13 import email
13 import email
14 import errno
14 import errno
15 import hashlib
15 import hashlib
16 import os
16 import os
17 import posixpath
17 import posixpath
18 import re
18 import re
19 import shutil
19 import shutil
20 import tempfile
20 import tempfile
21 import zlib
21 import zlib
22
22
23 from .i18n import _
23 from .i18n import _
24 from .node import (
24 from .node import (
25 hex,
25 hex,
26 short,
26 short,
27 )
27 )
28 from . import (
28 from . import (
29 copies,
29 copies,
30 encoding,
30 encoding,
31 error,
31 error,
32 mail,
32 mail,
33 mdiff,
33 mdiff,
34 pathutil,
34 pathutil,
35 policy,
35 policy,
36 pycompat,
36 pycompat,
37 scmutil,
37 scmutil,
38 similar,
38 similar,
39 util,
39 util,
40 vfs as vfsmod,
40 vfs as vfsmod,
41 )
41 )
42
42
43 diffhelpers = policy.importmod(r'diffhelpers')
43 diffhelpers = policy.importmod(r'diffhelpers')
44 stringio = util.stringio
44 stringio = util.stringio
45
45
46 gitre = re.compile(br'diff --git a/(.*) b/(.*)')
46 gitre = re.compile(br'diff --git a/(.*) b/(.*)')
47 tabsplitter = re.compile(br'(\t+|[^\t]+)')
47 tabsplitter = re.compile(br'(\t+|[^\t]+)')
48
48
49 PatchError = error.PatchError
49 PatchError = error.PatchError
50
50
51 # public functions
51 # public functions
52
52
53 def split(stream):
53 def split(stream):
54 '''return an iterator of individual patches from a stream'''
54 '''return an iterator of individual patches from a stream'''
55 def isheader(line, inheader):
55 def isheader(line, inheader):
56 if inheader and line[0] in (' ', '\t'):
56 if inheader and line[0] in (' ', '\t'):
57 # continuation
57 # continuation
58 return True
58 return True
59 if line[0] in (' ', '-', '+'):
59 if line[0] in (' ', '-', '+'):
60 # diff line - don't check for header pattern in there
60 # diff line - don't check for header pattern in there
61 return False
61 return False
62 l = line.split(': ', 1)
62 l = line.split(': ', 1)
63 return len(l) == 2 and ' ' not in l[0]
63 return len(l) == 2 and ' ' not in l[0]
64
64
65 def chunk(lines):
65 def chunk(lines):
66 return stringio(''.join(lines))
66 return stringio(''.join(lines))
67
67
68 def hgsplit(stream, cur):
68 def hgsplit(stream, cur):
69 inheader = True
69 inheader = True
70
70
71 for line in stream:
71 for line in stream:
72 if not line.strip():
72 if not line.strip():
73 inheader = False
73 inheader = False
74 if not inheader and line.startswith('# HG changeset patch'):
74 if not inheader and line.startswith('# HG changeset patch'):
75 yield chunk(cur)
75 yield chunk(cur)
76 cur = []
76 cur = []
77 inheader = True
77 inheader = True
78
78
79 cur.append(line)
79 cur.append(line)
80
80
81 if cur:
81 if cur:
82 yield chunk(cur)
82 yield chunk(cur)
83
83
84 def mboxsplit(stream, cur):
84 def mboxsplit(stream, cur):
85 for line in stream:
85 for line in stream:
86 if line.startswith('From '):
86 if line.startswith('From '):
87 for c in split(chunk(cur[1:])):
87 for c in split(chunk(cur[1:])):
88 yield c
88 yield c
89 cur = []
89 cur = []
90
90
91 cur.append(line)
91 cur.append(line)
92
92
93 if cur:
93 if cur:
94 for c in split(chunk(cur[1:])):
94 for c in split(chunk(cur[1:])):
95 yield c
95 yield c
96
96
97 def mimesplit(stream, cur):
97 def mimesplit(stream, cur):
98 def msgfp(m):
98 def msgfp(m):
99 fp = stringio()
99 fp = stringio()
100 g = email.Generator.Generator(fp, mangle_from_=False)
100 g = email.Generator.Generator(fp, mangle_from_=False)
101 g.flatten(m)
101 g.flatten(m)
102 fp.seek(0)
102 fp.seek(0)
103 return fp
103 return fp
104
104
105 for line in stream:
105 for line in stream:
106 cur.append(line)
106 cur.append(line)
107 c = chunk(cur)
107 c = chunk(cur)
108
108
109 m = email.Parser.Parser().parse(c)
109 m = email.Parser.Parser().parse(c)
110 if not m.is_multipart():
110 if not m.is_multipart():
111 yield msgfp(m)
111 yield msgfp(m)
112 else:
112 else:
113 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
113 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
114 for part in m.walk():
114 for part in m.walk():
115 ct = part.get_content_type()
115 ct = part.get_content_type()
116 if ct not in ok_types:
116 if ct not in ok_types:
117 continue
117 continue
118 yield msgfp(part)
118 yield msgfp(part)
119
119
120 def headersplit(stream, cur):
120 def headersplit(stream, cur):
121 inheader = False
121 inheader = False
122
122
123 for line in stream:
123 for line in stream:
124 if not inheader and isheader(line, inheader):
124 if not inheader and isheader(line, inheader):
125 yield chunk(cur)
125 yield chunk(cur)
126 cur = []
126 cur = []
127 inheader = True
127 inheader = True
128 if inheader and not isheader(line, inheader):
128 if inheader and not isheader(line, inheader):
129 inheader = False
129 inheader = False
130
130
131 cur.append(line)
131 cur.append(line)
132
132
133 if cur:
133 if cur:
134 yield chunk(cur)
134 yield chunk(cur)
135
135
136 def remainder(cur):
136 def remainder(cur):
137 yield chunk(cur)
137 yield chunk(cur)
138
138
139 class fiter(object):
139 class fiter(object):
140 def __init__(self, fp):
140 def __init__(self, fp):
141 self.fp = fp
141 self.fp = fp
142
142
143 def __iter__(self):
143 def __iter__(self):
144 return self
144 return self
145
145
146 def next(self):
146 def next(self):
147 l = self.fp.readline()
147 l = self.fp.readline()
148 if not l:
148 if not l:
149 raise StopIteration
149 raise StopIteration
150 return l
150 return l
151
151
152 inheader = False
152 inheader = False
153 cur = []
153 cur = []
154
154
155 mimeheaders = ['content-type']
155 mimeheaders = ['content-type']
156
156
157 if not util.safehasattr(stream, 'next'):
157 if not util.safehasattr(stream, 'next'):
158 # http responses, for example, have readline but not next
158 # http responses, for example, have readline but not next
159 stream = fiter(stream)
159 stream = fiter(stream)
160
160
161 for line in stream:
161 for line in stream:
162 cur.append(line)
162 cur.append(line)
163 if line.startswith('# HG changeset patch'):
163 if line.startswith('# HG changeset patch'):
164 return hgsplit(stream, cur)
164 return hgsplit(stream, cur)
165 elif line.startswith('From '):
165 elif line.startswith('From '):
166 return mboxsplit(stream, cur)
166 return mboxsplit(stream, cur)
167 elif isheader(line, inheader):
167 elif isheader(line, inheader):
168 inheader = True
168 inheader = True
169 if line.split(':', 1)[0].lower() in mimeheaders:
169 if line.split(':', 1)[0].lower() in mimeheaders:
170 # let email parser handle this
170 # let email parser handle this
171 return mimesplit(stream, cur)
171 return mimesplit(stream, cur)
172 elif line.startswith('--- ') and inheader:
172 elif line.startswith('--- ') and inheader:
173 # No evil headers seen by diff start, split by hand
173 # No evil headers seen by diff start, split by hand
174 return headersplit(stream, cur)
174 return headersplit(stream, cur)
175 # Not enough info, keep reading
175 # Not enough info, keep reading
176
176
177 # if we are here, we have a very plain patch
177 # if we are here, we have a very plain patch
178 return remainder(cur)
178 return remainder(cur)
179
179
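A hedged usage sketch for split() above (the patch text is invented for the example): fed a file-like object holding two exported changesets separated by a blank line, it yields one chunk per patch.

from mercurial import patch, util

text = ('# HG changeset patch\n# User alice\npatch one body\n\n'
        '# HG changeset patch\n# User bob\npatch two body\n')
for i, chunk in enumerate(patch.split(util.stringio(text))):
    chunk.readline()                    # skip the '# HG changeset patch' line
    print(i, chunk.readline().strip())  # -> 0 # User alice / 1 # User bob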
180 ## Some facility for extensible patch parsing:
180 ## Some facility for extensible patch parsing:
181 # list of pairs ("header to match", "data key")
181 # list of pairs ("header to match", "data key")
182 patchheadermap = [('Date', 'date'),
182 patchheadermap = [('Date', 'date'),
183 ('Branch', 'branch'),
183 ('Branch', 'branch'),
184 ('Node ID', 'nodeid'),
184 ('Node ID', 'nodeid'),
185 ]
185 ]
186
186
187 def extract(ui, fileobj):
187 def extract(ui, fileobj):
188 '''extract patch from data read from fileobj.
188 '''extract patch from data read from fileobj.
189
189
190 The patch can be a normal patch or be contained in an email message.
190 The patch can be a normal patch or be contained in an email message.
191
191
192 return a dictionary. Standard keys are:
192 return a dictionary. Standard keys are:
193 - filename,
193 - filename,
194 - message,
194 - message,
195 - user,
195 - user,
196 - date,
196 - date,
197 - branch,
197 - branch,
198 - node,
198 - node,
199 - p1,
199 - p1,
200 - p2.
200 - p2.
201 Any item can be missing from the dictionary. If filename is missing,
201 Any item can be missing from the dictionary. If filename is missing,
202 fileobj did not contain a patch. Caller must unlink filename when done.'''
202 fileobj did not contain a patch. Caller must unlink filename when done.'''
203
203
204 # attempt to detect the start of a patch
204 # attempt to detect the start of a patch
205 # (this heuristic is borrowed from quilt)
205 # (this heuristic is borrowed from quilt)
206 diffre = re.compile(br'^(?:Index:[ \t]|diff[ \t]-|RCS file: |'
206 diffre = re.compile(br'^(?:Index:[ \t]|diff[ \t]-|RCS file: |'
207 br'retrieving revision [0-9]+(\.[0-9]+)*$|'
207 br'retrieving revision [0-9]+(\.[0-9]+)*$|'
208 br'---[ \t].*?^\+\+\+[ \t]|'
208 br'---[ \t].*?^\+\+\+[ \t]|'
209 br'\*\*\*[ \t].*?^---[ \t])',
209 br'\*\*\*[ \t].*?^---[ \t])',
210 re.MULTILINE | re.DOTALL)
210 re.MULTILINE | re.DOTALL)
211
211
212 data = {}
212 data = {}
213 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
213 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
214 tmpfp = os.fdopen(fd, pycompat.sysstr('w'))
214 tmpfp = os.fdopen(fd, pycompat.sysstr('w'))
215 try:
215 try:
216 msg = email.Parser.Parser().parse(fileobj)
216 msg = email.Parser.Parser().parse(fileobj)
217
217
218 subject = msg['Subject'] and mail.headdecode(msg['Subject'])
218 subject = msg['Subject'] and mail.headdecode(msg['Subject'])
219 data['user'] = msg['From'] and mail.headdecode(msg['From'])
219 data['user'] = msg['From'] and mail.headdecode(msg['From'])
220 if not subject and not data['user']:
220 if not subject and not data['user']:
221 # Not an email, restore parsed headers if any
221 # Not an email, restore parsed headers if any
222 subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'
222 subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'
223
223
224 # should try to parse msg['Date']
224 # should try to parse msg['Date']
225 parents = []
225 parents = []
226
226
227 if subject:
227 if subject:
228 if subject.startswith('[PATCH'):
228 if subject.startswith('[PATCH'):
229 pend = subject.find(']')
229 pend = subject.find(']')
230 if pend >= 0:
230 if pend >= 0:
231 subject = subject[pend + 1:].lstrip()
231 subject = subject[pend + 1:].lstrip()
232 subject = re.sub(br'\n[ \t]+', ' ', subject)
232 subject = re.sub(br'\n[ \t]+', ' ', subject)
233 ui.debug('Subject: %s\n' % subject)
233 ui.debug('Subject: %s\n' % subject)
234 if data['user']:
234 if data['user']:
235 ui.debug('From: %s\n' % data['user'])
235 ui.debug('From: %s\n' % data['user'])
236 diffs_seen = 0
236 diffs_seen = 0
237 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
237 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
238 message = ''
238 message = ''
239 for part in msg.walk():
239 for part in msg.walk():
240 content_type = part.get_content_type()
240 content_type = part.get_content_type()
241 ui.debug('Content-Type: %s\n' % content_type)
241 ui.debug('Content-Type: %s\n' % content_type)
242 if content_type not in ok_types:
242 if content_type not in ok_types:
243 continue
243 continue
244 payload = part.get_payload(decode=True)
244 payload = part.get_payload(decode=True)
245 m = diffre.search(payload)
245 m = diffre.search(payload)
246 if m:
246 if m:
247 hgpatch = False
247 hgpatch = False
248 hgpatchheader = False
248 hgpatchheader = False
249 ignoretext = False
249 ignoretext = False
250
250
251 ui.debug('found patch at byte %d\n' % m.start(0))
251 ui.debug('found patch at byte %d\n' % m.start(0))
252 diffs_seen += 1
252 diffs_seen += 1
253 cfp = stringio()
253 cfp = stringio()
254 for line in payload[:m.start(0)].splitlines():
254 for line in payload[:m.start(0)].splitlines():
255 if line.startswith('# HG changeset patch') and not hgpatch:
255 if line.startswith('# HG changeset patch') and not hgpatch:
256 ui.debug('patch generated by hg export\n')
256 ui.debug('patch generated by hg export\n')
257 hgpatch = True
257 hgpatch = True
258 hgpatchheader = True
258 hgpatchheader = True
259 # drop earlier commit message content
259 # drop earlier commit message content
260 cfp.seek(0)
260 cfp.seek(0)
261 cfp.truncate()
261 cfp.truncate()
262 subject = None
262 subject = None
263 elif hgpatchheader:
263 elif hgpatchheader:
264 if line.startswith('# User '):
264 if line.startswith('# User '):
265 data['user'] = line[7:]
265 data['user'] = line[7:]
266 ui.debug('From: %s\n' % data['user'])
266 ui.debug('From: %s\n' % data['user'])
267 elif line.startswith("# Parent "):
267 elif line.startswith("# Parent "):
268 parents.append(line[9:].lstrip())
268 parents.append(line[9:].lstrip())
269 elif line.startswith("# "):
269 elif line.startswith("# "):
270 for header, key in patchheadermap:
270 for header, key in patchheadermap:
271 prefix = '# %s ' % header
271 prefix = '# %s ' % header
272 if line.startswith(prefix):
272 if line.startswith(prefix):
273 data[key] = line[len(prefix):]
273 data[key] = line[len(prefix):]
274 else:
274 else:
275 hgpatchheader = False
275 hgpatchheader = False
276 elif line == '---':
276 elif line == '---':
277 ignoretext = True
277 ignoretext = True
278 if not hgpatchheader and not ignoretext:
278 if not hgpatchheader and not ignoretext:
279 cfp.write(line)
279 cfp.write(line)
280 cfp.write('\n')
280 cfp.write('\n')
281 message = cfp.getvalue()
281 message = cfp.getvalue()
282 if tmpfp:
282 if tmpfp:
283 tmpfp.write(payload)
283 tmpfp.write(payload)
284 if not payload.endswith('\n'):
284 if not payload.endswith('\n'):
285 tmpfp.write('\n')
285 tmpfp.write('\n')
286 elif not diffs_seen and message and content_type == 'text/plain':
286 elif not diffs_seen and message and content_type == 'text/plain':
287 message += '\n' + payload
287 message += '\n' + payload
288 except: # re-raises
288 except: # re-raises
289 tmpfp.close()
289 tmpfp.close()
290 os.unlink(tmpname)
290 os.unlink(tmpname)
291 raise
291 raise
292
292
293 if subject and not message.startswith(subject):
293 if subject and not message.startswith(subject):
294 message = '%s\n%s' % (subject, message)
294 message = '%s\n%s' % (subject, message)
295 data['message'] = message
295 data['message'] = message
296 tmpfp.close()
296 tmpfp.close()
297 if parents:
297 if parents:
298 data['p1'] = parents.pop(0)
298 data['p1'] = parents.pop(0)
299 if parents:
299 if parents:
300 data['p2'] = parents.pop(0)
300 data['p2'] = parents.pop(0)
301
301
302 if diffs_seen:
302 if diffs_seen:
303 data['filename'] = tmpname
303 data['filename'] = tmpname
304 else:
304 else:
305 os.unlink(tmpname)
305 os.unlink(tmpname)
306 return data
306 return data
307
307
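A hedged sketch of the extract() contract described in its docstring; the mail file name here is hypothetical, and the caller is responsible for unlinking data['filename'] once a patch was found.

import os
from mercurial import patch, ui as uimod

u = uimod.ui.load()
with open('incoming.eml', 'rb') as fp:           # hypothetical mail file
    data = patch.extract(u, fp)
try:
    if 'filename' in data:
        print('patch saved to', data['filename'])
        print('commit message:', data['message'].splitlines()[:1])
finally:
    if 'filename' in data:
        os.unlink(data['filename'])              # required by the contract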
308 class patchmeta(object):
308 class patchmeta(object):
309 """Patched file metadata
309 """Patched file metadata
310
310
311 'op' is the performed operation, one of ADD, DELETE, RENAME, MODIFY
311 'op' is the performed operation, one of ADD, DELETE, RENAME, MODIFY
312 or COPY. 'path' is the patched file path. 'oldpath' is set to the
312 or COPY. 'path' is the patched file path. 'oldpath' is set to the
313 origin file when 'op' is either COPY or RENAME, None otherwise. If
313 origin file when 'op' is either COPY or RENAME, None otherwise. If
314 file mode is changed, 'mode' is a tuple (islink, isexec) where
314 file mode is changed, 'mode' is a tuple (islink, isexec) where
315 'islink' is True if the file is a symlink and 'isexec' is True if
315 'islink' is True if the file is a symlink and 'isexec' is True if
316 the file is executable. Otherwise, 'mode' is None.
316 the file is executable. Otherwise, 'mode' is None.
317 """
317 """
318 def __init__(self, path):
318 def __init__(self, path):
319 self.path = path
319 self.path = path
320 self.oldpath = None
320 self.oldpath = None
321 self.mode = None
321 self.mode = None
322 self.op = 'MODIFY'
322 self.op = 'MODIFY'
323 self.binary = False
323 self.binary = False
324
324
325 def setmode(self, mode):
325 def setmode(self, mode):
326 islink = mode & 0o20000
326 islink = mode & 0o20000
327 isexec = mode & 0o100
327 isexec = mode & 0o100
328 self.mode = (islink, isexec)
328 self.mode = (islink, isexec)
329
329
330 def copy(self):
330 def copy(self):
331 other = patchmeta(self.path)
331 other = patchmeta(self.path)
332 other.oldpath = self.oldpath
332 other.oldpath = self.oldpath
333 other.mode = self.mode
333 other.mode = self.mode
334 other.op = self.op
334 other.op = self.op
335 other.binary = self.binary
335 other.binary = self.binary
336 return other
336 return other
337
337
338 def _ispatchinga(self, afile):
338 def _ispatchinga(self, afile):
339 if afile == '/dev/null':
339 if afile == '/dev/null':
340 return self.op == 'ADD'
340 return self.op == 'ADD'
341 return afile == 'a/' + (self.oldpath or self.path)
341 return afile == 'a/' + (self.oldpath or self.path)
342
342
343 def _ispatchingb(self, bfile):
343 def _ispatchingb(self, bfile):
344 if bfile == '/dev/null':
344 if bfile == '/dev/null':
345 return self.op == 'DELETE'
345 return self.op == 'DELETE'
346 return bfile == 'b/' + self.path
346 return bfile == 'b/' + self.path
347
347
348 def ispatching(self, afile, bfile):
348 def ispatching(self, afile, bfile):
349 return self._ispatchinga(afile) and self._ispatchingb(bfile)
349 return self._ispatchinga(afile) and self._ispatchingb(bfile)
350
350
351 def __repr__(self):
351 def __repr__(self):
352 return "<patchmeta %s %r>" % (self.op, self.path)
352 return "<patchmeta %s %r>" % (self.op, self.path)
353
353
354 def readgitpatch(lr):
354 def readgitpatch(lr):
355 """extract git-style metadata about patches from <patchname>"""
355 """extract git-style metadata about patches from <patchname>"""
356
356
357 # Filter patch for git information
357 # Filter patch for git information
358 gp = None
358 gp = None
359 gitpatches = []
359 gitpatches = []
360 for line in lr:
360 for line in lr:
361 line = line.rstrip(' \r\n')
361 line = line.rstrip(' \r\n')
362 if line.startswith('diff --git a/'):
362 if line.startswith('diff --git a/'):
363 m = gitre.match(line)
363 m = gitre.match(line)
364 if m:
364 if m:
365 if gp:
365 if gp:
366 gitpatches.append(gp)
366 gitpatches.append(gp)
367 dst = m.group(2)
367 dst = m.group(2)
368 gp = patchmeta(dst)
368 gp = patchmeta(dst)
369 elif gp:
369 elif gp:
370 if line.startswith('--- '):
370 if line.startswith('--- '):
371 gitpatches.append(gp)
371 gitpatches.append(gp)
372 gp = None
372 gp = None
373 continue
373 continue
374 if line.startswith('rename from '):
374 if line.startswith('rename from '):
375 gp.op = 'RENAME'
375 gp.op = 'RENAME'
376 gp.oldpath = line[12:]
376 gp.oldpath = line[12:]
377 elif line.startswith('rename to '):
377 elif line.startswith('rename to '):
378 gp.path = line[10:]
378 gp.path = line[10:]
379 elif line.startswith('copy from '):
379 elif line.startswith('copy from '):
380 gp.op = 'COPY'
380 gp.op = 'COPY'
381 gp.oldpath = line[10:]
381 gp.oldpath = line[10:]
382 elif line.startswith('copy to '):
382 elif line.startswith('copy to '):
383 gp.path = line[8:]
383 gp.path = line[8:]
384 elif line.startswith('deleted file'):
384 elif line.startswith('deleted file'):
385 gp.op = 'DELETE'
385 gp.op = 'DELETE'
386 elif line.startswith('new file mode '):
386 elif line.startswith('new file mode '):
387 gp.op = 'ADD'
387 gp.op = 'ADD'
388 gp.setmode(int(line[-6:], 8))
388 gp.setmode(int(line[-6:], 8))
389 elif line.startswith('new mode '):
389 elif line.startswith('new mode '):
390 gp.setmode(int(line[-6:], 8))
390 gp.setmode(int(line[-6:], 8))
391 elif line.startswith('GIT binary patch'):
391 elif line.startswith('GIT binary patch'):
392 gp.binary = True
392 gp.binary = True
393 if gp:
393 if gp:
394 gitpatches.append(gp)
394 gitpatches.append(gp)
395
395
396 return gitpatches
396 return gitpatches
397
397
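A small sketch of readgitpatch() in isolation (the file names are invented): it only inspects the git extended headers, so the header lines of a rename are enough to get back a populated patchmeta.

from mercurial import patch

lines = [
    'diff --git a/old.txt b/new.txt\n',
    'rename from old.txt\n',
    'rename to new.txt\n',
]
gp = patch.readgitpatch(lines)[0]
print(gp.op, gp.oldpath, gp.path)   # RENAME old.txt new.txt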
398 class linereader(object):
398 class linereader(object):
399 # simple class to allow pushing lines back into the input stream
399 # simple class to allow pushing lines back into the input stream
400 def __init__(self, fp):
400 def __init__(self, fp):
401 self.fp = fp
401 self.fp = fp
402 self.buf = []
402 self.buf = []
403
403
404 def push(self, line):
404 def push(self, line):
405 if line is not None:
405 if line is not None:
406 self.buf.append(line)
406 self.buf.append(line)
407
407
408 def readline(self):
408 def readline(self):
409 if self.buf:
409 if self.buf:
410 l = self.buf[0]
410 l = self.buf[0]
411 del self.buf[0]
411 del self.buf[0]
412 return l
412 return l
413 return self.fp.readline()
413 return self.fp.readline()
414
414
415 def __iter__(self):
415 def __iter__(self):
416 return iter(self.readline, '')
416 return iter(self.readline, '')
417
417
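A tiny illustration of linereader's push-back behaviour (input text invented for the example): a pushed line is returned again before anything else is read from the underlying file object.

from mercurial import patch, util

lr = patch.linereader(util.stringio('first\nsecond\n'))
line = lr.readline()             # 'first\n'
lr.push(line)                    # put it back on the reader
print([l.strip() for l in lr])   # ['first', 'second']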
418 class abstractbackend(object):
418 class abstractbackend(object):
419 def __init__(self, ui):
419 def __init__(self, ui):
420 self.ui = ui
420 self.ui = ui
421
421
422 def getfile(self, fname):
422 def getfile(self, fname):
423 """Return target file data and flags as a (data, (islink,
423 """Return target file data and flags as a (data, (islink,
424 isexec)) tuple. Data is None if file is missing/deleted.
424 isexec)) tuple. Data is None if file is missing/deleted.
425 """
425 """
426 raise NotImplementedError
426 raise NotImplementedError
427
427
428 def setfile(self, fname, data, mode, copysource):
428 def setfile(self, fname, data, mode, copysource):
429 """Write data to target file fname and set its mode. mode is a
429 """Write data to target file fname and set its mode. mode is a
430 (islink, isexec) tuple. If data is None, the file content should
430 (islink, isexec) tuple. If data is None, the file content should
431 be left unchanged. If the file is modified after being copied,
431 be left unchanged. If the file is modified after being copied,
432 copysource is set to the original file name.
432 copysource is set to the original file name.
433 """
433 """
434 raise NotImplementedError
434 raise NotImplementedError
435
435
436 def unlink(self, fname):
436 def unlink(self, fname):
437 """Unlink target file."""
437 """Unlink target file."""
438 raise NotImplementedError
438 raise NotImplementedError
439
439
440 def writerej(self, fname, failed, total, lines):
440 def writerej(self, fname, failed, total, lines):
441 """Write rejected lines for fname. total is the number of hunks
441 """Write rejected lines for fname. total is the number of hunks
442 which failed to apply and total the total number of hunks for this
442 which failed to apply and total the total number of hunks for this
443 files.
443 files.
444 """
444 """
445
445
446 def exists(self, fname):
446 def exists(self, fname):
447 raise NotImplementedError
447 raise NotImplementedError
448
448
449 def close(self):
449 def close(self):
450 raise NotImplementedError
450 raise NotImplementedError
451
451
452 class fsbackend(abstractbackend):
452 class fsbackend(abstractbackend):
453 def __init__(self, ui, basedir):
453 def __init__(self, ui, basedir):
454 super(fsbackend, self).__init__(ui)
454 super(fsbackend, self).__init__(ui)
455 self.opener = vfsmod.vfs(basedir)
455 self.opener = vfsmod.vfs(basedir)
456
456
457 def getfile(self, fname):
457 def getfile(self, fname):
458 if self.opener.islink(fname):
458 if self.opener.islink(fname):
459 return (self.opener.readlink(fname), (True, False))
459 return (self.opener.readlink(fname), (True, False))
460
460
461 isexec = False
461 isexec = False
462 try:
462 try:
463 isexec = self.opener.lstat(fname).st_mode & 0o100 != 0
463 isexec = self.opener.lstat(fname).st_mode & 0o100 != 0
464 except OSError as e:
464 except OSError as e:
465 if e.errno != errno.ENOENT:
465 if e.errno != errno.ENOENT:
466 raise
466 raise
467 try:
467 try:
468 return (self.opener.read(fname), (False, isexec))
468 return (self.opener.read(fname), (False, isexec))
469 except IOError as e:
469 except IOError as e:
470 if e.errno != errno.ENOENT:
470 if e.errno != errno.ENOENT:
471 raise
471 raise
472 return None, None
472 return None, None
473
473
474 def setfile(self, fname, data, mode, copysource):
474 def setfile(self, fname, data, mode, copysource):
475 islink, isexec = mode
475 islink, isexec = mode
476 if data is None:
476 if data is None:
477 self.opener.setflags(fname, islink, isexec)
477 self.opener.setflags(fname, islink, isexec)
478 return
478 return
479 if islink:
479 if islink:
480 self.opener.symlink(data, fname)
480 self.opener.symlink(data, fname)
481 else:
481 else:
482 self.opener.write(fname, data)
482 self.opener.write(fname, data)
483 if isexec:
483 if isexec:
484 self.opener.setflags(fname, False, True)
484 self.opener.setflags(fname, False, True)
485
485
486 def unlink(self, fname):
486 def unlink(self, fname):
487 self.opener.unlinkpath(fname, ignoremissing=True)
487 self.opener.unlinkpath(fname, ignoremissing=True)
488
488
489 def writerej(self, fname, failed, total, lines):
489 def writerej(self, fname, failed, total, lines):
490 fname = fname + ".rej"
490 fname = fname + ".rej"
491 self.ui.warn(
491 self.ui.warn(
492 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
492 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
493 (failed, total, fname))
493 (failed, total, fname))
494 fp = self.opener(fname, 'w')
494 fp = self.opener(fname, 'w')
495 fp.writelines(lines)
495 fp.writelines(lines)
496 fp.close()
496 fp.close()
497
497
498 def exists(self, fname):
498 def exists(self, fname):
499 return self.opener.lexists(fname)
499 return self.opener.lexists(fname)
500
500
501 class workingbackend(fsbackend):
501 class workingbackend(fsbackend):
502 def __init__(self, ui, repo, similarity):
502 def __init__(self, ui, repo, similarity):
503 super(workingbackend, self).__init__(ui, repo.root)
503 super(workingbackend, self).__init__(ui, repo.root)
504 self.repo = repo
504 self.repo = repo
505 self.similarity = similarity
505 self.similarity = similarity
506 self.removed = set()
506 self.removed = set()
507 self.changed = set()
507 self.changed = set()
508 self.copied = []
508 self.copied = []
509
509
510 def _checkknown(self, fname):
510 def _checkknown(self, fname):
511 if self.repo.dirstate[fname] == '?' and self.exists(fname):
511 if self.repo.dirstate[fname] == '?' and self.exists(fname):
512 raise PatchError(_('cannot patch %s: file is not tracked') % fname)
512 raise PatchError(_('cannot patch %s: file is not tracked') % fname)
513
513
514 def setfile(self, fname, data, mode, copysource):
514 def setfile(self, fname, data, mode, copysource):
515 self._checkknown(fname)
515 self._checkknown(fname)
516 super(workingbackend, self).setfile(fname, data, mode, copysource)
516 super(workingbackend, self).setfile(fname, data, mode, copysource)
517 if copysource is not None:
517 if copysource is not None:
518 self.copied.append((copysource, fname))
518 self.copied.append((copysource, fname))
519 self.changed.add(fname)
519 self.changed.add(fname)
520
520
521 def unlink(self, fname):
521 def unlink(self, fname):
522 self._checkknown(fname)
522 self._checkknown(fname)
523 super(workingbackend, self).unlink(fname)
523 super(workingbackend, self).unlink(fname)
524 self.removed.add(fname)
524 self.removed.add(fname)
525 self.changed.add(fname)
525 self.changed.add(fname)
526
526
527 def close(self):
527 def close(self):
528 wctx = self.repo[None]
528 wctx = self.repo[None]
529 changed = set(self.changed)
529 changed = set(self.changed)
530 for src, dst in self.copied:
530 for src, dst in self.copied:
531 scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
531 scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
532 if self.removed:
532 if self.removed:
533 wctx.forget(sorted(self.removed))
533 wctx.forget(sorted(self.removed))
534 for f in self.removed:
534 for f in self.removed:
535 if f not in self.repo.dirstate:
535 if f not in self.repo.dirstate:
536 # File was deleted and no longer belongs to the
536 # File was deleted and no longer belongs to the
537 # dirstate, it was probably marked added then
537 # dirstate, it was probably marked added then
538 # deleted, and should not be considered by
538 # deleted, and should not be considered by
539 # marktouched().
539 # marktouched().
540 changed.discard(f)
540 changed.discard(f)
541 if changed:
541 if changed:
542 scmutil.marktouched(self.repo, changed, self.similarity)
542 scmutil.marktouched(self.repo, changed, self.similarity)
543 return sorted(self.changed)
543 return sorted(self.changed)
544
544
545 class filestore(object):
545 class filestore(object):
546 def __init__(self, maxsize=None):
546 def __init__(self, maxsize=None):
547 self.opener = None
547 self.opener = None
548 self.files = {}
548 self.files = {}
549 self.created = 0
549 self.created = 0
550 self.maxsize = maxsize
550 self.maxsize = maxsize
551 if self.maxsize is None:
551 if self.maxsize is None:
552 self.maxsize = 4*(2**20)
552 self.maxsize = 4*(2**20)
553 self.size = 0
553 self.size = 0
554 self.data = {}
554 self.data = {}
555
555
556 def setfile(self, fname, data, mode, copied=None):
556 def setfile(self, fname, data, mode, copied=None):
557 if self.maxsize < 0 or (len(data) + self.size) <= self.maxsize:
557 if self.maxsize < 0 or (len(data) + self.size) <= self.maxsize:
558 self.data[fname] = (data, mode, copied)
558 self.data[fname] = (data, mode, copied)
559 self.size += len(data)
559 self.size += len(data)
560 else:
560 else:
561 if self.opener is None:
561 if self.opener is None:
562 root = tempfile.mkdtemp(prefix='hg-patch-')
562 root = tempfile.mkdtemp(prefix='hg-patch-')
563 self.opener = vfsmod.vfs(root)
563 self.opener = vfsmod.vfs(root)
564 # Avoid filename issues with these simple names
564 # Avoid filename issues with these simple names
565 fn = str(self.created)
565 fn = str(self.created)
566 self.opener.write(fn, data)
566 self.opener.write(fn, data)
567 self.created += 1
567 self.created += 1
568 self.files[fname] = (fn, mode, copied)
568 self.files[fname] = (fn, mode, copied)
569
569
570 def getfile(self, fname):
570 def getfile(self, fname):
571 if fname in self.data:
571 if fname in self.data:
572 return self.data[fname]
572 return self.data[fname]
573 if not self.opener or fname not in self.files:
573 if not self.opener or fname not in self.files:
574 return None, None, None
574 return None, None, None
575 fn, mode, copied = self.files[fname]
575 fn, mode, copied = self.files[fname]
576 return self.opener.read(fn), mode, copied
576 return self.opener.read(fn), mode, copied
577
577
578 def close(self):
578 def close(self):
579 if self.opener:
579 if self.opener:
580 shutil.rmtree(self.opener.base)
580 shutil.rmtree(self.opener.base)
581
581
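# [Editor's note: illustrative sketch, not part of this module.] filestore
# above keeps patched file contents in memory until roughly maxsize bytes
# (4 MiB by default) have accumulated, then spills further entries to a
# throw-away temporary directory so large patches do not have to live in
# RAM. A standalone rendering of that spill-over idea (hypothetical names):

import os
import shutil
import tempfile

class _spillstore(object):
    def __init__(self, maxsize=4 * (2 ** 20)):
        self.maxsize = maxsize
        self.size = 0
        self.mem = {}                 # small entries: fname -> data
        self.spilled = {}             # large entries: fname -> path on disk
        self.tmpdir = None
    def setfile(self, fname, data):
        if self.size + len(data) <= self.maxsize:
            self.mem[fname] = data
            self.size += len(data)
            return
        if self.tmpdir is None:
            self.tmpdir = tempfile.mkdtemp(prefix='hg-spill-')
        path = os.path.join(self.tmpdir, '%d' % len(self.spilled))
        with open(path, 'wb') as f:
            f.write(data)
        self.spilled[fname] = path
    def getfile(self, fname):
        if fname in self.mem:
            return self.mem[fname]
        with open(self.spilled[fname], 'rb') as f:
            return f.read()
    def close(self):
        if self.tmpdir is not None:
            shutil.rmtree(self.tmpdir)

store = _spillstore(maxsize=8)
store.setfile('small', b'tiny')               # stays in memory
store.setfile('big', b'much longer content')  # spilled to the temp dir
assert store.getfile('big') == b'much longer content'
store.close()
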
582 class repobackend(abstractbackend):
582 class repobackend(abstractbackend):
583 def __init__(self, ui, repo, ctx, store):
583 def __init__(self, ui, repo, ctx, store):
584 super(repobackend, self).__init__(ui)
584 super(repobackend, self).__init__(ui)
585 self.repo = repo
585 self.repo = repo
586 self.ctx = ctx
586 self.ctx = ctx
587 self.store = store
587 self.store = store
588 self.changed = set()
588 self.changed = set()
589 self.removed = set()
589 self.removed = set()
590 self.copied = {}
590 self.copied = {}
591
591
592 def _checkknown(self, fname):
592 def _checkknown(self, fname):
593 if fname not in self.ctx:
593 if fname not in self.ctx:
594 raise PatchError(_('cannot patch %s: file is not tracked') % fname)
594 raise PatchError(_('cannot patch %s: file is not tracked') % fname)
595
595
596 def getfile(self, fname):
596 def getfile(self, fname):
597 try:
597 try:
598 fctx = self.ctx[fname]
598 fctx = self.ctx[fname]
599 except error.LookupError:
599 except error.LookupError:
600 return None, None
600 return None, None
601 flags = fctx.flags()
601 flags = fctx.flags()
602 return fctx.data(), ('l' in flags, 'x' in flags)
602 return fctx.data(), ('l' in flags, 'x' in flags)
603
603
604 def setfile(self, fname, data, mode, copysource):
604 def setfile(self, fname, data, mode, copysource):
605 if copysource:
605 if copysource:
606 self._checkknown(copysource)
606 self._checkknown(copysource)
607 if data is None:
607 if data is None:
608 data = self.ctx[fname].data()
608 data = self.ctx[fname].data()
609 self.store.setfile(fname, data, mode, copysource)
609 self.store.setfile(fname, data, mode, copysource)
610 self.changed.add(fname)
610 self.changed.add(fname)
611 if copysource:
611 if copysource:
612 self.copied[fname] = copysource
612 self.copied[fname] = copysource
613
613
614 def unlink(self, fname):
614 def unlink(self, fname):
615 self._checkknown(fname)
615 self._checkknown(fname)
616 self.removed.add(fname)
616 self.removed.add(fname)
617
617
618 def exists(self, fname):
618 def exists(self, fname):
619 return fname in self.ctx
619 return fname in self.ctx
620
620
621 def close(self):
621 def close(self):
622 return self.changed | self.removed
622 return self.changed | self.removed
623
623
624 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
624 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
625 unidesc = re.compile('@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
625 unidesc = re.compile('@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
626 contextdesc = re.compile('(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
626 contextdesc = re.compile('(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
627 eolmodes = ['strict', 'crlf', 'lf', 'auto']
627 eolmodes = ['strict', 'crlf', 'lf', 'auto']
628
628
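# [Editor's note: illustrative sketch, not part of this module.] The
# unified-hunk header parsed by unidesc above carries the old and new start
# line and an optional length; a missing length means the range is one line
# long and is filled in as 1 by read_unified_hunk(). A quick standalone
# check of that format (the regex is restated here for illustration):

import re

_unidesc = re.compile(r'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
assert _unidesc.match('@@ -12,5 +12,7 @@').groups() == ('12', '5', '12', '7')
assert _unidesc.match('@@ -3 +3 @@').groups() == ('3', None, '3', None)
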
629 class patchfile(object):
629 class patchfile(object):
630 def __init__(self, ui, gp, backend, store, eolmode='strict'):
630 def __init__(self, ui, gp, backend, store, eolmode='strict'):
631 self.fname = gp.path
631 self.fname = gp.path
632 self.eolmode = eolmode
632 self.eolmode = eolmode
633 self.eol = None
633 self.eol = None
634 self.backend = backend
634 self.backend = backend
635 self.ui = ui
635 self.ui = ui
636 self.lines = []
636 self.lines = []
637 self.exists = False
637 self.exists = False
638 self.missing = True
638 self.missing = True
639 self.mode = gp.mode
639 self.mode = gp.mode
640 self.copysource = gp.oldpath
640 self.copysource = gp.oldpath
641 self.create = gp.op in ('ADD', 'COPY', 'RENAME')
641 self.create = gp.op in ('ADD', 'COPY', 'RENAME')
642 self.remove = gp.op == 'DELETE'
642 self.remove = gp.op == 'DELETE'
643 if self.copysource is None:
643 if self.copysource is None:
644 data, mode = backend.getfile(self.fname)
644 data, mode = backend.getfile(self.fname)
645 else:
645 else:
646 data, mode = store.getfile(self.copysource)[:2]
646 data, mode = store.getfile(self.copysource)[:2]
647 if data is not None:
647 if data is not None:
648 self.exists = self.copysource is None or backend.exists(self.fname)
648 self.exists = self.copysource is None or backend.exists(self.fname)
649 self.missing = False
649 self.missing = False
650 if data:
650 if data:
651 self.lines = mdiff.splitnewlines(data)
651 self.lines = mdiff.splitnewlines(data)
652 if self.mode is None:
652 if self.mode is None:
653 self.mode = mode
653 self.mode = mode
654 if self.lines:
654 if self.lines:
655 # Normalize line endings
655 # Normalize line endings
656 if self.lines[0].endswith('\r\n'):
656 if self.lines[0].endswith('\r\n'):
657 self.eol = '\r\n'
657 self.eol = '\r\n'
658 elif self.lines[0].endswith('\n'):
658 elif self.lines[0].endswith('\n'):
659 self.eol = '\n'
659 self.eol = '\n'
660 if eolmode != 'strict':
660 if eolmode != 'strict':
661 nlines = []
661 nlines = []
662 for l in self.lines:
662 for l in self.lines:
663 if l.endswith('\r\n'):
663 if l.endswith('\r\n'):
664 l = l[:-2] + '\n'
664 l = l[:-2] + '\n'
665 nlines.append(l)
665 nlines.append(l)
666 self.lines = nlines
666 self.lines = nlines
667 else:
667 else:
668 if self.create:
668 if self.create:
669 self.missing = False
669 self.missing = False
670 if self.mode is None:
670 if self.mode is None:
671 self.mode = (False, False)
671 self.mode = (False, False)
672 if self.missing:
672 if self.missing:
673 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
673 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
674 self.ui.warn(_("(use '--prefix' to apply patch relative to the "
674 self.ui.warn(_("(use '--prefix' to apply patch relative to the "
675 "current directory)\n"))
675 "current directory)\n"))
676
676
677 self.hash = {}
677 self.hash = {}
678 self.dirty = 0
678 self.dirty = 0
679 self.offset = 0
679 self.offset = 0
680 self.skew = 0
680 self.skew = 0
681 self.rej = []
681 self.rej = []
682 self.fileprinted = False
682 self.fileprinted = False
683 self.printfile(False)
683 self.printfile(False)
684 self.hunks = 0
684 self.hunks = 0
685
685
686 def writelines(self, fname, lines, mode):
686 def writelines(self, fname, lines, mode):
687 if self.eolmode == 'auto':
687 if self.eolmode == 'auto':
688 eol = self.eol
688 eol = self.eol
689 elif self.eolmode == 'crlf':
689 elif self.eolmode == 'crlf':
690 eol = '\r\n'
690 eol = '\r\n'
691 else:
691 else:
692 eol = '\n'
692 eol = '\n'
693
693
694 if self.eolmode != 'strict' and eol and eol != '\n':
694 if self.eolmode != 'strict' and eol and eol != '\n':
695 rawlines = []
695 rawlines = []
696 for l in lines:
696 for l in lines:
697 if l and l[-1] == '\n':
697 if l and l[-1] == '\n':
698 l = l[:-1] + eol
698 l = l[:-1] + eol
699 rawlines.append(l)
699 rawlines.append(l)
700 lines = rawlines
700 lines = rawlines
701
701
702 self.backend.setfile(fname, ''.join(lines), mode, self.copysource)
702 self.backend.setfile(fname, ''.join(lines), mode, self.copysource)
703
703
704 def printfile(self, warn):
704 def printfile(self, warn):
705 if self.fileprinted:
705 if self.fileprinted:
706 return
706 return
707 if warn or self.ui.verbose:
707 if warn or self.ui.verbose:
708 self.fileprinted = True
708 self.fileprinted = True
709 s = _("patching file %s\n") % self.fname
709 s = _("patching file %s\n") % self.fname
710 if warn:
710 if warn:
711 self.ui.warn(s)
711 self.ui.warn(s)
712 else:
712 else:
713 self.ui.note(s)
713 self.ui.note(s)
714
714
715
715
716 def findlines(self, l, linenum):
716 def findlines(self, l, linenum):
717 # looks through the hash and finds candidate lines. The
717 # looks through the hash and finds candidate lines. The
718 # result is a list of line numbers sorted based on distance
718 # result is a list of line numbers sorted based on distance
719 # from linenum
719 # from linenum
720
720
721 cand = self.hash.get(l, [])
721 cand = self.hash.get(l, [])
722 if len(cand) > 1:
722 if len(cand) > 1:
723 # re-sort our list of candidates by distance from linenum, nearest first.
723 # re-sort our list of candidates by distance from linenum, nearest first.
724 cand.sort(key=lambda x: abs(x - linenum))
724 cand.sort(key=lambda x: abs(x - linenum))
725 return cand
725 return cand
726
726
727 def write_rej(self):
727 def write_rej(self):
728 # our rejects are a little different from patch(1). This always
728 # our rejects are a little different from patch(1). This always
729 # creates rejects in the same form as the original patch. A file
729 # creates rejects in the same form as the original patch. A file
730 # header is inserted so that you can run the reject through patch again
730 # header is inserted so that you can run the reject through patch again
731 # without having to type the filename.
731 # without having to type the filename.
732 if not self.rej:
732 if not self.rej:
733 return
733 return
734 base = os.path.basename(self.fname)
734 base = os.path.basename(self.fname)
735 lines = ["--- %s\n+++ %s\n" % (base, base)]
735 lines = ["--- %s\n+++ %s\n" % (base, base)]
736 for x in self.rej:
736 for x in self.rej:
737 for l in x.hunk:
737 for l in x.hunk:
738 lines.append(l)
738 lines.append(l)
739 if l[-1:] != '\n':
739 if l[-1:] != '\n':
740 lines.append("\n\ No newline at end of file\n")
740 lines.append("\n\ No newline at end of file\n")
741 self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)
741 self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)
742
742
743 def apply(self, h):
743 def apply(self, h):
744 if not h.complete():
744 if not h.complete():
745 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
745 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
746 (h.number, h.desc, len(h.a), h.lena, len(h.b),
746 (h.number, h.desc, len(h.a), h.lena, len(h.b),
747 h.lenb))
747 h.lenb))
748
748
749 self.hunks += 1
749 self.hunks += 1
750
750
751 if self.missing:
751 if self.missing:
752 self.rej.append(h)
752 self.rej.append(h)
753 return -1
753 return -1
754
754
755 if self.exists and self.create:
755 if self.exists and self.create:
756 if self.copysource:
756 if self.copysource:
757 self.ui.warn(_("cannot create %s: destination already "
757 self.ui.warn(_("cannot create %s: destination already "
758 "exists\n") % self.fname)
758 "exists\n") % self.fname)
759 else:
759 else:
760 self.ui.warn(_("file %s already exists\n") % self.fname)
760 self.ui.warn(_("file %s already exists\n") % self.fname)
761 self.rej.append(h)
761 self.rej.append(h)
762 return -1
762 return -1
763
763
764 if isinstance(h, binhunk):
764 if isinstance(h, binhunk):
765 if self.remove:
765 if self.remove:
766 self.backend.unlink(self.fname)
766 self.backend.unlink(self.fname)
767 else:
767 else:
768 l = h.new(self.lines)
768 l = h.new(self.lines)
769 self.lines[:] = l
769 self.lines[:] = l
770 self.offset += len(l)
770 self.offset += len(l)
771 self.dirty = True
771 self.dirty = True
772 return 0
772 return 0
773
773
774 horig = h
774 horig = h
775 if (self.eolmode in ('crlf', 'lf')
775 if (self.eolmode in ('crlf', 'lf')
776 or self.eolmode == 'auto' and self.eol):
776 or self.eolmode == 'auto' and self.eol):
777 # If new eols are going to be normalized, then normalize
777 # If new eols are going to be normalized, then normalize
778 # hunk data before patching. Otherwise, preserve input
778 # hunk data before patching. Otherwise, preserve input
779 # line-endings.
779 # line-endings.
780 h = h.getnormalized()
780 h = h.getnormalized()
781
781
782 # fast case first, no offsets, no fuzz
782 # fast case first, no offsets, no fuzz
783 old, oldstart, new, newstart = h.fuzzit(0, False)
783 old, oldstart, new, newstart = h.fuzzit(0, False)
784 oldstart += self.offset
784 oldstart += self.offset
785 orig_start = oldstart
785 orig_start = oldstart
786 # if there's skew we want to emit the "(offset %d lines)" even
786 # if there's skew we want to emit the "(offset %d lines)" even
787 # when the hunk cleanly applies at start + skew, so skip the
787 # when the hunk cleanly applies at start + skew, so skip the
788 # fast case code
788 # fast case code
789 if (self.skew == 0 and
789 if (self.skew == 0 and
790 diffhelpers.testhunk(old, self.lines, oldstart) == 0):
790 diffhelpers.testhunk(old, self.lines, oldstart) == 0):
791 if self.remove:
791 if self.remove:
792 self.backend.unlink(self.fname)
792 self.backend.unlink(self.fname)
793 else:
793 else:
794 self.lines[oldstart:oldstart + len(old)] = new
794 self.lines[oldstart:oldstart + len(old)] = new
795 self.offset += len(new) - len(old)
795 self.offset += len(new) - len(old)
796 self.dirty = True
796 self.dirty = True
797 return 0
797 return 0
798
798
799 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
799 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
800 self.hash = {}
800 self.hash = {}
801 for x, s in enumerate(self.lines):
801 for x, s in enumerate(self.lines):
802 self.hash.setdefault(s, []).append(x)
802 self.hash.setdefault(s, []).append(x)
803
803
804 for fuzzlen in xrange(self.ui.configint("patch", "fuzz") + 1):
804 for fuzzlen in xrange(self.ui.configint("patch", "fuzz") + 1):
805 for toponly in [True, False]:
805 for toponly in [True, False]:
806 old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
806 old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
807 oldstart = oldstart + self.offset + self.skew
807 oldstart = oldstart + self.offset + self.skew
808 oldstart = min(oldstart, len(self.lines))
808 oldstart = min(oldstart, len(self.lines))
809 if old:
809 if old:
810 cand = self.findlines(old[0][1:], oldstart)
810 cand = self.findlines(old[0][1:], oldstart)
811 else:
811 else:
812 # Only adding lines with no or fuzzed context, just
812 # Only adding lines with no or fuzzed context, just
813 # take the skew into account
813 # take the skew into account
814 cand = [oldstart]
814 cand = [oldstart]
815
815
816 for l in cand:
816 for l in cand:
817 if not old or diffhelpers.testhunk(old, self.lines, l) == 0:
817 if not old or diffhelpers.testhunk(old, self.lines, l) == 0:
818 self.lines[l : l + len(old)] = new
818 self.lines[l : l + len(old)] = new
819 self.offset += len(new) - len(old)
819 self.offset += len(new) - len(old)
820 self.skew = l - orig_start
820 self.skew = l - orig_start
821 self.dirty = True
821 self.dirty = True
822 offset = l - orig_start - fuzzlen
822 offset = l - orig_start - fuzzlen
823 if fuzzlen:
823 if fuzzlen:
824 msg = _("Hunk #%d succeeded at %d "
824 msg = _("Hunk #%d succeeded at %d "
825 "with fuzz %d "
825 "with fuzz %d "
826 "(offset %d lines).\n")
826 "(offset %d lines).\n")
827 self.printfile(True)
827 self.printfile(True)
828 self.ui.warn(msg %
828 self.ui.warn(msg %
829 (h.number, l + 1, fuzzlen, offset))
829 (h.number, l + 1, fuzzlen, offset))
830 else:
830 else:
831 msg = _("Hunk #%d succeeded at %d "
831 msg = _("Hunk #%d succeeded at %d "
832 "(offset %d lines).\n")
832 "(offset %d lines).\n")
833 self.ui.note(msg % (h.number, l + 1, offset))
833 self.ui.note(msg % (h.number, l + 1, offset))
834 return fuzzlen
834 return fuzzlen
835 self.printfile(True)
835 self.printfile(True)
836 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
836 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
837 self.rej.append(horig)
837 self.rej.append(horig)
838 return -1
838 return -1
839
839
840 def close(self):
840 def close(self):
841 if self.dirty:
841 if self.dirty:
842 self.writelines(self.fname, self.lines, self.mode)
842 self.writelines(self.fname, self.lines, self.mode)
843 self.write_rej()
843 self.write_rej()
844 return len(self.rej)
844 return len(self.rej)
845
845
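# [Editor's note: illustrative sketch, not part of this module.] When
# eolmode is not 'strict', patchfile above matches against LF-normalized
# lines and re-applies an ending on write: 'crlf' and 'lf' force one, while
# 'auto' reuses whatever the first line of the target file ended with.
# A small standalone rendering of that write-side choice:

def _choose_eol(eolmode, detected):
    if eolmode == 'auto':
        return detected      # e.g. '\r\n' sniffed from the file's first line
    if eolmode == 'crlf':
        return '\r\n'
    return '\n'              # 'lf' (the 'strict' case skips conversion anyway)

assert _choose_eol('auto', '\r\n') == '\r\n'
assert _choose_eol('lf', '\r\n') == '\n'
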
846 class header(object):
846 class header(object):
847 """patch header
847 """patch header
848 """
848 """
849 diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
849 diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
850 diff_re = re.compile('diff -r .* (.*)$')
850 diff_re = re.compile('diff -r .* (.*)$')
851 allhunks_re = re.compile('(?:index|deleted file) ')
851 allhunks_re = re.compile('(?:index|deleted file) ')
852 pretty_re = re.compile('(?:new file|deleted file) ')
852 pretty_re = re.compile('(?:new file|deleted file) ')
853 special_re = re.compile('(?:index|deleted|copy|rename) ')
853 special_re = re.compile('(?:index|deleted|copy|rename) ')
854 newfile_re = re.compile('(?:new file)')
854 newfile_re = re.compile('(?:new file)')
855
855
856 def __init__(self, header):
856 def __init__(self, header):
857 self.header = header
857 self.header = header
858 self.hunks = []
858 self.hunks = []
859
859
860 def binary(self):
860 def binary(self):
861 return any(h.startswith('index ') for h in self.header)
861 return any(h.startswith('index ') for h in self.header)
862
862
863 def pretty(self, fp):
863 def pretty(self, fp):
864 for h in self.header:
864 for h in self.header:
865 if h.startswith('index '):
865 if h.startswith('index '):
866 fp.write(_('this modifies a binary file (all or nothing)\n'))
866 fp.write(_('this modifies a binary file (all or nothing)\n'))
867 break
867 break
868 if self.pretty_re.match(h):
868 if self.pretty_re.match(h):
869 fp.write(h)
869 fp.write(h)
870 if self.binary():
870 if self.binary():
871 fp.write(_('this is a binary file\n'))
871 fp.write(_('this is a binary file\n'))
872 break
872 break
873 if h.startswith('---'):
873 if h.startswith('---'):
874 fp.write(_('%d hunks, %d lines changed\n') %
874 fp.write(_('%d hunks, %d lines changed\n') %
875 (len(self.hunks),
875 (len(self.hunks),
876 sum([max(h.added, h.removed) for h in self.hunks])))
876 sum([max(h.added, h.removed) for h in self.hunks])))
877 break
877 break
878 fp.write(h)
878 fp.write(h)
879
879
880 def write(self, fp):
880 def write(self, fp):
881 fp.write(''.join(self.header))
881 fp.write(''.join(self.header))
882
882
883 def allhunks(self):
883 def allhunks(self):
884 return any(self.allhunks_re.match(h) for h in self.header)
884 return any(self.allhunks_re.match(h) for h in self.header)
885
885
886 def files(self):
886 def files(self):
887 match = self.diffgit_re.match(self.header[0])
887 match = self.diffgit_re.match(self.header[0])
888 if match:
888 if match:
889 fromfile, tofile = match.groups()
889 fromfile, tofile = match.groups()
890 if fromfile == tofile:
890 if fromfile == tofile:
891 return [fromfile]
891 return [fromfile]
892 return [fromfile, tofile]
892 return [fromfile, tofile]
893 else:
893 else:
894 return self.diff_re.match(self.header[0]).groups()
894 return self.diff_re.match(self.header[0]).groups()
895
895
896 def filename(self):
896 def filename(self):
897 return self.files()[-1]
897 return self.files()[-1]
898
898
899 def __repr__(self):
899 def __repr__(self):
900 return '<header %s>' % (' '.join(map(repr, self.files())))
900 return '<header %s>' % (' '.join(map(repr, self.files())))
901
901
902 def isnewfile(self):
902 def isnewfile(self):
903 return any(self.newfile_re.match(h) for h in self.header)
903 return any(self.newfile_re.match(h) for h in self.header)
904
904
905 def special(self):
905 def special(self):
906 # Special files are shown only at the header level and not at the hunk
906 # Special files are shown only at the header level and not at the hunk
907 # level; for example, a file that has been deleted is a special file.
907 # level; for example, a file that has been deleted is a special file.
908 # The user cannot change the content of the operation: in the case of
908 # The user cannot change the content of the operation: in the case of
909 # a deleted file they have to take the deletion or leave it; they
909 # a deleted file they have to take the deletion or leave it; they
910 # cannot take only part of it.
910 # cannot take only part of it.
911 # Newly added files are special if they are empty; they are not special
911 # Newly added files are special if they are empty; they are not special
912 # if they have some content, as we want to be able to change it.
912 # if they have some content, as we want to be able to change it.
913 nocontent = len(self.header) == 2
913 nocontent = len(self.header) == 2
914 emptynewfile = self.isnewfile() and nocontent
914 emptynewfile = self.isnewfile() and nocontent
915 return emptynewfile or \
915 return emptynewfile or \
916 any(self.special_re.match(h) for h in self.header)
916 any(self.special_re.match(h) for h in self.header)
917
917
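# [Editor's note: illustrative sketch, not part of this module.] files()
# above recovers the file name(s) from the first header line: a git-style
# "diff --git a/OLD b/NEW" yields one name when OLD == NEW and both names
# for a copy/rename. A standalone check (the regex is restated here):

import re

_diffgit = re.compile(r'diff --git a/(.*) b/(.*)$')
same = _diffgit.match('diff --git a/hg/util.py b/hg/util.py').groups()
moved = _diffgit.match('diff --git a/old.py b/new.py').groups()
assert same == ('hg/util.py', 'hg/util.py')   # files() would return one name
assert moved == ('old.py', 'new.py')          # files() would return both
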
918 class recordhunk(object):
918 class recordhunk(object):
919 """patch hunk
919 """patch hunk
920
920
921 XXX shouldn't we merge this with the other hunk class?
921 XXX shouldn't we merge this with the other hunk class?
922 """
922 """
923
923
924 def __init__(self, header, fromline, toline, proc, before, hunk, after,
924 def __init__(self, header, fromline, toline, proc, before, hunk, after,
925 maxcontext=None):
925 maxcontext=None):
926 def trimcontext(lines, reverse=False):
926 def trimcontext(lines, reverse=False):
927 if maxcontext is not None:
927 if maxcontext is not None:
928 delta = len(lines) - maxcontext
928 delta = len(lines) - maxcontext
929 if delta > 0:
929 if delta > 0:
930 if reverse:
930 if reverse:
931 return delta, lines[delta:]
931 return delta, lines[delta:]
932 else:
932 else:
933 return delta, lines[:maxcontext]
933 return delta, lines[:maxcontext]
934 return 0, lines
934 return 0, lines
935
935
936 self.header = header
936 self.header = header
937 trimedbefore, self.before = trimcontext(before, True)
937 trimedbefore, self.before = trimcontext(before, True)
938 self.fromline = fromline + trimedbefore
938 self.fromline = fromline + trimedbefore
939 self.toline = toline + trimedbefore
939 self.toline = toline + trimedbefore
940 _trimedafter, self.after = trimcontext(after, False)
940 _trimedafter, self.after = trimcontext(after, False)
941 self.proc = proc
941 self.proc = proc
942 self.hunk = hunk
942 self.hunk = hunk
943 self.added, self.removed = self.countchanges(self.hunk)
943 self.added, self.removed = self.countchanges(self.hunk)
944
944
945 def __eq__(self, v):
945 def __eq__(self, v):
946 if not isinstance(v, recordhunk):
946 if not isinstance(v, recordhunk):
947 return False
947 return False
948
948
949 return ((v.hunk == self.hunk) and
949 return ((v.hunk == self.hunk) and
950 (v.proc == self.proc) and
950 (v.proc == self.proc) and
951 (self.fromline == v.fromline) and
951 (self.fromline == v.fromline) and
952 (self.header.files() == v.header.files()))
952 (self.header.files() == v.header.files()))
953
953
954 def __hash__(self):
954 def __hash__(self):
955 return hash((tuple(self.hunk),
955 return hash((tuple(self.hunk),
956 tuple(self.header.files()),
956 tuple(self.header.files()),
957 self.fromline,
957 self.fromline,
958 self.proc))
958 self.proc))
959
959
960 def countchanges(self, hunk):
960 def countchanges(self, hunk):
961 """hunk -> (n+,n-)"""
961 """hunk -> (n+,n-)"""
962 add = len([h for h in hunk if h.startswith('+')])
962 add = len([h for h in hunk if h.startswith('+')])
963 rem = len([h for h in hunk if h.startswith('-')])
963 rem = len([h for h in hunk if h.startswith('-')])
964 return add, rem
964 return add, rem
965
965
966 def reversehunk(self):
966 def reversehunk(self):
967 """return another recordhunk which is the reverse of the hunk
967 """return another recordhunk which is the reverse of the hunk
968
968
969 If this hunk is diff(A, B), the returned hunk is diff(B, A). To do
969 If this hunk is diff(A, B), the returned hunk is diff(B, A). To do
970 that, swap fromline/toline and +/- signs while keeping other things
970 that, swap fromline/toline and +/- signs while keeping other things
971 unchanged.
971 unchanged.
972 """
972 """
973 m = {'+': '-', '-': '+', '\\': '\\'}
973 m = {'+': '-', '-': '+', '\\': '\\'}
974 hunk = ['%s%s' % (m[l[0:1]], l[1:]) for l in self.hunk]
974 hunk = ['%s%s' % (m[l[0:1]], l[1:]) for l in self.hunk]
975 return recordhunk(self.header, self.toline, self.fromline, self.proc,
975 return recordhunk(self.header, self.toline, self.fromline, self.proc,
976 self.before, hunk, self.after)
976 self.before, hunk, self.after)
977
977
978 def write(self, fp):
978 def write(self, fp):
979 delta = len(self.before) + len(self.after)
979 delta = len(self.before) + len(self.after)
980 if self.after and self.after[-1] == '\\ No newline at end of file\n':
980 if self.after and self.after[-1] == '\\ No newline at end of file\n':
981 delta -= 1
981 delta -= 1
982 fromlen = delta + self.removed
982 fromlen = delta + self.removed
983 tolen = delta + self.added
983 tolen = delta + self.added
984 fp.write('@@ -%d,%d +%d,%d @@%s\n' %
984 fp.write('@@ -%d,%d +%d,%d @@%s\n' %
985 (self.fromline, fromlen, self.toline, tolen,
985 (self.fromline, fromlen, self.toline, tolen,
986 self.proc and (' ' + self.proc)))
986 self.proc and (' ' + self.proc)))
987 fp.write(''.join(self.before + self.hunk + self.after))
987 fp.write(''.join(self.before + self.hunk + self.after))
988
988
989 pretty = write
989 pretty = write
990
990
991 def filename(self):
991 def filename(self):
992 return self.header.filename()
992 return self.header.filename()
993
993
994 def __repr__(self):
994 def __repr__(self):
995 return '<hunk %r@%d>' % (self.filename(), self.fromline)
995 return '<hunk %r@%d>' % (self.filename(), self.fromline)
996
996
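# [Editor's note: illustrative sketch, not part of this module.] Reversing a
# recordhunk, as described in the reversehunk() docstring above, amounts to
# swapping fromline/toline and flipping the leading '+'/'-' of every changed
# line ('\' no-newline markers stay as they are); context lines live in
# before/after and are untouched. The sign flip in isolation:

m = {'+': '-', '-': '+', '\\': '\\'}
forward = ['-print "hi"\n', '+print("hi")\n']
reverse = ['%s%s' % (m[l[0:1]], l[1:]) for l in forward]
assert reverse == ['+print "hi"\n', '-print("hi")\n']
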
997 def getmessages():
997 def getmessages():
998 return {
998 return {
999 'multiple': {
999 'multiple': {
1000 'apply': _("apply change %d/%d to '%s'?"),
1000 'discard': _("discard change %d/%d to '%s'?"),
1001 'discard': _("discard change %d/%d to '%s'?"),
1001 'record': _("record change %d/%d to '%s'?"),
1002 'record': _("record change %d/%d to '%s'?"),
1002 'revert': _("revert change %d/%d to '%s'?"),
1003 },
1003 },
1004 'single': {
1004 'single': {
1005 'apply': _("apply this change to '%s'?"),
1005 'discard': _("discard this change to '%s'?"),
1006 'discard': _("discard this change to '%s'?"),
1006 'record': _("record this change to '%s'?"),
1007 'record': _("record this change to '%s'?"),
1007 'revert': _("revert this change to '%s'?"),
1008 },
1008 },
1009 'help': {
1009 'help': {
1010 'apply': _('[Ynesfdaq?]'
1011 '$$ &Yes, apply this change'
1012 '$$ &No, skip this change'
1013 '$$ &Edit this change manually'
1014 '$$ &Skip remaining changes to this file'
1015 '$$ Apply remaining changes to this &file'
1016 '$$ &Done, skip remaining changes and files'
1017 '$$ Apply &all changes to all remaining files'
1018 '$$ &Quit, applying no changes'
1019 '$$ &? (display help)'),
1010 'discard': _('[Ynesfdaq?]'
1020 'discard': _('[Ynesfdaq?]'
1011 '$$ &Yes, discard this change'
1021 '$$ &Yes, discard this change'
1012 '$$ &No, skip this change'
1022 '$$ &No, skip this change'
1013 '$$ &Edit this change manually'
1023 '$$ &Edit this change manually'
1014 '$$ &Skip remaining changes to this file'
1024 '$$ &Skip remaining changes to this file'
1015 '$$ Discard remaining changes to this &file'
1025 '$$ Discard remaining changes to this &file'
1016 '$$ &Done, skip remaining changes and files'
1026 '$$ &Done, skip remaining changes and files'
1017 '$$ Discard &all changes to all remaining files'
1027 '$$ Discard &all changes to all remaining files'
1018 '$$ &Quit, discarding no changes'
1028 '$$ &Quit, discarding no changes'
1019 '$$ &? (display help)'),
1029 '$$ &? (display help)'),
1020 'record': _('[Ynesfdaq?]'
1030 'record': _('[Ynesfdaq?]'
1021 '$$ &Yes, record this change'
1031 '$$ &Yes, record this change'
1022 '$$ &No, skip this change'
1032 '$$ &No, skip this change'
1023 '$$ &Edit this change manually'
1033 '$$ &Edit this change manually'
1024 '$$ &Skip remaining changes to this file'
1034 '$$ &Skip remaining changes to this file'
1025 '$$ Record remaining changes to this &file'
1035 '$$ Record remaining changes to this &file'
1026 '$$ &Done, skip remaining changes and files'
1036 '$$ &Done, skip remaining changes and files'
1027 '$$ Record &all changes to all remaining files'
1037 '$$ Record &all changes to all remaining files'
1028 '$$ &Quit, recording no changes'
1038 '$$ &Quit, recording no changes'
1029 '$$ &? (display help)'),
1039 '$$ &? (display help)'),
1030 'revert': _('[Ynesfdaq?]'
1031 '$$ &Yes, revert this change'
1032 '$$ &No, skip this change'
1033 '$$ &Edit this change manually'
1034 '$$ &Skip remaining changes to this file'
1035 '$$ Revert remaining changes to this &file'
1036 '$$ &Done, skip remaining changes and files'
1037 '$$ Revert &all changes to all remaining files'
1038 '$$ &Quit, reverting no changes'
1039 '$$ &? (display help)')
1040 }
1040 }
1041 }
1041 }
1042
1042
1043 def filterpatch(ui, headers, operation=None):
1043 def filterpatch(ui, headers, operation=None):
1044 """Interactively filter patch chunks into applied-only chunks"""
1044 """Interactively filter patch chunks into applied-only chunks"""
1045 messages = getmessages()
1045 messages = getmessages()
1046
1046
1047 if operation is None:
1047 if operation is None:
1048 operation = 'record'
1048 operation = 'record'
1049
1049
1050 def prompt(skipfile, skipall, query, chunk):
1050 def prompt(skipfile, skipall, query, chunk):
1051 """prompt query, and process base inputs
1051 """prompt query, and process base inputs
1052
1052
1053 - y/n for the rest of file
1053 - y/n for the rest of file
1054 - y/n for the rest
1054 - y/n for the rest
1055 - ? (help)
1055 - ? (help)
1056 - q (quit)
1056 - q (quit)
1057
1057
1058 Return True/False and possibly updated skipfile and skipall.
1058 Return True/False and possibly updated skipfile and skipall.
1059 """
1059 """
1060 newpatches = None
1060 newpatches = None
1061 if skipall is not None:
1061 if skipall is not None:
1062 return skipall, skipfile, skipall, newpatches
1062 return skipall, skipfile, skipall, newpatches
1063 if skipfile is not None:
1063 if skipfile is not None:
1064 return skipfile, skipfile, skipall, newpatches
1064 return skipfile, skipfile, skipall, newpatches
1065 while True:
1065 while True:
1066 resps = messages['help'][operation]
1066 resps = messages['help'][operation]
1067 r = ui.promptchoice("%s %s" % (query, resps))
1067 r = ui.promptchoice("%s %s" % (query, resps))
1068 ui.write("\n")
1068 ui.write("\n")
1069 if r == 8: # ?
1069 if r == 8: # ?
1070 for c, t in ui.extractchoices(resps)[1]:
1070 for c, t in ui.extractchoices(resps)[1]:
1071 ui.write('%s - %s\n' % (c, encoding.lower(t)))
1071 ui.write('%s - %s\n' % (c, encoding.lower(t)))
1072 continue
1072 continue
1073 elif r == 0: # yes
1073 elif r == 0: # yes
1074 ret = True
1074 ret = True
1075 elif r == 1: # no
1075 elif r == 1: # no
1076 ret = False
1076 ret = False
1077 elif r == 2: # Edit patch
1077 elif r == 2: # Edit patch
1078 if chunk is None:
1078 if chunk is None:
1079 ui.write(_('cannot edit patch for whole file'))
1079 ui.write(_('cannot edit patch for whole file'))
1080 ui.write("\n")
1080 ui.write("\n")
1081 continue
1081 continue
1082 if chunk.header.binary():
1082 if chunk.header.binary():
1083 ui.write(_('cannot edit patch for binary file'))
1083 ui.write(_('cannot edit patch for binary file'))
1084 ui.write("\n")
1084 ui.write("\n")
1085 continue
1085 continue
1086 # Patch comment based on the Git one (based on comment at end of
1086 # Patch comment based on the Git one (based on comment at end of
1087 # https://mercurial-scm.org/wiki/RecordExtension)
1087 # https://mercurial-scm.org/wiki/RecordExtension)
1088 phelp = '---' + _("""
1088 phelp = '---' + _("""
1089 To remove '-' lines, make them ' ' lines (context).
1089 To remove '-' lines, make them ' ' lines (context).
1090 To remove '+' lines, delete them.
1090 To remove '+' lines, delete them.
1091 Lines starting with # will be removed from the patch.
1091 Lines starting with # will be removed from the patch.
1092
1092
1093 If the patch applies cleanly, the edited hunk will immediately be
1093 If the patch applies cleanly, the edited hunk will immediately be
1094 added to the record list. If it does not apply cleanly, a rejects
1094 added to the record list. If it does not apply cleanly, a rejects
1095 file will be generated: you can use that when you try again. If
1095 file will be generated: you can use that when you try again. If
1096 all lines of the hunk are removed, then the edit is aborted and
1096 all lines of the hunk are removed, then the edit is aborted and
1097 the hunk is left unchanged.
1097 the hunk is left unchanged.
1098 """)
1098 """)
1099 (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
1099 (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-",
1100 suffix=".diff", text=True)
1100 suffix=".diff", text=True)
1101 ncpatchfp = None
1101 ncpatchfp = None
1102 try:
1102 try:
1103 # Write the initial patch
1103 # Write the initial patch
1104 f = os.fdopen(patchfd, pycompat.sysstr("w"))
1104 f = os.fdopen(patchfd, pycompat.sysstr("w"))
1105 chunk.header.write(f)
1105 chunk.header.write(f)
1106 chunk.write(f)
1106 chunk.write(f)
1107 f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
1107 f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
1108 f.close()
1108 f.close()
1109 # Start the editor and wait for it to complete
1109 # Start the editor and wait for it to complete
1110 editor = ui.geteditor()
1110 editor = ui.geteditor()
1111 ret = ui.system("%s \"%s\"" % (editor, patchfn),
1111 ret = ui.system("%s \"%s\"" % (editor, patchfn),
1112 environ={'HGUSER': ui.username()},
1112 environ={'HGUSER': ui.username()},
1113 blockedtag='filterpatch')
1113 blockedtag='filterpatch')
1114 if ret != 0:
1114 if ret != 0:
1115 ui.warn(_("editor exited with exit code %d\n") % ret)
1115 ui.warn(_("editor exited with exit code %d\n") % ret)
1116 continue
1116 continue
1117 # Remove comment lines
1117 # Remove comment lines
1118 patchfp = open(patchfn)
1118 patchfp = open(patchfn)
1119 ncpatchfp = stringio()
1119 ncpatchfp = stringio()
1120 for line in util.iterfile(patchfp):
1120 for line in util.iterfile(patchfp):
1121 if not line.startswith('#'):
1121 if not line.startswith('#'):
1122 ncpatchfp.write(line)
1122 ncpatchfp.write(line)
1123 patchfp.close()
1123 patchfp.close()
1124 ncpatchfp.seek(0)
1124 ncpatchfp.seek(0)
1125 newpatches = parsepatch(ncpatchfp)
1125 newpatches = parsepatch(ncpatchfp)
1126 finally:
1126 finally:
1127 os.unlink(patchfn)
1127 os.unlink(patchfn)
1128 del ncpatchfp
1128 del ncpatchfp
1129 # Signal that the chunk shouldn't be applied as-is, but
1129 # Signal that the chunk shouldn't be applied as-is, but
1130 # provide the new patch to be used instead.
1130 # provide the new patch to be used instead.
1131 ret = False
1131 ret = False
1132 elif r == 3: # Skip
1132 elif r == 3: # Skip
1133 ret = skipfile = False
1133 ret = skipfile = False
1134 elif r == 4: # file (Record remaining)
1134 elif r == 4: # file (Record remaining)
1135 ret = skipfile = True
1135 ret = skipfile = True
1136 elif r == 5: # done, skip remaining
1136 elif r == 5: # done, skip remaining
1137 ret = skipall = False
1137 ret = skipall = False
1138 elif r == 6: # all
1138 elif r == 6: # all
1139 ret = skipall = True
1139 ret = skipall = True
1140 elif r == 7: # quit
1140 elif r == 7: # quit
1141 raise error.Abort(_('user quit'))
1141 raise error.Abort(_('user quit'))
1142 return ret, skipfile, skipall, newpatches
1142 return ret, skipfile, skipall, newpatches
1143
1143
1144 seen = set()
1144 seen = set()
1145 applied = {} # 'filename' -> [] of chunks
1145 applied = {} # 'filename' -> [] of chunks
1146 skipfile, skipall = None, None
1146 skipfile, skipall = None, None
1147 pos, total = 1, sum(len(h.hunks) for h in headers)
1147 pos, total = 1, sum(len(h.hunks) for h in headers)
1148 for h in headers:
1148 for h in headers:
1149 pos += len(h.hunks)
1149 pos += len(h.hunks)
1150 skipfile = None
1150 skipfile = None
1151 fixoffset = 0
1151 fixoffset = 0
1152 hdr = ''.join(h.header)
1152 hdr = ''.join(h.header)
1153 if hdr in seen:
1153 if hdr in seen:
1154 continue
1154 continue
1155 seen.add(hdr)
1155 seen.add(hdr)
1156 if skipall is None:
1156 if skipall is None:
1157 h.pretty(ui)
1157 h.pretty(ui)
1158 msg = (_('examine changes to %s?') %
1158 msg = (_('examine changes to %s?') %
1159 _(' and ').join("'%s'" % f for f in h.files()))
1159 _(' and ').join("'%s'" % f for f in h.files()))
1160 r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
1160 r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
1161 if not r:
1161 if not r:
1162 continue
1162 continue
1163 applied[h.filename()] = [h]
1163 applied[h.filename()] = [h]
1164 if h.allhunks():
1164 if h.allhunks():
1165 applied[h.filename()] += h.hunks
1165 applied[h.filename()] += h.hunks
1166 continue
1166 continue
1167 for i, chunk in enumerate(h.hunks):
1167 for i, chunk in enumerate(h.hunks):
1168 if skipfile is None and skipall is None:
1168 if skipfile is None and skipall is None:
1169 chunk.pretty(ui)
1169 chunk.pretty(ui)
1170 if total == 1:
1170 if total == 1:
1171 msg = messages['single'][operation] % chunk.filename()
1171 msg = messages['single'][operation] % chunk.filename()
1172 else:
1172 else:
1173 idx = pos - len(h.hunks) + i
1173 idx = pos - len(h.hunks) + i
1174 msg = messages['multiple'][operation] % (idx, total,
1174 msg = messages['multiple'][operation] % (idx, total,
1175 chunk.filename())
1175 chunk.filename())
1176 r, skipfile, skipall, newpatches = prompt(skipfile,
1176 r, skipfile, skipall, newpatches = prompt(skipfile,
1177 skipall, msg, chunk)
1177 skipall, msg, chunk)
1178 if r:
1178 if r:
1179 if fixoffset:
1179 if fixoffset:
1180 chunk = copy.copy(chunk)
1180 chunk = copy.copy(chunk)
1181 chunk.toline += fixoffset
1181 chunk.toline += fixoffset
1182 applied[chunk.filename()].append(chunk)
1182 applied[chunk.filename()].append(chunk)
1183 elif newpatches is not None:
1183 elif newpatches is not None:
1184 for newpatch in newpatches:
1184 for newpatch in newpatches:
1185 for newhunk in newpatch.hunks:
1185 for newhunk in newpatch.hunks:
1186 if fixoffset:
1186 if fixoffset:
1187 newhunk.toline += fixoffset
1187 newhunk.toline += fixoffset
1188 applied[newhunk.filename()].append(newhunk)
1188 applied[newhunk.filename()].append(newhunk)
1189 else:
1189 else:
1190 fixoffset += chunk.removed - chunk.added
1190 fixoffset += chunk.removed - chunk.added
1191 return (sum([h for h in applied.itervalues()
1191 return (sum([h for h in applied.itervalues()
1192 if h[0].special() or len(h) > 1], []), {})
1192 if h[0].special() or len(h) > 1], []), {})
1193 class hunk(object):
1193 class hunk(object):
1194 def __init__(self, desc, num, lr, context):
1194 def __init__(self, desc, num, lr, context):
1195 self.number = num
1195 self.number = num
1196 self.desc = desc
1196 self.desc = desc
1197 self.hunk = [desc]
1197 self.hunk = [desc]
1198 self.a = []
1198 self.a = []
1199 self.b = []
1199 self.b = []
1200 self.starta = self.lena = None
1200 self.starta = self.lena = None
1201 self.startb = self.lenb = None
1201 self.startb = self.lenb = None
1202 if lr is not None:
1202 if lr is not None:
1203 if context:
1203 if context:
1204 self.read_context_hunk(lr)
1204 self.read_context_hunk(lr)
1205 else:
1205 else:
1206 self.read_unified_hunk(lr)
1206 self.read_unified_hunk(lr)
1207
1207
1208 def getnormalized(self):
1208 def getnormalized(self):
1209 """Return a copy with line endings normalized to LF."""
1209 """Return a copy with line endings normalized to LF."""
1210
1210
1211 def normalize(lines):
1211 def normalize(lines):
1212 nlines = []
1212 nlines = []
1213 for line in lines:
1213 for line in lines:
1214 if line.endswith('\r\n'):
1214 if line.endswith('\r\n'):
1215 line = line[:-2] + '\n'
1215 line = line[:-2] + '\n'
1216 nlines.append(line)
1216 nlines.append(line)
1217 return nlines
1217 return nlines
1218
1218
1219 # Dummy object, it is rebuilt manually
1219 # Dummy object, it is rebuilt manually
1220 nh = hunk(self.desc, self.number, None, None)
1220 nh = hunk(self.desc, self.number, None, None)
1221 nh.number = self.number
1221 nh.number = self.number
1222 nh.desc = self.desc
1222 nh.desc = self.desc
1223 nh.hunk = self.hunk
1223 nh.hunk = self.hunk
1224 nh.a = normalize(self.a)
1224 nh.a = normalize(self.a)
1225 nh.b = normalize(self.b)
1225 nh.b = normalize(self.b)
1226 nh.starta = self.starta
1226 nh.starta = self.starta
1227 nh.startb = self.startb
1227 nh.startb = self.startb
1228 nh.lena = self.lena
1228 nh.lena = self.lena
1229 nh.lenb = self.lenb
1229 nh.lenb = self.lenb
1230 return nh
1230 return nh
1231
1231
1232 def read_unified_hunk(self, lr):
1232 def read_unified_hunk(self, lr):
1233 m = unidesc.match(self.desc)
1233 m = unidesc.match(self.desc)
1234 if not m:
1234 if not m:
1235 raise PatchError(_("bad hunk #%d") % self.number)
1235 raise PatchError(_("bad hunk #%d") % self.number)
1236 self.starta, self.lena, self.startb, self.lenb = m.groups()
1236 self.starta, self.lena, self.startb, self.lenb = m.groups()
1237 if self.lena is None:
1237 if self.lena is None:
1238 self.lena = 1
1238 self.lena = 1
1239 else:
1239 else:
1240 self.lena = int(self.lena)
1240 self.lena = int(self.lena)
1241 if self.lenb is None:
1241 if self.lenb is None:
1242 self.lenb = 1
1242 self.lenb = 1
1243 else:
1243 else:
1244 self.lenb = int(self.lenb)
1244 self.lenb = int(self.lenb)
1245 self.starta = int(self.starta)
1245 self.starta = int(self.starta)
1246 self.startb = int(self.startb)
1246 self.startb = int(self.startb)
1247 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a,
1247 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a,
1248 self.b)
1248 self.b)
1249 # if we hit eof before finishing out the hunk, the last line will
1249 # if we hit eof before finishing out the hunk, the last line will
1250 # be zero length. Let's try to fix it up.
1250 # be zero length. Let's try to fix it up.
1251 while len(self.hunk[-1]) == 0:
1251 while len(self.hunk[-1]) == 0:
1252 del self.hunk[-1]
1252 del self.hunk[-1]
1253 del self.a[-1]
1253 del self.a[-1]
1254 del self.b[-1]
1254 del self.b[-1]
1255 self.lena -= 1
1255 self.lena -= 1
1256 self.lenb -= 1
1256 self.lenb -= 1
1257 self._fixnewline(lr)
1257 self._fixnewline(lr)
1258
1258
1259 def read_context_hunk(self, lr):
1259 def read_context_hunk(self, lr):
1260 self.desc = lr.readline()
1260 self.desc = lr.readline()
1261 m = contextdesc.match(self.desc)
1261 m = contextdesc.match(self.desc)
1262 if not m:
1262 if not m:
1263 raise PatchError(_("bad hunk #%d") % self.number)
1263 raise PatchError(_("bad hunk #%d") % self.number)
1264 self.starta, aend = m.groups()
1264 self.starta, aend = m.groups()
1265 self.starta = int(self.starta)
1265 self.starta = int(self.starta)
1266 if aend is None:
1266 if aend is None:
1267 aend = self.starta
1267 aend = self.starta
1268 self.lena = int(aend) - self.starta
1268 self.lena = int(aend) - self.starta
1269 if self.starta:
1269 if self.starta:
1270 self.lena += 1
1270 self.lena += 1
1271 for x in xrange(self.lena):
1271 for x in xrange(self.lena):
1272 l = lr.readline()
1272 l = lr.readline()
1273 if l.startswith('---'):
1273 if l.startswith('---'):
1274 # lines addition, old block is empty
1274 # lines addition, old block is empty
1275 lr.push(l)
1275 lr.push(l)
1276 break
1276 break
1277 s = l[2:]
1277 s = l[2:]
1278 if l.startswith('- ') or l.startswith('! '):
1278 if l.startswith('- ') or l.startswith('! '):
1279 u = '-' + s
1279 u = '-' + s
1280 elif l.startswith(' '):
1280 elif l.startswith(' '):
1281 u = ' ' + s
1281 u = ' ' + s
1282 else:
1282 else:
1283 raise PatchError(_("bad hunk #%d old text line %d") %
1283 raise PatchError(_("bad hunk #%d old text line %d") %
1284 (self.number, x))
1284 (self.number, x))
1285 self.a.append(u)
1285 self.a.append(u)
1286 self.hunk.append(u)
1286 self.hunk.append(u)
1287
1287
1288 l = lr.readline()
1288 l = lr.readline()
1289 if l.startswith('\ '):
1289 if l.startswith('\ '):
1290 s = self.a[-1][:-1]
1290 s = self.a[-1][:-1]
1291 self.a[-1] = s
1291 self.a[-1] = s
1292 self.hunk[-1] = s
1292 self.hunk[-1] = s
1293 l = lr.readline()
1293 l = lr.readline()
1294 m = contextdesc.match(l)
1294 m = contextdesc.match(l)
1295 if not m:
1295 if not m:
1296 raise PatchError(_("bad hunk #%d") % self.number)
1296 raise PatchError(_("bad hunk #%d") % self.number)
1297 self.startb, bend = m.groups()
1297 self.startb, bend = m.groups()
1298 self.startb = int(self.startb)
1298 self.startb = int(self.startb)
1299 if bend is None:
1299 if bend is None:
1300 bend = self.startb
1300 bend = self.startb
1301 self.lenb = int(bend) - self.startb
1301 self.lenb = int(bend) - self.startb
1302 if self.startb:
1302 if self.startb:
1303 self.lenb += 1
1303 self.lenb += 1
1304 hunki = 1
1304 hunki = 1
1305 for x in xrange(self.lenb):
1305 for x in xrange(self.lenb):
1306 l = lr.readline()
1306 l = lr.readline()
1307 if l.startswith('\ '):
1307 if l.startswith('\ '):
1308 # XXX: the only way to hit this is with an invalid line range.
1308 # XXX: the only way to hit this is with an invalid line range.
1309 # The no-eol marker is not counted in the line range, but I
1309 # The no-eol marker is not counted in the line range, but I
1310 # guess there are diff(1) implementations out there which behave differently.
1310 # guess there are diff(1) implementations out there which behave differently.
1311 s = self.b[-1][:-1]
1311 s = self.b[-1][:-1]
1312 self.b[-1] = s
1312 self.b[-1] = s
1313 self.hunk[hunki - 1] = s
1313 self.hunk[hunki - 1] = s
1314 continue
1314 continue
1315 if not l:
1315 if not l:
1316 # line deletions, new block is empty and we hit EOF
1316 # line deletions, new block is empty and we hit EOF
1317 lr.push(l)
1317 lr.push(l)
1318 break
1318 break
1319 s = l[2:]
1319 s = l[2:]
1320 if l.startswith('+ ') or l.startswith('! '):
1320 if l.startswith('+ ') or l.startswith('! '):
1321 u = '+' + s
1321 u = '+' + s
1322 elif l.startswith(' '):
1322 elif l.startswith(' '):
1323 u = ' ' + s
1323 u = ' ' + s
1324 elif len(self.b) == 0:
1324 elif len(self.b) == 0:
1325 # line deletions, new block is empty
1325 # line deletions, new block is empty
1326 lr.push(l)
1326 lr.push(l)
1327 break
1327 break
1328 else:
1328 else:
1329 raise PatchError(_("bad hunk #%d new text line %d") %
1329 raise PatchError(_("bad hunk #%d new text line %d") %
1330 (self.number, x))
1330 (self.number, x))
1331 self.b.append(s)
1331 self.b.append(s)
1332 while True:
1332 while True:
1333 if hunki >= len(self.hunk):
1333 if hunki >= len(self.hunk):
1334 h = ""
1334 h = ""
1335 else:
1335 else:
1336 h = self.hunk[hunki]
1336 h = self.hunk[hunki]
1337 hunki += 1
1337 hunki += 1
1338 if h == u:
1338 if h == u:
1339 break
1339 break
1340 elif h.startswith('-'):
1340 elif h.startswith('-'):
1341 continue
1341 continue
1342 else:
1342 else:
1343 self.hunk.insert(hunki - 1, u)
1343 self.hunk.insert(hunki - 1, u)
1344 break
1344 break
1345
1345
1346 if not self.a:
1346 if not self.a:
1347 # this happens when lines were only added to the hunk
1347 # this happens when lines were only added to the hunk
1348 for x in self.hunk:
1348 for x in self.hunk:
1349 if x.startswith('-') or x.startswith(' '):
1349 if x.startswith('-') or x.startswith(' '):
1350 self.a.append(x)
1350 self.a.append(x)
1351 if not self.b:
1351 if not self.b:
1352 # this happens when lines were only deleted from the hunk
1352 # this happens when lines were only deleted from the hunk
1353 for x in self.hunk:
1353 for x in self.hunk:
1354 if x.startswith('+') or x.startswith(' '):
1354 if x.startswith('+') or x.startswith(' '):
1355 self.b.append(x[1:])
1355 self.b.append(x[1:])
1356 # @@ -start,len +start,len @@
1356 # @@ -start,len +start,len @@
1357 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
1357 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
1358 self.startb, self.lenb)
1358 self.startb, self.lenb)
1359 self.hunk[0] = self.desc
1359 self.hunk[0] = self.desc
1360 self._fixnewline(lr)
1360 self._fixnewline(lr)
1361
1361
1362 def _fixnewline(self, lr):
1362 def _fixnewline(self, lr):
1363 l = lr.readline()
1363 l = lr.readline()
1364 if l.startswith('\ '):
1364 if l.startswith('\ '):
1365 diffhelpers.fix_newline(self.hunk, self.a, self.b)
1365 diffhelpers.fix_newline(self.hunk, self.a, self.b)
1366 else:
1366 else:
1367 lr.push(l)
1367 lr.push(l)
1368
1368
1369 def complete(self):
1369 def complete(self):
1370 return len(self.a) == self.lena and len(self.b) == self.lenb
1370 return len(self.a) == self.lena and len(self.b) == self.lenb
1371
1371
1372 def _fuzzit(self, old, new, fuzz, toponly):
1372 def _fuzzit(self, old, new, fuzz, toponly):
1373 # this removes context lines from the top and bottom of the old and new
1373 # this removes context lines from the top and bottom of the old and new
1374 # lists. It checks the hunk to make sure only context lines are removed,
1374 # lists. It checks the hunk to make sure only context lines are removed,
1375 # and then returns the shortened lists and the number trimmed from the top.
1375 # and then returns the shortened lists and the number trimmed from the top.
1376 fuzz = min(fuzz, len(old))
1376 fuzz = min(fuzz, len(old))
1377 if fuzz:
1377 if fuzz:
1378 top = 0
1378 top = 0
1379 bot = 0
1379 bot = 0
1380 hlen = len(self.hunk)
1380 hlen = len(self.hunk)
1381 for x in xrange(hlen - 1):
1381 for x in xrange(hlen - 1):
1382 # the hunk starts with the @@ line, so use x+1
1382 # the hunk starts with the @@ line, so use x+1
1383 if self.hunk[x + 1][0] == ' ':
1383 if self.hunk[x + 1][0] == ' ':
1384 top += 1
1384 top += 1
1385 else:
1385 else:
1386 break
1386 break
1387 if not toponly:
1387 if not toponly:
1388 for x in xrange(hlen - 1):
1388 for x in xrange(hlen - 1):
1389 if self.hunk[hlen - bot - 1][0] == ' ':
1389 if self.hunk[hlen - bot - 1][0] == ' ':
1390 bot += 1
1390 bot += 1
1391 else:
1391 else:
1392 break
1392 break
1393
1393
1394 bot = min(fuzz, bot)
1394 bot = min(fuzz, bot)
1395 top = min(fuzz, top)
1395 top = min(fuzz, top)
1396 return old[top:len(old) - bot], new[top:len(new) - bot], top
1396 return old[top:len(old) - bot], new[top:len(new) - bot], top
1397 return old, new, 0
1397 return old, new, 0
1398
1398
1399 def fuzzit(self, fuzz, toponly):
1399 def fuzzit(self, fuzz, toponly):
1400 old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
1400 old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
1401 oldstart = self.starta + top
1401 oldstart = self.starta + top
1402 newstart = self.startb + top
1402 newstart = self.startb + top
1403 # zero length hunk ranges already have their start decremented
1403 # zero length hunk ranges already have their start decremented
1404 if self.lena and oldstart > 0:
1404 if self.lena and oldstart > 0:
1405 oldstart -= 1
1405 oldstart -= 1
1406 if self.lenb and newstart > 0:
1406 if self.lenb and newstart > 0:
1407 newstart -= 1
1407 newstart -= 1
1408 return old, oldstart, new, newstart
1408 return old, oldstart, new, newstart
1409
1409
1410 class binhunk(object):
1410 class binhunk(object):
1411 'A binary patch file.'
1411 'A binary patch file.'
1412 def __init__(self, lr, fname):
1412 def __init__(self, lr, fname):
1413 self.text = None
1413 self.text = None
1414 self.delta = False
1414 self.delta = False
1415 self.hunk = ['GIT binary patch\n']
1415 self.hunk = ['GIT binary patch\n']
1416 self._fname = fname
1416 self._fname = fname
1417 self._read(lr)
1417 self._read(lr)
1418
1418
1419 def complete(self):
1419 def complete(self):
1420 return self.text is not None
1420 return self.text is not None
1421
1421
1422 def new(self, lines):
1422 def new(self, lines):
1423 if self.delta:
1423 if self.delta:
1424 return [applybindelta(self.text, ''.join(lines))]
1424 return [applybindelta(self.text, ''.join(lines))]
1425 return [self.text]
1425 return [self.text]
1426
1426
1427 def _read(self, lr):
1427 def _read(self, lr):
1428 def getline(lr, hunk):
1428 def getline(lr, hunk):
1429 l = lr.readline()
1429 l = lr.readline()
1430 hunk.append(l)
1430 hunk.append(l)
1431 return l.rstrip('\r\n')
1431 return l.rstrip('\r\n')
1432
1432
1433 size = 0
1433 size = 0
1434 while True:
1434 while True:
1435 line = getline(lr, self.hunk)
1435 line = getline(lr, self.hunk)
1436 if not line:
1436 if not line:
1437 raise PatchError(_('could not extract "%s" binary data')
1437 raise PatchError(_('could not extract "%s" binary data')
1438 % self._fname)
1438 % self._fname)
1439 if line.startswith('literal '):
1439 if line.startswith('literal '):
1440 size = int(line[8:].rstrip())
1440 size = int(line[8:].rstrip())
1441 break
1441 break
1442 if line.startswith('delta '):
1442 if line.startswith('delta '):
1443 size = int(line[6:].rstrip())
1443 size = int(line[6:].rstrip())
1444 self.delta = True
1444 self.delta = True
1445 break
1445 break
1446 dec = []
1446 dec = []
1447 line = getline(lr, self.hunk)
1447 line = getline(lr, self.hunk)
1448 while len(line) > 1:
1448 while len(line) > 1:
1449 l = line[0]
1449 l = line[0]
1450 if l <= 'Z' and l >= 'A':
1450 if l <= 'Z' and l >= 'A':
1451 l = ord(l) - ord('A') + 1
1451 l = ord(l) - ord('A') + 1
1452 else:
1452 else:
1453 l = ord(l) - ord('a') + 27
1453 l = ord(l) - ord('a') + 27
1454 try:
1454 try:
1455 dec.append(util.b85decode(line[1:])[:l])
1455 dec.append(util.b85decode(line[1:])[:l])
1456 except ValueError as e:
1456 except ValueError as e:
1457 raise PatchError(_('could not decode "%s" binary patch: %s')
1457 raise PatchError(_('could not decode "%s" binary patch: %s')
1458 % (self._fname, str(e)))
1458 % (self._fname, str(e)))
1459 line = getline(lr, self.hunk)
1459 line = getline(lr, self.hunk)
1460 text = zlib.decompress(''.join(dec))
1460 text = zlib.decompress(''.join(dec))
1461 if len(text) != size:
1461 if len(text) != size:
1462 raise PatchError(_('"%s" length is %d bytes, should be %d')
1462 raise PatchError(_('"%s" length is %d bytes, should be %d')
1463 % (self._fname, len(text), size))
1463 % (self._fname, len(text), size))
1464 self.text = text
1464 self.text = text
1465
1465
1466 def parsefilename(str):
1466 def parsefilename(str):
1467 # --- filename \t|space stuff
1467 # --- filename \t|space stuff
1468 s = str[4:].rstrip('\r\n')
1468 s = str[4:].rstrip('\r\n')
1469 i = s.find('\t')
1469 i = s.find('\t')
1470 if i < 0:
1470 if i < 0:
1471 i = s.find(' ')
1471 i = s.find(' ')
1472 if i < 0:
1472 if i < 0:
1473 return s
1473 return s
1474 return s[:i]
1474 return s[:i]
1475
1475
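# Editor's note: an illustrative sketch, not part of the original file.
# parsefilename() keeps whatever follows the '--- '/'+++ ' marker up to the
# first tab or space, mirroring this file's own doctest style:
#
# >>> parsefilename(b'--- a/foo/bar.c\t2017-10-29 12:00:01 +0100\n')
# 'a/foo/bar.c'
# >>> parsefilename(b'+++ b/foo/bar.c\n')
# 'b/foo/bar.c'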
1476 def reversehunks(hunks):
1476 def reversehunks(hunks):
1477 '''reverse the signs in the hunks given as argument
1477 '''reverse the signs in the hunks given as argument
1478
1478
1479 This function operates on hunks coming out of patch.filterpatch, that is
1479 This function operates on hunks coming out of patch.filterpatch, that is
1480 a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:
1480 a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:
1481
1481
1482 >>> rawpatch = b"""diff --git a/folder1/g b/folder1/g
1482 >>> rawpatch = b"""diff --git a/folder1/g b/folder1/g
1483 ... --- a/folder1/g
1483 ... --- a/folder1/g
1484 ... +++ b/folder1/g
1484 ... +++ b/folder1/g
1485 ... @@ -1,7 +1,7 @@
1485 ... @@ -1,7 +1,7 @@
1486 ... +firstline
1486 ... +firstline
1487 ... c
1487 ... c
1488 ... 1
1488 ... 1
1489 ... 2
1489 ... 2
1490 ... + 3
1490 ... + 3
1491 ... -4
1491 ... -4
1492 ... 5
1492 ... 5
1493 ... d
1493 ... d
1494 ... +lastline"""
1494 ... +lastline"""
1495 >>> hunks = parsepatch([rawpatch])
1495 >>> hunks = parsepatch([rawpatch])
1496 >>> hunkscomingfromfilterpatch = []
1496 >>> hunkscomingfromfilterpatch = []
1497 >>> for h in hunks:
1497 >>> for h in hunks:
1498 ... hunkscomingfromfilterpatch.append(h)
1498 ... hunkscomingfromfilterpatch.append(h)
1499 ... hunkscomingfromfilterpatch.extend(h.hunks)
1499 ... hunkscomingfromfilterpatch.extend(h.hunks)
1500
1500
1501 >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch)
1501 >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch)
1502 >>> from . import util
1502 >>> from . import util
1503 >>> fp = util.stringio()
1503 >>> fp = util.stringio()
1504 >>> for c in reversedhunks:
1504 >>> for c in reversedhunks:
1505 ... c.write(fp)
1505 ... c.write(fp)
1506 >>> fp.seek(0) or None
1506 >>> fp.seek(0) or None
1507 >>> reversedpatch = fp.read()
1507 >>> reversedpatch = fp.read()
1508 >>> print(pycompat.sysstr(reversedpatch))
1508 >>> print(pycompat.sysstr(reversedpatch))
1509 diff --git a/folder1/g b/folder1/g
1509 diff --git a/folder1/g b/folder1/g
1510 --- a/folder1/g
1510 --- a/folder1/g
1511 +++ b/folder1/g
1511 +++ b/folder1/g
1512 @@ -1,4 +1,3 @@
1512 @@ -1,4 +1,3 @@
1513 -firstline
1513 -firstline
1514 c
1514 c
1515 1
1515 1
1516 2
1516 2
1517 @@ -2,6 +1,6 @@
1517 @@ -2,6 +1,6 @@
1518 c
1518 c
1519 1
1519 1
1520 2
1520 2
1521 - 3
1521 - 3
1522 +4
1522 +4
1523 5
1523 5
1524 d
1524 d
1525 @@ -6,3 +5,2 @@
1525 @@ -6,3 +5,2 @@
1526 5
1526 5
1527 d
1527 d
1528 -lastline
1528 -lastline
1529
1529
1530 '''
1530 '''
1531
1531
1532 newhunks = []
1532 newhunks = []
1533 for c in hunks:
1533 for c in hunks:
1534 if util.safehasattr(c, 'reversehunk'):
1534 if util.safehasattr(c, 'reversehunk'):
1535 c = c.reversehunk()
1535 c = c.reversehunk()
1536 newhunks.append(c)
1536 newhunks.append(c)
1537 return newhunks
1537 return newhunks
1538
1538
1539 def parsepatch(originalchunks, maxcontext=None):
1539 def parsepatch(originalchunks, maxcontext=None):
1540 """patch -> [] of headers -> [] of hunks
1540 """patch -> [] of headers -> [] of hunks
1541
1541
1542 If maxcontext is not None, trim context lines if necessary.
1542 If maxcontext is not None, trim context lines if necessary.
1543
1543
1544 >>> rawpatch = b'''diff --git a/folder1/g b/folder1/g
1544 >>> rawpatch = b'''diff --git a/folder1/g b/folder1/g
1545 ... --- a/folder1/g
1545 ... --- a/folder1/g
1546 ... +++ b/folder1/g
1546 ... +++ b/folder1/g
1547 ... @@ -1,8 +1,10 @@
1547 ... @@ -1,8 +1,10 @@
1548 ... 1
1548 ... 1
1549 ... 2
1549 ... 2
1550 ... -3
1550 ... -3
1551 ... 4
1551 ... 4
1552 ... 5
1552 ... 5
1553 ... 6
1553 ... 6
1554 ... +6.1
1554 ... +6.1
1555 ... +6.2
1555 ... +6.2
1556 ... 7
1556 ... 7
1557 ... 8
1557 ... 8
1558 ... +9'''
1558 ... +9'''
1559 >>> out = util.stringio()
1559 >>> out = util.stringio()
1560 >>> headers = parsepatch([rawpatch], maxcontext=1)
1560 >>> headers = parsepatch([rawpatch], maxcontext=1)
1561 >>> for header in headers:
1561 >>> for header in headers:
1562 ... header.write(out)
1562 ... header.write(out)
1563 ... for hunk in header.hunks:
1563 ... for hunk in header.hunks:
1564 ... hunk.write(out)
1564 ... hunk.write(out)
1565 >>> print(pycompat.sysstr(out.getvalue()))
1565 >>> print(pycompat.sysstr(out.getvalue()))
1566 diff --git a/folder1/g b/folder1/g
1566 diff --git a/folder1/g b/folder1/g
1567 --- a/folder1/g
1567 --- a/folder1/g
1568 +++ b/folder1/g
1568 +++ b/folder1/g
1569 @@ -2,3 +2,2 @@
1569 @@ -2,3 +2,2 @@
1570 2
1570 2
1571 -3
1571 -3
1572 4
1572 4
1573 @@ -6,2 +5,4 @@
1573 @@ -6,2 +5,4 @@
1574 6
1574 6
1575 +6.1
1575 +6.1
1576 +6.2
1576 +6.2
1577 7
1577 7
1578 @@ -8,1 +9,2 @@
1578 @@ -8,1 +9,2 @@
1579 8
1579 8
1580 +9
1580 +9
1581 """
1581 """
1582 class parser(object):
1582 class parser(object):
1583 """patch parsing state machine"""
1583 """patch parsing state machine"""
1584 def __init__(self):
1584 def __init__(self):
1585 self.fromline = 0
1585 self.fromline = 0
1586 self.toline = 0
1586 self.toline = 0
1587 self.proc = ''
1587 self.proc = ''
1588 self.header = None
1588 self.header = None
1589 self.context = []
1589 self.context = []
1590 self.before = []
1590 self.before = []
1591 self.hunk = []
1591 self.hunk = []
1592 self.headers = []
1592 self.headers = []
1593
1593
1594 def addrange(self, limits):
1594 def addrange(self, limits):
1595 fromstart, fromend, tostart, toend, proc = limits
1595 fromstart, fromend, tostart, toend, proc = limits
1596 self.fromline = int(fromstart)
1596 self.fromline = int(fromstart)
1597 self.toline = int(tostart)
1597 self.toline = int(tostart)
1598 self.proc = proc
1598 self.proc = proc
1599
1599
1600 def addcontext(self, context):
1600 def addcontext(self, context):
1601 if self.hunk:
1601 if self.hunk:
1602 h = recordhunk(self.header, self.fromline, self.toline,
1602 h = recordhunk(self.header, self.fromline, self.toline,
1603 self.proc, self.before, self.hunk, context, maxcontext)
1603 self.proc, self.before, self.hunk, context, maxcontext)
1604 self.header.hunks.append(h)
1604 self.header.hunks.append(h)
1605 self.fromline += len(self.before) + h.removed
1605 self.fromline += len(self.before) + h.removed
1606 self.toline += len(self.before) + h.added
1606 self.toline += len(self.before) + h.added
1607 self.before = []
1607 self.before = []
1608 self.hunk = []
1608 self.hunk = []
1609 self.context = context
1609 self.context = context
1610
1610
1611 def addhunk(self, hunk):
1611 def addhunk(self, hunk):
1612 if self.context:
1612 if self.context:
1613 self.before = self.context
1613 self.before = self.context
1614 self.context = []
1614 self.context = []
1615 self.hunk = hunk
1615 self.hunk = hunk
1616
1616
1617 def newfile(self, hdr):
1617 def newfile(self, hdr):
1618 self.addcontext([])
1618 self.addcontext([])
1619 h = header(hdr)
1619 h = header(hdr)
1620 self.headers.append(h)
1620 self.headers.append(h)
1621 self.header = h
1621 self.header = h
1622
1622
1623 def addother(self, line):
1623 def addother(self, line):
1624 pass # 'other' lines are ignored
1624 pass # 'other' lines are ignored
1625
1625
1626 def finished(self):
1626 def finished(self):
1627 self.addcontext([])
1627 self.addcontext([])
1628 return self.headers
1628 return self.headers
1629
1629
1630 transitions = {
1630 transitions = {
1631 'file': {'context': addcontext,
1631 'file': {'context': addcontext,
1632 'file': newfile,
1632 'file': newfile,
1633 'hunk': addhunk,
1633 'hunk': addhunk,
1634 'range': addrange},
1634 'range': addrange},
1635 'context': {'file': newfile,
1635 'context': {'file': newfile,
1636 'hunk': addhunk,
1636 'hunk': addhunk,
1637 'range': addrange,
1637 'range': addrange,
1638 'other': addother},
1638 'other': addother},
1639 'hunk': {'context': addcontext,
1639 'hunk': {'context': addcontext,
1640 'file': newfile,
1640 'file': newfile,
1641 'range': addrange},
1641 'range': addrange},
1642 'range': {'context': addcontext,
1642 'range': {'context': addcontext,
1643 'hunk': addhunk},
1643 'hunk': addhunk},
1644 'other': {'other': addother},
1644 'other': {'other': addother},
1645 }
1645 }
1646
1646
1647 p = parser()
1647 p = parser()
1648 fp = stringio()
1648 fp = stringio()
1649 fp.write(''.join(originalchunks))
1649 fp.write(''.join(originalchunks))
1650 fp.seek(0)
1650 fp.seek(0)
1651
1651
1652 state = 'context'
1652 state = 'context'
1653 for newstate, data in scanpatch(fp):
1653 for newstate, data in scanpatch(fp):
1654 try:
1654 try:
1655 p.transitions[state][newstate](p, data)
1655 p.transitions[state][newstate](p, data)
1656 except KeyError:
1656 except KeyError:
1657 raise PatchError('unhandled transition: %s -> %s' %
1657 raise PatchError('unhandled transition: %s -> %s' %
1658 (state, newstate))
1658 (state, newstate))
1659 state = newstate
1659 state = newstate
1660 del fp
1660 del fp
1661 return p.finished()
1661 return p.finished()
1662
1662
1663 def pathtransform(path, strip, prefix):
1663 def pathtransform(path, strip, prefix):
1664 '''turn a path from a patch into a path suitable for the repository
1664 '''turn a path from a patch into a path suitable for the repository
1665
1665
1666 prefix, if not empty, is expected to be normalized with a / at the end.
1666 prefix, if not empty, is expected to be normalized with a / at the end.
1667
1667
1668 Returns (stripped components, path in repository).
1668 Returns (stripped components, path in repository).
1669
1669
1670 >>> pathtransform(b'a/b/c', 0, b'')
1670 >>> pathtransform(b'a/b/c', 0, b'')
1671 ('', 'a/b/c')
1671 ('', 'a/b/c')
1672 >>> pathtransform(b' a/b/c ', 0, b'')
1672 >>> pathtransform(b' a/b/c ', 0, b'')
1673 ('', ' a/b/c')
1673 ('', ' a/b/c')
1674 >>> pathtransform(b' a/b/c ', 2, b'')
1674 >>> pathtransform(b' a/b/c ', 2, b'')
1675 ('a/b/', 'c')
1675 ('a/b/', 'c')
1676 >>> pathtransform(b'a/b/c', 0, b'd/e/')
1676 >>> pathtransform(b'a/b/c', 0, b'd/e/')
1677 ('', 'd/e/a/b/c')
1677 ('', 'd/e/a/b/c')
1678 >>> pathtransform(b' a//b/c ', 2, b'd/e/')
1678 >>> pathtransform(b' a//b/c ', 2, b'd/e/')
1679 ('a//b/', 'd/e/c')
1679 ('a//b/', 'd/e/c')
1680 >>> pathtransform(b'a/b/c', 3, b'')
1680 >>> pathtransform(b'a/b/c', 3, b'')
1681 Traceback (most recent call last):
1681 Traceback (most recent call last):
1682 PatchError: unable to strip away 1 of 3 dirs from a/b/c
1682 PatchError: unable to strip away 1 of 3 dirs from a/b/c
1683 '''
1683 '''
1684 pathlen = len(path)
1684 pathlen = len(path)
1685 i = 0
1685 i = 0
1686 if strip == 0:
1686 if strip == 0:
1687 return '', prefix + path.rstrip()
1687 return '', prefix + path.rstrip()
1688 count = strip
1688 count = strip
1689 while count > 0:
1689 while count > 0:
1690 i = path.find('/', i)
1690 i = path.find('/', i)
1691 if i == -1:
1691 if i == -1:
1692 raise PatchError(_("unable to strip away %d of %d dirs from %s") %
1692 raise PatchError(_("unable to strip away %d of %d dirs from %s") %
1693 (count, strip, path))
1693 (count, strip, path))
1694 i += 1
1694 i += 1
1695 # consume '//' in the path
1695 # consume '//' in the path
1696 while i < pathlen - 1 and path[i:i + 1] == '/':
1696 while i < pathlen - 1 and path[i:i + 1] == '/':
1697 i += 1
1697 i += 1
1698 count -= 1
1698 count -= 1
1699 return path[:i].lstrip(), prefix + path[i:].rstrip()
1699 return path[:i].lstrip(), prefix + path[i:].rstrip()
1700
1700
1701 def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
1701 def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
1702 nulla = afile_orig == "/dev/null"
1702 nulla = afile_orig == "/dev/null"
1703 nullb = bfile_orig == "/dev/null"
1703 nullb = bfile_orig == "/dev/null"
1704 create = nulla and hunk.starta == 0 and hunk.lena == 0
1704 create = nulla and hunk.starta == 0 and hunk.lena == 0
1705 remove = nullb and hunk.startb == 0 and hunk.lenb == 0
1705 remove = nullb and hunk.startb == 0 and hunk.lenb == 0
1706 abase, afile = pathtransform(afile_orig, strip, prefix)
1706 abase, afile = pathtransform(afile_orig, strip, prefix)
1707 gooda = not nulla and backend.exists(afile)
1707 gooda = not nulla and backend.exists(afile)
1708 bbase, bfile = pathtransform(bfile_orig, strip, prefix)
1708 bbase, bfile = pathtransform(bfile_orig, strip, prefix)
1709 if afile == bfile:
1709 if afile == bfile:
1710 goodb = gooda
1710 goodb = gooda
1711 else:
1711 else:
1712 goodb = not nullb and backend.exists(bfile)
1712 goodb = not nullb and backend.exists(bfile)
1713 missing = not goodb and not gooda and not create
1713 missing = not goodb and not gooda and not create
1714
1714
1715 # some diff programs apparently produce patches where the afile is
1715 # some diff programs apparently produce patches where the afile is
1716 # not /dev/null, but afile starts with bfile
1716 # not /dev/null, but afile starts with bfile
1717 abasedir = afile[:afile.rfind('/') + 1]
1717 abasedir = afile[:afile.rfind('/') + 1]
1718 bbasedir = bfile[:bfile.rfind('/') + 1]
1718 bbasedir = bfile[:bfile.rfind('/') + 1]
1719 if (missing and abasedir == bbasedir and afile.startswith(bfile)
1719 if (missing and abasedir == bbasedir and afile.startswith(bfile)
1720 and hunk.starta == 0 and hunk.lena == 0):
1720 and hunk.starta == 0 and hunk.lena == 0):
1721 create = True
1721 create = True
1722 missing = False
1722 missing = False
1723
1723
1724 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
1724 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
1725 # diff is between a file and its backup. In this case, the original
1725 # diff is between a file and its backup. In this case, the original
1726 # file should be patched (see original mpatch code).
1726 # file should be patched (see original mpatch code).
1727 isbackup = (abase == bbase and bfile.startswith(afile))
1727 isbackup = (abase == bbase and bfile.startswith(afile))
1728 fname = None
1728 fname = None
1729 if not missing:
1729 if not missing:
1730 if gooda and goodb:
1730 if gooda and goodb:
1731 if isbackup:
1731 if isbackup:
1732 fname = afile
1732 fname = afile
1733 else:
1733 else:
1734 fname = bfile
1734 fname = bfile
1735 elif gooda:
1735 elif gooda:
1736 fname = afile
1736 fname = afile
1737
1737
1738 if not fname:
1738 if not fname:
1739 if not nullb:
1739 if not nullb:
1740 if isbackup:
1740 if isbackup:
1741 fname = afile
1741 fname = afile
1742 else:
1742 else:
1743 fname = bfile
1743 fname = bfile
1744 elif not nulla:
1744 elif not nulla:
1745 fname = afile
1745 fname = afile
1746 else:
1746 else:
1747 raise PatchError(_("undefined source and destination files"))
1747 raise PatchError(_("undefined source and destination files"))
1748
1748
1749 gp = patchmeta(fname)
1749 gp = patchmeta(fname)
1750 if create:
1750 if create:
1751 gp.op = 'ADD'
1751 gp.op = 'ADD'
1752 elif remove:
1752 elif remove:
1753 gp.op = 'DELETE'
1753 gp.op = 'DELETE'
1754 return gp
1754 return gp
1755
1755
1756 def scanpatch(fp):
1756 def scanpatch(fp):
1757 """like patch.iterhunks, but yield different events
1757 """like patch.iterhunks, but yield different events
1758
1758
1759 - ('file', [header_lines + fromfile + tofile])
1759 - ('file', [header_lines + fromfile + tofile])
1760 - ('context', [context_lines])
1760 - ('context', [context_lines])
1761 - ('hunk', [hunk_lines])
1761 - ('hunk', [hunk_lines])
1762 - ('range', (-start,len, +start,len, proc))
1762 - ('range', (-start,len, +start,len, proc))
1763 """
1763 """
1764 lines_re = re.compile(br'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
1764 lines_re = re.compile(br'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
1765 lr = linereader(fp)
1765 lr = linereader(fp)
1766
1766
1767 def scanwhile(first, p):
1767 def scanwhile(first, p):
1768 """scan lr while predicate holds"""
1768 """scan lr while predicate holds"""
1769 lines = [first]
1769 lines = [first]
1770 for line in iter(lr.readline, ''):
1770 for line in iter(lr.readline, ''):
1771 if p(line):
1771 if p(line):
1772 lines.append(line)
1772 lines.append(line)
1773 else:
1773 else:
1774 lr.push(line)
1774 lr.push(line)
1775 break
1775 break
1776 return lines
1776 return lines
1777
1777
1778 for line in iter(lr.readline, ''):
1778 for line in iter(lr.readline, ''):
1779 if line.startswith('diff --git a/') or line.startswith('diff -r '):
1779 if line.startswith('diff --git a/') or line.startswith('diff -r '):
1780 def notheader(line):
1780 def notheader(line):
1781 s = line.split(None, 1)
1781 s = line.split(None, 1)
1782 return not s or s[0] not in ('---', 'diff')
1782 return not s or s[0] not in ('---', 'diff')
1783 header = scanwhile(line, notheader)
1783 header = scanwhile(line, notheader)
1784 fromfile = lr.readline()
1784 fromfile = lr.readline()
1785 if fromfile.startswith('---'):
1785 if fromfile.startswith('---'):
1786 tofile = lr.readline()
1786 tofile = lr.readline()
1787 header += [fromfile, tofile]
1787 header += [fromfile, tofile]
1788 else:
1788 else:
1789 lr.push(fromfile)
1789 lr.push(fromfile)
1790 yield 'file', header
1790 yield 'file', header
1791 elif line[0:1] == ' ':
1791 elif line[0:1] == ' ':
1792 yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
1792 yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
1793 elif line[0] in '-+':
1793 elif line[0] in '-+':
1794 yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
1794 yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
1795 else:
1795 else:
1796 m = lines_re.match(line)
1796 m = lines_re.match(line)
1797 if m:
1797 if m:
1798 yield 'range', m.groups()
1798 yield 'range', m.groups()
1799 else:
1799 else:
1800 yield 'other', line
1800 yield 'other', line
1801
1801
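# Editor's note: an illustrative sketch of the scanpatch() event stream, not
# part of the original file; it feeds a tiny unified diff through the
# module-level stringio helper and prints each event type as it is yielded.
#
# >>> fp = stringio(b'''diff --git a/x b/x
# ... --- a/x
# ... +++ b/x
# ... @@ -1,2 +1,2 @@
# ...  keep
# ... -old
# ... +new
# ... ''')
# >>> for state, data in scanpatch(fp):
# ...     print(state)
# file
# range
# context
# hunk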
1802 def scangitpatch(lr, firstline):
1802 def scangitpatch(lr, firstline):
1803 """
1803 """
1804 Git patches can emit:
1804 Git patches can emit:
1805 - rename a to b
1805 - rename a to b
1806 - change b
1806 - change b
1807 - copy a to c
1807 - copy a to c
1808 - change c
1808 - change c
1809
1809
1810 We cannot apply this sequence as-is: the renamed 'a' could not be
1810 We cannot apply this sequence as-is: the renamed 'a' could not be
1811 found, since it would have been renamed already. And we cannot copy
1811 found, since it would have been renamed already. And we cannot copy
1812 from 'b' instead because 'b' would have been changed already. So
1812 from 'b' instead because 'b' would have been changed already. So
1813 we scan the git patch for copy and rename commands so we can
1813 we scan the git patch for copy and rename commands so we can
1814 perform the copies ahead of time.
1814 perform the copies ahead of time.
1815 """
1815 """
1816 pos = 0
1816 pos = 0
1817 try:
1817 try:
1818 pos = lr.fp.tell()
1818 pos = lr.fp.tell()
1819 fp = lr.fp
1819 fp = lr.fp
1820 except IOError:
1820 except IOError:
1821 fp = stringio(lr.fp.read())
1821 fp = stringio(lr.fp.read())
1822 gitlr = linereader(fp)
1822 gitlr = linereader(fp)
1823 gitlr.push(firstline)
1823 gitlr.push(firstline)
1824 gitpatches = readgitpatch(gitlr)
1824 gitpatches = readgitpatch(gitlr)
1825 fp.seek(pos)
1825 fp.seek(pos)
1826 return gitpatches
1826 return gitpatches
1827
1827
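# Editor's note: a hedged sketch (not in the original file) of the two-pass
# scan described above -- rename/copy metadata is collected up front, then
# the stream is rewound so hunk parsing can proceed normally afterwards.
#
# >>> rawgit = b'''diff --git a/a b/b
# ... rename from a
# ... rename to b
# ... diff --git a/a b/c
# ... copy from a
# ... copy to c
# ... '''
# >>> fp = stringio(rawgit)
# >>> lr = linereader(fp)
# >>> gitpatches = scangitpatch(lr, lr.readline())
# >>> # expected: one RENAME (a -> b) and one COPY (a -> c) patchmeta entry,
# >>> # with fp seeked back so regular parsing can resume from the start.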
1828 def iterhunks(fp):
1828 def iterhunks(fp):
1829 """Read a patch and yield the following events:
1829 """Read a patch and yield the following events:
1830 - ("file", afile, bfile, firsthunk): select a new target file.
1830 - ("file", afile, bfile, firsthunk): select a new target file.
1831 - ("hunk", hunk): a new hunk is ready to be applied, follows a
1831 - ("hunk", hunk): a new hunk is ready to be applied, follows a
1832 "file" event.
1832 "file" event.
1833 - ("git", gitchanges): current diff is in git format, gitchanges
1833 - ("git", gitchanges): current diff is in git format, gitchanges
1834 maps filenames to gitpatch records. Unique event.
1834 maps filenames to gitpatch records. Unique event.
1835 """
1835 """
1836 afile = ""
1836 afile = ""
1837 bfile = ""
1837 bfile = ""
1838 state = None
1838 state = None
1839 hunknum = 0
1839 hunknum = 0
1840 emitfile = newfile = False
1840 emitfile = newfile = False
1841 gitpatches = None
1841 gitpatches = None
1842
1842
1843 # our states
1843 # our states
1844 BFILE = 1
1844 BFILE = 1
1845 context = None
1845 context = None
1846 lr = linereader(fp)
1846 lr = linereader(fp)
1847
1847
1848 for x in iter(lr.readline, ''):
1848 for x in iter(lr.readline, ''):
1849 if state == BFILE and (
1849 if state == BFILE and (
1850 (not context and x[0] == '@')
1850 (not context and x[0] == '@')
1851 or (context is not False and x.startswith('***************'))
1851 or (context is not False and x.startswith('***************'))
1852 or x.startswith('GIT binary patch')):
1852 or x.startswith('GIT binary patch')):
1853 gp = None
1853 gp = None
1854 if (gitpatches and
1854 if (gitpatches and
1855 gitpatches[-1].ispatching(afile, bfile)):
1855 gitpatches[-1].ispatching(afile, bfile)):
1856 gp = gitpatches.pop()
1856 gp = gitpatches.pop()
1857 if x.startswith('GIT binary patch'):
1857 if x.startswith('GIT binary patch'):
1858 h = binhunk(lr, gp.path)
1858 h = binhunk(lr, gp.path)
1859 else:
1859 else:
1860 if context is None and x.startswith('***************'):
1860 if context is None and x.startswith('***************'):
1861 context = True
1861 context = True
1862 h = hunk(x, hunknum + 1, lr, context)
1862 h = hunk(x, hunknum + 1, lr, context)
1863 hunknum += 1
1863 hunknum += 1
1864 if emitfile:
1864 if emitfile:
1865 emitfile = False
1865 emitfile = False
1866 yield 'file', (afile, bfile, h, gp and gp.copy() or None)
1866 yield 'file', (afile, bfile, h, gp and gp.copy() or None)
1867 yield 'hunk', h
1867 yield 'hunk', h
1868 elif x.startswith('diff --git a/'):
1868 elif x.startswith('diff --git a/'):
1869 m = gitre.match(x.rstrip(' \r\n'))
1869 m = gitre.match(x.rstrip(' \r\n'))
1870 if not m:
1870 if not m:
1871 continue
1871 continue
1872 if gitpatches is None:
1872 if gitpatches is None:
1873 # scan whole input for git metadata
1873 # scan whole input for git metadata
1874 gitpatches = scangitpatch(lr, x)
1874 gitpatches = scangitpatch(lr, x)
1875 yield 'git', [g.copy() for g in gitpatches
1875 yield 'git', [g.copy() for g in gitpatches
1876 if g.op in ('COPY', 'RENAME')]
1876 if g.op in ('COPY', 'RENAME')]
1877 gitpatches.reverse()
1877 gitpatches.reverse()
1878 afile = 'a/' + m.group(1)
1878 afile = 'a/' + m.group(1)
1879 bfile = 'b/' + m.group(2)
1879 bfile = 'b/' + m.group(2)
1880 while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
1880 while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
1881 gp = gitpatches.pop()
1881 gp = gitpatches.pop()
1882 yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1882 yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1883 if not gitpatches:
1883 if not gitpatches:
1884 raise PatchError(_('failed to synchronize metadata for "%s"')
1884 raise PatchError(_('failed to synchronize metadata for "%s"')
1885 % afile[2:])
1885 % afile[2:])
1886 gp = gitpatches[-1]
1886 gp = gitpatches[-1]
1887 newfile = True
1887 newfile = True
1888 elif x.startswith('---'):
1888 elif x.startswith('---'):
1889 # check for a unified diff
1889 # check for a unified diff
1890 l2 = lr.readline()
1890 l2 = lr.readline()
1891 if not l2.startswith('+++'):
1891 if not l2.startswith('+++'):
1892 lr.push(l2)
1892 lr.push(l2)
1893 continue
1893 continue
1894 newfile = True
1894 newfile = True
1895 context = False
1895 context = False
1896 afile = parsefilename(x)
1896 afile = parsefilename(x)
1897 bfile = parsefilename(l2)
1897 bfile = parsefilename(l2)
1898 elif x.startswith('***'):
1898 elif x.startswith('***'):
1899 # check for a context diff
1899 # check for a context diff
1900 l2 = lr.readline()
1900 l2 = lr.readline()
1901 if not l2.startswith('---'):
1901 if not l2.startswith('---'):
1902 lr.push(l2)
1902 lr.push(l2)
1903 continue
1903 continue
1904 l3 = lr.readline()
1904 l3 = lr.readline()
1905 lr.push(l3)
1905 lr.push(l3)
1906 if not l3.startswith("***************"):
1906 if not l3.startswith("***************"):
1907 lr.push(l2)
1907 lr.push(l2)
1908 continue
1908 continue
1909 newfile = True
1909 newfile = True
1910 context = True
1910 context = True
1911 afile = parsefilename(x)
1911 afile = parsefilename(x)
1912 bfile = parsefilename(l2)
1912 bfile = parsefilename(l2)
1913
1913
1914 if newfile:
1914 if newfile:
1915 newfile = False
1915 newfile = False
1916 emitfile = True
1916 emitfile = True
1917 state = BFILE
1917 state = BFILE
1918 hunknum = 0
1918 hunknum = 0
1919
1919
1920 while gitpatches:
1920 while gitpatches:
1921 gp = gitpatches.pop()
1921 gp = gitpatches.pop()
1922 yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1922 yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1923
1923
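# Editor's note: an illustrative sketch, not part of the original file,
# showing the event vocabulary of iterhunks() on a minimal unified diff
# (this mirrors how _applydiff() below consumes the generator):
#
# >>> fp = stringio(b'''--- a/x
# ... +++ b/x
# ... @@ -1,1 +1,1 @@
# ... -old
# ... +new
# ... ''')
# >>> for state, values in iterhunks(fp):
# ...     print(state)
# file
# hunk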
1924 def applybindelta(binchunk, data):
1924 def applybindelta(binchunk, data):
1925 """Apply a binary delta hunk
1925 """Apply a binary delta hunk
1926 The algorithm used is the algorithm from git's patch-delta.c
1926 The algorithm used is the algorithm from git's patch-delta.c
1927 """
1927 """
1928 def deltahead(binchunk):
1928 def deltahead(binchunk):
1929 i = 0
1929 i = 0
1930 for c in binchunk:
1930 for c in binchunk:
1931 i += 1
1931 i += 1
1932 if not (ord(c) & 0x80):
1932 if not (ord(c) & 0x80):
1933 return i
1933 return i
1934 return i
1934 return i
1935 out = ""
1935 out = ""
1936 s = deltahead(binchunk)
1936 s = deltahead(binchunk)
1937 binchunk = binchunk[s:]
1937 binchunk = binchunk[s:]
1938 s = deltahead(binchunk)
1938 s = deltahead(binchunk)
1939 binchunk = binchunk[s:]
1939 binchunk = binchunk[s:]
1940 i = 0
1940 i = 0
1941 while i < len(binchunk):
1941 while i < len(binchunk):
1942 cmd = ord(binchunk[i])
1942 cmd = ord(binchunk[i])
1943 i += 1
1943 i += 1
1944 if (cmd & 0x80):
1944 if (cmd & 0x80):
1945 offset = 0
1945 offset = 0
1946 size = 0
1946 size = 0
1947 if (cmd & 0x01):
1947 if (cmd & 0x01):
1948 offset = ord(binchunk[i])
1948 offset = ord(binchunk[i])
1949 i += 1
1949 i += 1
1950 if (cmd & 0x02):
1950 if (cmd & 0x02):
1951 offset |= ord(binchunk[i]) << 8
1951 offset |= ord(binchunk[i]) << 8
1952 i += 1
1952 i += 1
1953 if (cmd & 0x04):
1953 if (cmd & 0x04):
1954 offset |= ord(binchunk[i]) << 16
1954 offset |= ord(binchunk[i]) << 16
1955 i += 1
1955 i += 1
1956 if (cmd & 0x08):
1956 if (cmd & 0x08):
1957 offset |= ord(binchunk[i]) << 24
1957 offset |= ord(binchunk[i]) << 24
1958 i += 1
1958 i += 1
1959 if (cmd & 0x10):
1959 if (cmd & 0x10):
1960 size = ord(binchunk[i])
1960 size = ord(binchunk[i])
1961 i += 1
1961 i += 1
1962 if (cmd & 0x20):
1962 if (cmd & 0x20):
1963 size |= ord(binchunk[i]) << 8
1963 size |= ord(binchunk[i]) << 8
1964 i += 1
1964 i += 1
1965 if (cmd & 0x40):
1965 if (cmd & 0x40):
1966 size |= ord(binchunk[i]) << 16
1966 size |= ord(binchunk[i]) << 16
1967 i += 1
1967 i += 1
1968 if size == 0:
1968 if size == 0:
1969 size = 0x10000
1969 size = 0x10000
1970 offset_end = offset + size
1970 offset_end = offset + size
1971 out += data[offset:offset_end]
1971 out += data[offset:offset_end]
1972 elif cmd != 0:
1972 elif cmd != 0:
1973 offset_end = i + cmd
1973 offset_end = i + cmd
1974 out += binchunk[i:offset_end]
1974 out += binchunk[i:offset_end]
1975 i += cmd
1975 i += cmd
1976 else:
1976 else:
1977 raise PatchError(_('unexpected delta opcode 0'))
1977 raise PatchError(_('unexpected delta opcode 0'))
1978 return out
1978 return out
1979
1979
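# Editor's note: a worked example (not in the original file) of the delta
# format consumed above: two length headers encoded 7 bits per byte (a set
# high bit means "more bytes follow"), then opcodes -- a byte with bit 0x80
# set copies offset/size (taken from the following bytes) out of 'data', a
# byte in 1..127 inserts that many literal bytes, and 0 is an error.
#
# >>> data = b'hello world'
# >>> # 0x0b, 0x06: source/target sizes; 0x91: copy with one offset byte (6)
# >>> # and one size byte (5) -> data[6:11] == b'world'; 0x01 + b'!': insert
# >>> # a single literal byte
# >>> delta = b'\x0b\x06\x91\x06\x05\x01!'
# >>> applybindelta(delta, data)
# 'world!'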
1980 def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
1980 def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
1981 """Reads a patch from fp and tries to apply it.
1981 """Reads a patch from fp and tries to apply it.
1982
1982
1983 Returns 0 for a clean patch, -1 if any rejects were found and 1 if
1983 Returns 0 for a clean patch, -1 if any rejects were found and 1 if
1984 there was any fuzz.
1984 there was any fuzz.
1985
1985
1986 If 'eolmode' is 'strict', the patch content and patched file are
1986 If 'eolmode' is 'strict', the patch content and patched file are
1987 read in binary mode. Otherwise, line endings are ignored when
1987 read in binary mode. Otherwise, line endings are ignored when
1988 patching, then normalized according to 'eolmode'.
1988 patching, then normalized according to 'eolmode'.
1989 """
1989 """
1990 return _applydiff(ui, fp, patchfile, backend, store, strip=strip,
1990 return _applydiff(ui, fp, patchfile, backend, store, strip=strip,
1991 prefix=prefix, eolmode=eolmode)
1991 prefix=prefix, eolmode=eolmode)
1992
1992
1993 def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
1993 def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
1994 eolmode='strict'):
1994 eolmode='strict'):
1995
1995
1996 if prefix:
1996 if prefix:
1997 prefix = pathutil.canonpath(backend.repo.root, backend.repo.getcwd(),
1997 prefix = pathutil.canonpath(backend.repo.root, backend.repo.getcwd(),
1998 prefix)
1998 prefix)
1999 if prefix != '':
1999 if prefix != '':
2000 prefix += '/'
2000 prefix += '/'
2001 def pstrip(p):
2001 def pstrip(p):
2002 return pathtransform(p, strip - 1, prefix)[1]
2002 return pathtransform(p, strip - 1, prefix)[1]
2003
2003
2004 rejects = 0
2004 rejects = 0
2005 err = 0
2005 err = 0
2006 current_file = None
2006 current_file = None
2007
2007
2008 for state, values in iterhunks(fp):
2008 for state, values in iterhunks(fp):
2009 if state == 'hunk':
2009 if state == 'hunk':
2010 if not current_file:
2010 if not current_file:
2011 continue
2011 continue
2012 ret = current_file.apply(values)
2012 ret = current_file.apply(values)
2013 if ret > 0:
2013 if ret > 0:
2014 err = 1
2014 err = 1
2015 elif state == 'file':
2015 elif state == 'file':
2016 if current_file:
2016 if current_file:
2017 rejects += current_file.close()
2017 rejects += current_file.close()
2018 current_file = None
2018 current_file = None
2019 afile, bfile, first_hunk, gp = values
2019 afile, bfile, first_hunk, gp = values
2020 if gp:
2020 if gp:
2021 gp.path = pstrip(gp.path)
2021 gp.path = pstrip(gp.path)
2022 if gp.oldpath:
2022 if gp.oldpath:
2023 gp.oldpath = pstrip(gp.oldpath)
2023 gp.oldpath = pstrip(gp.oldpath)
2024 else:
2024 else:
2025 gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
2025 gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
2026 prefix)
2026 prefix)
2027 if gp.op == 'RENAME':
2027 if gp.op == 'RENAME':
2028 backend.unlink(gp.oldpath)
2028 backend.unlink(gp.oldpath)
2029 if not first_hunk:
2029 if not first_hunk:
2030 if gp.op == 'DELETE':
2030 if gp.op == 'DELETE':
2031 backend.unlink(gp.path)
2031 backend.unlink(gp.path)
2032 continue
2032 continue
2033 data, mode = None, None
2033 data, mode = None, None
2034 if gp.op in ('RENAME', 'COPY'):
2034 if gp.op in ('RENAME', 'COPY'):
2035 data, mode = store.getfile(gp.oldpath)[:2]
2035 data, mode = store.getfile(gp.oldpath)[:2]
2036 if data is None:
2036 if data is None:
2037 # This means that the old path does not exist
2037 # This means that the old path does not exist
2038 raise PatchError(_("source file '%s' does not exist")
2038 raise PatchError(_("source file '%s' does not exist")
2039 % gp.oldpath)
2039 % gp.oldpath)
2040 if gp.mode:
2040 if gp.mode:
2041 mode = gp.mode
2041 mode = gp.mode
2042 if gp.op == 'ADD':
2042 if gp.op == 'ADD':
2043 # Added files without content have no hunk and
2043 # Added files without content have no hunk and
2044 # must be created
2044 # must be created
2045 data = ''
2045 data = ''
2046 if data or mode:
2046 if data or mode:
2047 if (gp.op in ('ADD', 'RENAME', 'COPY')
2047 if (gp.op in ('ADD', 'RENAME', 'COPY')
2048 and backend.exists(gp.path)):
2048 and backend.exists(gp.path)):
2049 raise PatchError(_("cannot create %s: destination "
2049 raise PatchError(_("cannot create %s: destination "
2050 "already exists") % gp.path)
2050 "already exists") % gp.path)
2051 backend.setfile(gp.path, data, mode, gp.oldpath)
2051 backend.setfile(gp.path, data, mode, gp.oldpath)
2052 continue
2052 continue
2053 try:
2053 try:
2054 current_file = patcher(ui, gp, backend, store,
2054 current_file = patcher(ui, gp, backend, store,
2055 eolmode=eolmode)
2055 eolmode=eolmode)
2056 except PatchError as inst:
2056 except PatchError as inst:
2057 ui.warn(str(inst) + '\n')
2057 ui.warn(str(inst) + '\n')
2058 current_file = None
2058 current_file = None
2059 rejects += 1
2059 rejects += 1
2060 continue
2060 continue
2061 elif state == 'git':
2061 elif state == 'git':
2062 for gp in values:
2062 for gp in values:
2063 path = pstrip(gp.oldpath)
2063 path = pstrip(gp.oldpath)
2064 data, mode = backend.getfile(path)
2064 data, mode = backend.getfile(path)
2065 if data is None:
2065 if data is None:
2066 # The error ignored here will trigger a getfile()
2066 # The error ignored here will trigger a getfile()
2067 # error in a place more appropriate for error
2067 # error in a place more appropriate for error
2068 # handling, and will not interrupt the patching
2068 # handling, and will not interrupt the patching
2069 # process.
2069 # process.
2070 pass
2070 pass
2071 else:
2071 else:
2072 store.setfile(path, data, mode)
2072 store.setfile(path, data, mode)
2073 else:
2073 else:
2074 raise error.Abort(_('unsupported parser state: %s') % state)
2074 raise error.Abort(_('unsupported parser state: %s') % state)
2075
2075
2076 if current_file:
2076 if current_file:
2077 rejects += current_file.close()
2077 rejects += current_file.close()
2078
2078
2079 if rejects:
2079 if rejects:
2080 return -1
2080 return -1
2081 return err
2081 return err
2082
2082
2083 def _externalpatch(ui, repo, patcher, patchname, strip, files,
2083 def _externalpatch(ui, repo, patcher, patchname, strip, files,
2084 similarity):
2084 similarity):
2085 """use <patcher> to apply <patchname> to the working directory.
2085 """use <patcher> to apply <patchname> to the working directory.
2086 returns whether patch was applied with fuzz factor."""
2086 returns whether patch was applied with fuzz factor."""
2087
2087
2088 fuzz = False
2088 fuzz = False
2089 args = []
2089 args = []
2090 cwd = repo.root
2090 cwd = repo.root
2091 if cwd:
2091 if cwd:
2092 args.append('-d %s' % util.shellquote(cwd))
2092 args.append('-d %s' % util.shellquote(cwd))
2093 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
2093 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
2094 util.shellquote(patchname)))
2094 util.shellquote(patchname)))
2095 try:
2095 try:
2096 for line in util.iterfile(fp):
2096 for line in util.iterfile(fp):
2097 line = line.rstrip()
2097 line = line.rstrip()
2098 ui.note(line + '\n')
2098 ui.note(line + '\n')
2099 if line.startswith('patching file '):
2099 if line.startswith('patching file '):
2100 pf = util.parsepatchoutput(line)
2100 pf = util.parsepatchoutput(line)
2101 printed_file = False
2101 printed_file = False
2102 files.add(pf)
2102 files.add(pf)
2103 elif line.find('with fuzz') >= 0:
2103 elif line.find('with fuzz') >= 0:
2104 fuzz = True
2104 fuzz = True
2105 if not printed_file:
2105 if not printed_file:
2106 ui.warn(pf + '\n')
2106 ui.warn(pf + '\n')
2107 printed_file = True
2107 printed_file = True
2108 ui.warn(line + '\n')
2108 ui.warn(line + '\n')
2109 elif line.find('saving rejects to file') >= 0:
2109 elif line.find('saving rejects to file') >= 0:
2110 ui.warn(line + '\n')
2110 ui.warn(line + '\n')
2111 elif line.find('FAILED') >= 0:
2111 elif line.find('FAILED') >= 0:
2112 if not printed_file:
2112 if not printed_file:
2113 ui.warn(pf + '\n')
2113 ui.warn(pf + '\n')
2114 printed_file = True
2114 printed_file = True
2115 ui.warn(line + '\n')
2115 ui.warn(line + '\n')
2116 finally:
2116 finally:
2117 if files:
2117 if files:
2118 scmutil.marktouched(repo, files, similarity)
2118 scmutil.marktouched(repo, files, similarity)
2119 code = fp.close()
2119 code = fp.close()
2120 if code:
2120 if code:
2121 raise PatchError(_("patch command failed: %s") %
2121 raise PatchError(_("patch command failed: %s") %
2122 util.explainexit(code)[0])
2122 util.explainexit(code)[0])
2123 return fuzz
2123 return fuzz
2124
2124
2125 def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
2125 def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
2126 eolmode='strict'):
2126 eolmode='strict'):
2127 if files is None:
2127 if files is None:
2128 files = set()
2128 files = set()
2129 if eolmode is None:
2129 if eolmode is None:
2130 eolmode = ui.config('patch', 'eol')
2130 eolmode = ui.config('patch', 'eol')
2131 if eolmode.lower() not in eolmodes:
2131 if eolmode.lower() not in eolmodes:
2132 raise error.Abort(_('unsupported line endings type: %s') % eolmode)
2132 raise error.Abort(_('unsupported line endings type: %s') % eolmode)
2133 eolmode = eolmode.lower()
2133 eolmode = eolmode.lower()
2134
2134
2135 store = filestore()
2135 store = filestore()
2136 try:
2136 try:
2137 fp = open(patchobj, 'rb')
2137 fp = open(patchobj, 'rb')
2138 except TypeError:
2138 except TypeError:
2139 fp = patchobj
2139 fp = patchobj
2140 try:
2140 try:
2141 ret = applydiff(ui, fp, backend, store, strip=strip, prefix=prefix,
2141 ret = applydiff(ui, fp, backend, store, strip=strip, prefix=prefix,
2142 eolmode=eolmode)
2142 eolmode=eolmode)
2143 finally:
2143 finally:
2144 if fp != patchobj:
2144 if fp != patchobj:
2145 fp.close()
2145 fp.close()
2146 files.update(backend.close())
2146 files.update(backend.close())
2147 store.close()
2147 store.close()
2148 if ret < 0:
2148 if ret < 0:
2149 raise PatchError(_('patch failed to apply'))
2149 raise PatchError(_('patch failed to apply'))
2150 return ret > 0
2150 return ret > 0
2151
2151
2152 def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
2152 def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
2153 eolmode='strict', similarity=0):
2153 eolmode='strict', similarity=0):
2154 """use builtin patch to apply <patchobj> to the working directory.
2154 """use builtin patch to apply <patchobj> to the working directory.
2155 returns whether patch was applied with fuzz factor."""
2155 returns whether patch was applied with fuzz factor."""
2156 backend = workingbackend(ui, repo, similarity)
2156 backend = workingbackend(ui, repo, similarity)
2157 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2157 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2158
2158
2159 def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
2159 def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
2160 eolmode='strict'):
2160 eolmode='strict'):
2161 backend = repobackend(ui, repo, ctx, store)
2161 backend = repobackend(ui, repo, ctx, store)
2162 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2162 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2163
2163
2164 def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
2164 def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
2165 similarity=0):
2165 similarity=0):
2166 """Apply <patchname> to the working directory.
2166 """Apply <patchname> to the working directory.
2167
2167
2168 'eolmode' specifies how end of lines should be handled. It can be:
2168 'eolmode' specifies how end of lines should be handled. It can be:
2169 - 'strict': inputs are read in binary mode, EOLs are preserved
2169 - 'strict': inputs are read in binary mode, EOLs are preserved
2170 - 'crlf': EOLs are ignored when patching and reset to CRLF
2170 - 'crlf': EOLs are ignored when patching and reset to CRLF
2171 - 'lf': EOLs are ignored when patching and reset to LF
2171 - 'lf': EOLs are ignored when patching and reset to LF
2172 - None: get it from user settings, default to 'strict'
2172 - None: get it from user settings, default to 'strict'
2173 'eolmode' is ignored when using an external patcher program.
2173 'eolmode' is ignored when using an external patcher program.
2174
2174
2175 Returns whether patch was applied with fuzz factor.
2175 Returns whether patch was applied with fuzz factor.
2176 """
2176 """
2177 patcher = ui.config('ui', 'patch')
2177 patcher = ui.config('ui', 'patch')
2178 if files is None:
2178 if files is None:
2179 files = set()
2179 files = set()
2180 if patcher:
2180 if patcher:
2181 return _externalpatch(ui, repo, patcher, patchname, strip,
2181 return _externalpatch(ui, repo, patcher, patchname, strip,
2182 files, similarity)
2182 files, similarity)
2183 return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode,
2183 return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode,
2184 similarity)
2184 similarity)
2185
2185
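# Editor's note: a minimal usage sketch, not part of the original file. The
# 'ui' and 'repo' objects are assumed to come from a Mercurial command
# context, and 'fix.patch' is a hypothetical on-disk patch file.
#
# fuzz = patch(ui, repo, 'fix.patch', strip=1)
# if fuzz:
#     ui.warn('patch applied with fuzz\n')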
2186 def changedfiles(ui, repo, patchpath, strip=1):
2186 def changedfiles(ui, repo, patchpath, strip=1):
2187 backend = fsbackend(ui, repo.root)
2187 backend = fsbackend(ui, repo.root)
2188 with open(patchpath, 'rb') as fp:
2188 with open(patchpath, 'rb') as fp:
2189 changed = set()
2189 changed = set()
2190 for state, values in iterhunks(fp):
2190 for state, values in iterhunks(fp):
2191 if state == 'file':
2191 if state == 'file':
2192 afile, bfile, first_hunk, gp = values
2192 afile, bfile, first_hunk, gp = values
2193 if gp:
2193 if gp:
2194 gp.path = pathtransform(gp.path, strip - 1, '')[1]
2194 gp.path = pathtransform(gp.path, strip - 1, '')[1]
2195 if gp.oldpath:
2195 if gp.oldpath:
2196 gp.oldpath = pathtransform(gp.oldpath, strip - 1, '')[1]
2196 gp.oldpath = pathtransform(gp.oldpath, strip - 1, '')[1]
2197 else:
2197 else:
2198 gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
2198 gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
2199 '')
2199 '')
2200 changed.add(gp.path)
2200 changed.add(gp.path)
2201 if gp.op == 'RENAME':
2201 if gp.op == 'RENAME':
2202 changed.add(gp.oldpath)
2202 changed.add(gp.oldpath)
2203 elif state not in ('hunk', 'git'):
2203 elif state not in ('hunk', 'git'):
2204 raise error.Abort(_('unsupported parser state: %s') % state)
2204 raise error.Abort(_('unsupported parser state: %s') % state)
2205 return changed
2205 return changed
2206
2206
2207 class GitDiffRequired(Exception):
2207 class GitDiffRequired(Exception):
2208 pass
2208 pass
2209
2209
2210 def diffallopts(ui, opts=None, untrusted=False, section='diff'):
2210 def diffallopts(ui, opts=None, untrusted=False, section='diff'):
2211 '''return diffopts with all features supported and parsed'''
2211 '''return diffopts with all features supported and parsed'''
2212 return difffeatureopts(ui, opts=opts, untrusted=untrusted, section=section,
2212 return difffeatureopts(ui, opts=opts, untrusted=untrusted, section=section,
2213 git=True, whitespace=True, formatchanging=True)
2213 git=True, whitespace=True, formatchanging=True)
2214
2214
2215 diffopts = diffallopts
2215 diffopts = diffallopts
2216
2216
2217 def difffeatureopts(ui, opts=None, untrusted=False, section='diff', git=False,
2217 def difffeatureopts(ui, opts=None, untrusted=False, section='diff', git=False,
2218 whitespace=False, formatchanging=False):
2218 whitespace=False, formatchanging=False):
2219 '''return diffopts with only opted-in features parsed
2219 '''return diffopts with only opted-in features parsed
2220
2220
2221 Features:
2221 Features:
2222 - git: git-style diffs
2222 - git: git-style diffs
2223 - whitespace: whitespace options like ignoreblanklines and ignorews
2223 - whitespace: whitespace options like ignoreblanklines and ignorews
2224 - formatchanging: options that will likely break or cause correctness issues
2224 - formatchanging: options that will likely break or cause correctness issues
2225 with most diff parsers
2225 with most diff parsers
2226 '''
2226 '''
2227 def get(key, name=None, getter=ui.configbool, forceplain=None):
2227 def get(key, name=None, getter=ui.configbool, forceplain=None):
2228 if opts:
2228 if opts:
2229 v = opts.get(key)
2229 v = opts.get(key)
2230 # diffopts flags are either None-default (which is passed
2230 # diffopts flags are either None-default (which is passed
2231 # through unchanged, so we can identify unset values), or
2231 # through unchanged, so we can identify unset values), or
2232 # some other falsy default (e.g. --unified, which defaults
2232 # some other falsy default (e.g. --unified, which defaults
2233 # to an empty string). We only want to override the config
2233 # to an empty string). We only want to override the config
2234 # entries from hgrc with command line values if they
2234 # entries from hgrc with command line values if they
2235 # appear to have been set, which is any truthy value,
2235 # appear to have been set, which is any truthy value,
2236 # True, or False.
2236 # True, or False.
2237 if v or isinstance(v, bool):
2237 if v or isinstance(v, bool):
2238 return v
2238 return v
2239 if forceplain is not None and ui.plain():
2239 if forceplain is not None and ui.plain():
2240 return forceplain
2240 return forceplain
2241 return getter(section, name or key, untrusted=untrusted)
2241 return getter(section, name or key, untrusted=untrusted)
2242
2242
2243 # core options, expected to be understood by every diff parser
2243 # core options, expected to be understood by every diff parser
2244 buildopts = {
2244 buildopts = {
2245 'nodates': get('nodates'),
2245 'nodates': get('nodates'),
2246 'showfunc': get('show_function', 'showfunc'),
2246 'showfunc': get('show_function', 'showfunc'),
2247 'context': get('unified', getter=ui.config),
2247 'context': get('unified', getter=ui.config),
2248 }
2248 }
2249
2249
2250 if git:
2250 if git:
2251 buildopts['git'] = get('git')
2251 buildopts['git'] = get('git')
2252
2252
2253 # since this is in the experimental section, we need to call
2253 # since this is in the experimental section, we need to call
2254 # ui.configbool directly
2254 # ui.configbool directly
2255 buildopts['showsimilarity'] = ui.configbool('experimental',
2255 buildopts['showsimilarity'] = ui.configbool('experimental',
2256 'extendedheader.similarity')
2256 'extendedheader.similarity')
2257
2257
2258 # need to inspect the ui object instead of using get() since we want to
2258 # need to inspect the ui object instead of using get() since we want to
2259 # test for an int
2259 # test for an int
2260 hconf = ui.config('experimental', 'extendedheader.index')
2260 hconf = ui.config('experimental', 'extendedheader.index')
2261 if hconf is not None:
2261 if hconf is not None:
2262 hlen = None
2262 hlen = None
2263 try:
2263 try:
2264 # the hash config could be an integer (for length of hash) or a
2264 # the hash config could be an integer (for length of hash) or a
2265 # word (e.g. short, full, none)
2265 # word (e.g. short, full, none)
2266 hlen = int(hconf)
2266 hlen = int(hconf)
2267 if hlen < 0 or hlen > 40:
2267 if hlen < 0 or hlen > 40:
2268 msg = _("invalid length for extendedheader.index: '%d'\n")
2268 msg = _("invalid length for extendedheader.index: '%d'\n")
2269 ui.warn(msg % hlen)
2269 ui.warn(msg % hlen)
2270 except ValueError:
2270 except ValueError:
2271 # default value
2271 # default value
2272 if hconf == 'short' or hconf == '':
2272 if hconf == 'short' or hconf == '':
2273 hlen = 12
2273 hlen = 12
2274 elif hconf == 'full':
2274 elif hconf == 'full':
2275 hlen = 40
2275 hlen = 40
2276 elif hconf != 'none':
2276 elif hconf != 'none':
2277 msg = _("invalid value for extendedheader.index: '%s'\n")
2277 msg = _("invalid value for extendedheader.index: '%s'\n")
2278 ui.warn(msg % hconf)
2278 ui.warn(msg % hconf)
2279 finally:
2279 finally:
2280 buildopts['index'] = hlen
2280 buildopts['index'] = hlen
2281
2281
2282 if whitespace:
2282 if whitespace:
2283 buildopts['ignorews'] = get('ignore_all_space', 'ignorews')
2283 buildopts['ignorews'] = get('ignore_all_space', 'ignorews')
2284 buildopts['ignorewsamount'] = get('ignore_space_change',
2284 buildopts['ignorewsamount'] = get('ignore_space_change',
2285 'ignorewsamount')
2285 'ignorewsamount')
2286 buildopts['ignoreblanklines'] = get('ignore_blank_lines',
2286 buildopts['ignoreblanklines'] = get('ignore_blank_lines',
2287 'ignoreblanklines')
2287 'ignoreblanklines')
2288 buildopts['ignorewseol'] = get('ignore_space_at_eol', 'ignorewseol')
2288 buildopts['ignorewseol'] = get('ignore_space_at_eol', 'ignorewseol')
2289 if formatchanging:
2289 if formatchanging:
2290 buildopts['text'] = opts and opts.get('text')
2290 buildopts['text'] = opts and opts.get('text')
2291 binary = None if opts is None else opts.get('binary')
2291 binary = None if opts is None else opts.get('binary')
2292 buildopts['nobinary'] = (not binary if binary is not None
2292 buildopts['nobinary'] = (not binary if binary is not None
2293 else get('nobinary', forceplain=False))
2293 else get('nobinary', forceplain=False))
2294 buildopts['noprefix'] = get('noprefix', forceplain=False)
2294 buildopts['noprefix'] = get('noprefix', forceplain=False)
2295
2295
2296 return mdiff.diffopts(**pycompat.strkwargs(buildopts))
2296 return mdiff.diffopts(**pycompat.strkwargs(buildopts))
2297
2297
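# Editor's note: a hedged sketch, not part of the original file. A caller
# that wants git-style and whitespace handling but no format-changing
# options (assuming a 'ui' object is available) would do:
#
# dopts = difffeatureopts(ui, git=True, whitespace=True)
#
# diffallopts() above is the convenience form that opts into every feature.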
2298 def diff(repo, node1=None, node2=None, match=None, changes=None,
2298 def diff(repo, node1=None, node2=None, match=None, changes=None,
2299 opts=None, losedatafn=None, prefix='', relroot='', copy=None,
2299 opts=None, losedatafn=None, prefix='', relroot='', copy=None,
2300 hunksfilterfn=None):
2300 hunksfilterfn=None):
2301 '''yields diff of changes to files between two nodes, or node and
2301 '''yields diff of changes to files between two nodes, or node and
2302 working directory.
2302 working directory.
2303
2303
2304 if node1 is None, use first dirstate parent instead.
2304 if node1 is None, use first dirstate parent instead.
2305 if node2 is None, compare node1 with working directory.
2305 if node2 is None, compare node1 with working directory.
2306
2306
2307 losedatafn(**kwarg) is a callable run when opts.upgrade=True and
2307 losedatafn(**kwarg) is a callable run when opts.upgrade=True and
2308 every time some change cannot be represented with the current
2308 every time some change cannot be represented with the current
2309 patch format. Return False to upgrade to git patch format, True to
2309 patch format. Return False to upgrade to git patch format, True to
2310 accept the loss or raise an exception to abort the diff. It is
2310 accept the loss or raise an exception to abort the diff. It is
2311 called with the name of the current file being diffed as 'fn'. If set
2311 called with the name of the current file being diffed as 'fn'. If set
2312 to None, patches will always be upgraded to git format when
2312 to None, patches will always be upgraded to git format when
2313 necessary.
2313 necessary.
2314
2314
2315 prefix is a filename prefix that is prepended to all filenames on
2315 prefix is a filename prefix that is prepended to all filenames on
2316 display (used for subrepos).
2316 display (used for subrepos).
2317
2317
2318 relroot, if not empty, must be normalized with a trailing /. Any match
2318 relroot, if not empty, must be normalized with a trailing /. Any match
2319 patterns that fall outside it will be ignored.
2319 patterns that fall outside it will be ignored.
2320
2320
2321 copy, if not empty, should contain mappings {dst@y: src@x} of copy
2321 copy, if not empty, should contain mappings {dst@y: src@x} of copy
2322 information.
2322 information.
2323
2323
2324 hunksfilterfn, if not None, should be a function taking a filectx and
2324 hunksfilterfn, if not None, should be a function taking a filectx and
2325 hunks generator that may yield filtered hunks.
2325 hunks generator that may yield filtered hunks.
2326 '''
2326 '''
2327 for fctx1, fctx2, hdr, hunks in diffhunks(
2327 for fctx1, fctx2, hdr, hunks in diffhunks(
2328 repo, node1=node1, node2=node2,
2328 repo, node1=node1, node2=node2,
2329 match=match, changes=changes, opts=opts,
2329 match=match, changes=changes, opts=opts,
2330 losedatafn=losedatafn, prefix=prefix, relroot=relroot, copy=copy,
2330 losedatafn=losedatafn, prefix=prefix, relroot=relroot, copy=copy,
2331 ):
2331 ):
2332 if hunksfilterfn is not None:
2332 if hunksfilterfn is not None:
2333 # If the file has been removed, fctx2 is None; but this should
2333 # If the file has been removed, fctx2 is None; but this should
2334 # not occur here since we catch removed files early in
2334 # not occur here since we catch removed files early in
2335 # cmdutil.getloglinerangerevs() for 'hg log -L'.
2335 # cmdutil.getloglinerangerevs() for 'hg log -L'.
2336 assert fctx2 is not None, \
2336 assert fctx2 is not None, \
2337 'fctx2 unexpectedly None in diff hunks filtering'
2337 'fctx2 unexpectedly None in diff hunks filtering'
2338 hunks = hunksfilterfn(fctx2, hunks)
2338 hunks = hunksfilterfn(fctx2, hunks)
2339 text = ''.join(sum((list(hlines) for hrange, hlines in hunks), []))
2339 text = ''.join(sum((list(hlines) for hrange, hlines in hunks), []))
2340 if hdr and (text or len(hdr) > 1):
2340 if hdr and (text or len(hdr) > 1):
2341 yield '\n'.join(hdr) + '\n'
2341 yield '\n'.join(hdr) + '\n'
2342 if text:
2342 if text:
2343 yield text
2343 yield text
2344
2344
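# Editor's note: a minimal sketch (not in the original file) of driving the
# generator above; 'repo' is assumed to come from a command context, and
# scmutil.matchall() supplies an all-inclusive matcher as real callers do.
#
# m = scmutil.matchall(repo)
# for chunk in diff(repo, match=m, opts=diffallopts(repo.ui)):
#     repo.ui.write(chunk)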
2345 def diffhunks(repo, node1=None, node2=None, match=None, changes=None,
2345 def diffhunks(repo, node1=None, node2=None, match=None, changes=None,
2346 opts=None, losedatafn=None, prefix='', relroot='', copy=None):
2346 opts=None, losedatafn=None, prefix='', relroot='', copy=None):
2347 """Yield diff of changes to files in the form of (`header`, `hunks`) tuples
2347 """Yield diff of changes to files in the form of (`header`, `hunks`) tuples
2348 where `header` is a list of diff headers and `hunks` is an iterable of
2348 where `header` is a list of diff headers and `hunks` is an iterable of
2349 (`hunkrange`, `hunklines`) tuples.
2349 (`hunkrange`, `hunklines`) tuples.
2350
2350
2351 See diff() for the meaning of parameters.
2351 See diff() for the meaning of parameters.
2352 """
2352 """
2353
2353
2354 if opts is None:
2354 if opts is None:
2355 opts = mdiff.defaultopts
2355 opts = mdiff.defaultopts
2356
2356
2357 if not node1 and not node2:
2357 if not node1 and not node2:
2358 node1 = repo.dirstate.p1()
2358 node1 = repo.dirstate.p1()
2359
2359
2360 def lrugetfilectx():
2360 def lrugetfilectx():
2361 cache = {}
2361 cache = {}
2362 order = collections.deque()
2362 order = collections.deque()
2363 def getfilectx(f, ctx):
2363 def getfilectx(f, ctx):
2364 fctx = ctx.filectx(f, filelog=cache.get(f))
2364 fctx = ctx.filectx(f, filelog=cache.get(f))
2365 if f not in cache:
2365 if f not in cache:
2366 if len(cache) > 20:
2366 if len(cache) > 20:
2367 del cache[order.popleft()]
2367 del cache[order.popleft()]
2368 cache[f] = fctx.filelog()
2368 cache[f] = fctx.filelog()
2369 else:
2369 else:
2370 order.remove(f)
2370 order.remove(f)
2371 order.append(f)
2371 order.append(f)
2372 return fctx
2372 return fctx
2373 return getfilectx
2373 return getfilectx
2374 getfilectx = lrugetfilectx()
2374 getfilectx = lrugetfilectx()
2375
2375
2376 ctx1 = repo[node1]
2376 ctx1 = repo[node1]
2377 ctx2 = repo[node2]
2377 ctx2 = repo[node2]
2378
2378
2379 relfiltered = False
2379 relfiltered = False
2380 if relroot != '' and match.always():
2380 if relroot != '' and match.always():
2381 # as a special case, create a new matcher with just the relroot
2381 # as a special case, create a new matcher with just the relroot
2382 pats = [relroot]
2382 pats = [relroot]
2383 match = scmutil.match(ctx2, pats, default='path')
2383 match = scmutil.match(ctx2, pats, default='path')
2384 relfiltered = True
2384 relfiltered = True
2385
2385
2386 if not changes:
2386 if not changes:
2387 changes = repo.status(ctx1, ctx2, match=match)
2387 changes = repo.status(ctx1, ctx2, match=match)
2388 modified, added, removed = changes[:3]
2388 modified, added, removed = changes[:3]
2389
2389
2390 if not modified and not added and not removed:
2390 if not modified and not added and not removed:
2391 return []
2391 return []
2392
2392
2393 if repo.ui.debugflag:
2393 if repo.ui.debugflag:
2394 hexfunc = hex
2394 hexfunc = hex
2395 else:
2395 else:
2396 hexfunc = short
2396 hexfunc = short
2397 revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]
2397 revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]
2398
2398
2399 if copy is None:
2399 if copy is None:
2400 copy = {}
2400 copy = {}
2401 if opts.git or opts.upgrade:
2401 if opts.git or opts.upgrade:
2402 copy = copies.pathcopies(ctx1, ctx2, match=match)
2402 copy = copies.pathcopies(ctx1, ctx2, match=match)
2403
2403
2404 if relroot is not None:
2404 if relroot is not None:
2405 if not relfiltered:
2405 if not relfiltered:
2406 # XXX this would ideally be done in the matcher, but that is
2406 # XXX this would ideally be done in the matcher, but that is
2407 # generally meant to 'or' patterns, not 'and' them. In this case we
2407 # generally meant to 'or' patterns, not 'and' them. In this case we
2408 # need to 'and' all the patterns from the matcher with relroot.
2408 # need to 'and' all the patterns from the matcher with relroot.
2409 def filterrel(l):
2409 def filterrel(l):
2410 return [f for f in l if f.startswith(relroot)]
2410 return [f for f in l if f.startswith(relroot)]
2411 modified = filterrel(modified)
2411 modified = filterrel(modified)
2412 added = filterrel(added)
2412 added = filterrel(added)
2413 removed = filterrel(removed)
2413 removed = filterrel(removed)
2414 relfiltered = True
2414 relfiltered = True
2415 # filter out copies where either side isn't inside the relative root
2415 # filter out copies where either side isn't inside the relative root
2416 copy = dict(((dst, src) for (dst, src) in copy.iteritems()
2416 copy = dict(((dst, src) for (dst, src) in copy.iteritems()
2417 if dst.startswith(relroot)
2417 if dst.startswith(relroot)
2418 and src.startswith(relroot)))
2418 and src.startswith(relroot)))
2419
2419
2420 modifiedset = set(modified)
2420 modifiedset = set(modified)
2421 addedset = set(added)
2421 addedset = set(added)
2422 removedset = set(removed)
2422 removedset = set(removed)
2423 for f in modified:
2423 for f in modified:
2424 if f not in ctx1:
2424 if f not in ctx1:
2425 # Fix up added, since merged-in additions appear as
2425 # Fix up added, since merged-in additions appear as
2426 # modifications during merges
2426 # modifications during merges
2427 modifiedset.remove(f)
2427 modifiedset.remove(f)
2428 addedset.add(f)
2428 addedset.add(f)
2429 for f in removed:
2429 for f in removed:
2430 if f not in ctx1:
2430 if f not in ctx1:
2431 # Merged-in additions that are then removed are reported as removed.
2431 # Merged-in additions that are then removed are reported as removed.
2432 # They are not in ctx1, so we don't want to show them in the diff.
2432 # They are not in ctx1, so we don't want to show them in the diff.
2433 removedset.remove(f)
2433 removedset.remove(f)
2434 modified = sorted(modifiedset)
2434 modified = sorted(modifiedset)
2435 added = sorted(addedset)
2435 added = sorted(addedset)
2436 removed = sorted(removedset)
2436 removed = sorted(removedset)
2437 for dst, src in copy.items():
2437 for dst, src in copy.items():
2438 if src not in ctx1:
2438 if src not in ctx1:
2439 # Files merged in during a merge and then copied/renamed are
2439 # Files merged in during a merge and then copied/renamed are
2440 # reported as copies. We want to show them in the diff as additions.
2440 # reported as copies. We want to show them in the diff as additions.
2441 del copy[dst]
2441 del copy[dst]
2442
2442
2443 def difffn(opts, losedata):
2443 def difffn(opts, losedata):
2444 return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
2444 return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
2445 copy, getfilectx, opts, losedata, prefix, relroot)
2445 copy, getfilectx, opts, losedata, prefix, relroot)
2446 if opts.upgrade and not opts.git:
2446 if opts.upgrade and not opts.git:
2447 try:
2447 try:
2448 def losedata(fn):
2448 def losedata(fn):
2449 if not losedatafn or not losedatafn(fn=fn):
2449 if not losedatafn or not losedatafn(fn=fn):
2450 raise GitDiffRequired
2450 raise GitDiffRequired
2451 # Buffer the whole output until we are sure it can be generated
2451 # Buffer the whole output until we are sure it can be generated
2452 return list(difffn(opts.copy(git=False), losedata))
2452 return list(difffn(opts.copy(git=False), losedata))
2453 except GitDiffRequired:
2453 except GitDiffRequired:
2454 return difffn(opts.copy(git=True), None)
2454 return difffn(opts.copy(git=True), None)
2455 else:
2455 else:
2456 return difffn(opts, None)
2456 return difffn(opts, None)
2457
2457
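The (header, hunks) pairs yielded above can be post-processed before any text is rendered; a minimal sketch (assuming repo is an open repository and the default diff options) that counts hunks per changed file:

    # Hedged sketch: walk diffhunks() output and count hunks per file.
    for fctx1, fctx2, header, hunks in diffhunks(repo):
        path = (fctx2 or fctx1).path()   # fctx2 is None for removed files
        nhunks = sum(1 for hunkrange, hunklines in hunks)
        repo.ui.write('%s: %d hunk(s)\n' % (path, nhunks))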
2458 def difflabel(func, *args, **kw):
2458 def difflabel(func, *args, **kw):
2459 '''yields 2-tuples of (output, label) based on the output of func()'''
2459 '''yields 2-tuples of (output, label) based on the output of func()'''
2460 headprefixes = [('diff', 'diff.diffline'),
2460 headprefixes = [('diff', 'diff.diffline'),
2461 ('copy', 'diff.extended'),
2461 ('copy', 'diff.extended'),
2462 ('rename', 'diff.extended'),
2462 ('rename', 'diff.extended'),
2463 ('old', 'diff.extended'),
2463 ('old', 'diff.extended'),
2464 ('new', 'diff.extended'),
2464 ('new', 'diff.extended'),
2465 ('deleted', 'diff.extended'),
2465 ('deleted', 'diff.extended'),
2466 ('index', 'diff.extended'),
2466 ('index', 'diff.extended'),
2467 ('similarity', 'diff.extended'),
2467 ('similarity', 'diff.extended'),
2468 ('---', 'diff.file_a'),
2468 ('---', 'diff.file_a'),
2469 ('+++', 'diff.file_b')]
2469 ('+++', 'diff.file_b')]
2470 textprefixes = [('@', 'diff.hunk'),
2470 textprefixes = [('@', 'diff.hunk'),
2471 ('-', 'diff.deleted'),
2471 ('-', 'diff.deleted'),
2472 ('+', 'diff.inserted')]
2472 ('+', 'diff.inserted')]
2473 head = False
2473 head = False
2474 for chunk in func(*args, **kw):
2474 for chunk in func(*args, **kw):
2475 lines = chunk.split('\n')
2475 lines = chunk.split('\n')
2476 for i, line in enumerate(lines):
2476 for i, line in enumerate(lines):
2477 if i != 0:
2477 if i != 0:
2478 yield ('\n', '')
2478 yield ('\n', '')
2479 if head:
2479 if head:
2480 if line.startswith('@'):
2480 if line.startswith('@'):
2481 head = False
2481 head = False
2482 else:
2482 else:
2483 if line and line[0] not in ' +-@\\':
2483 if line and line[0] not in ' +-@\\':
2484 head = True
2484 head = True
2485 stripline = line
2485 stripline = line
2486 diffline = False
2486 diffline = False
2487 if not head and line and line[0] in '+-':
2487 if not head and line and line[0] in '+-':
2488 # highlight tabs and trailing whitespace, but only in
2488 # highlight tabs and trailing whitespace, but only in
2489 # changed lines
2489 # changed lines
2490 stripline = line.rstrip()
2490 stripline = line.rstrip()
2491 diffline = True
2491 diffline = True
2492
2492
2493 prefixes = textprefixes
2493 prefixes = textprefixes
2494 if head:
2494 if head:
2495 prefixes = headprefixes
2495 prefixes = headprefixes
2496 for prefix, label in prefixes:
2496 for prefix, label in prefixes:
2497 if stripline.startswith(prefix):
2497 if stripline.startswith(prefix):
2498 if diffline:
2498 if diffline:
2499 for token in tabsplitter.findall(stripline):
2499 for token in tabsplitter.findall(stripline):
2500 if '\t' == token[0]:
2500 if '\t' == token[0]:
2501 yield (token, 'diff.tab')
2501 yield (token, 'diff.tab')
2502 else:
2502 else:
2503 yield (token, label)
2503 yield (token, label)
2504 else:
2504 else:
2505 yield (stripline, label)
2505 yield (stripline, label)
2506 break
2506 break
2507 else:
2507 else:
2508 yield (line, '')
2508 yield (line, '')
2509 if line != stripline:
2509 if line != stripline:
2510 yield (line[len(stripline):], 'diff.trailingwhitespace')
2510 yield (line[len(stripline):], 'diff.trailingwhitespace')
2511
2511
2512 def diffui(*args, **kw):
2512 def diffui(*args, **kw):
2513 '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
2513 '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
2514 return difflabel(diff, *args, **kw)
2514 return difflabel(diff, *args, **kw)
2515
2515
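Callers that want colored output normally go through this wrapper rather than diff() itself; the intended pattern is roughly (a sketch, assuming a configured repository ui):

    # Hedged sketch: write labelled chunks so the ui layer can colorize them.
    for chunk, label in diffui(repo, node1=None, node2=None):
        repo.ui.write(chunk, label=label)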
2516 def _filepairs(modified, added, removed, copy, opts):
2516 def _filepairs(modified, added, removed, copy, opts):
2517 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2517 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2518 before and f2 is the name after. For added files, f1 will be None,
2518 before and f2 is the name after. For added files, f1 will be None,
2519 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2519 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2520 or 'rename' (the latter two only if opts.git is set).'''
2520 or 'rename' (the latter two only if opts.git is set).'''
2521 gone = set()
2521 gone = set()
2522
2522
2523 copyto = dict([(v, k) for k, v in copy.items()])
2523 copyto = dict([(v, k) for k, v in copy.items()])
2524
2524
2525 addedset, removedset = set(added), set(removed)
2525 addedset, removedset = set(added), set(removed)
2526
2526
2527 for f in sorted(modified + added + removed):
2527 for f in sorted(modified + added + removed):
2528 copyop = None
2528 copyop = None
2529 f1, f2 = f, f
2529 f1, f2 = f, f
2530 if f in addedset:
2530 if f in addedset:
2531 f1 = None
2531 f1 = None
2532 if f in copy:
2532 if f in copy:
2533 if opts.git:
2533 if opts.git:
2534 f1 = copy[f]
2534 f1 = copy[f]
2535 if f1 in removedset and f1 not in gone:
2535 if f1 in removedset and f1 not in gone:
2536 copyop = 'rename'
2536 copyop = 'rename'
2537 gone.add(f1)
2537 gone.add(f1)
2538 else:
2538 else:
2539 copyop = 'copy'
2539 copyop = 'copy'
2540 elif f in removedset:
2540 elif f in removedset:
2541 f2 = None
2541 f2 = None
2542 if opts.git:
2542 if opts.git:
2543 # have we already reported a copy above?
2543 # have we already reported a copy above?
2544 if (f in copyto and copyto[f] in addedset
2544 if (f in copyto and copyto[f] in addedset
2545 and copy[copyto[f]] == f):
2545 and copy[copyto[f]] == f):
2546 continue
2546 continue
2547 yield f1, f2, copyop
2547 yield f1, f2, copyop
2548
2548
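A small worked example of the copy/rename pairing above (a sketch; the only assumption is that mdiff.diffopts(git=True) yields an options object with git set):

    # Hedged sketch: 'a' removed and re-added as 'b' with copy metadata is
    # reported as a single rename pair when git-style diffs are requested.
    opts = mdiff.diffopts(git=True)
    pairs = list(_filepairs(modified=[], added=['b'], removed=['a'],
                            copy={'b': 'a'}, opts=opts))
    # pairs == [('a', 'b', 'rename')]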
2549 def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
2549 def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
2550 copy, getfilectx, opts, losedatafn, prefix, relroot):
2550 copy, getfilectx, opts, losedatafn, prefix, relroot):
2551 '''given input data, generate a diff and yield it in blocks
2551 '''given input data, generate a diff and yield it in blocks
2552
2552
2553 If generating a diff would lose data like flags or binary data and
2553 If generating a diff would lose data like flags or binary data and
2554 losedatafn is not None, it will be called.
2554 losedatafn is not None, it will be called.
2555
2555
2556 relroot is removed and prefix is added to every path in the diff output.
2556 relroot is removed and prefix is added to every path in the diff output.
2557
2557
2558 If relroot is not empty, this function expects every path in modified,
2558 If relroot is not empty, this function expects every path in modified,
2559 added, removed and copy to start with it.'''
2559 added, removed and copy to start with it.'''
2560
2560
2561 def gitindex(text):
2561 def gitindex(text):
2562 if not text:
2562 if not text:
2563 text = ""
2563 text = ""
2564 l = len(text)
2564 l = len(text)
2565 s = hashlib.sha1('blob %d\0' % l)
2565 s = hashlib.sha1('blob %d\0' % l)
2566 s.update(text)
2566 s.update(text)
2567 return s.hexdigest()
2567 return s.hexdigest()
2568
2568
2569 if opts.noprefix:
2569 if opts.noprefix:
2570 aprefix = bprefix = ''
2570 aprefix = bprefix = ''
2571 else:
2571 else:
2572 aprefix = 'a/'
2572 aprefix = 'a/'
2573 bprefix = 'b/'
2573 bprefix = 'b/'
2574
2574
2575 def diffline(f, revs):
2575 def diffline(f, revs):
2576 revinfo = ' '.join(["-r %s" % rev for rev in revs])
2576 revinfo = ' '.join(["-r %s" % rev for rev in revs])
2577 return 'diff %s %s' % (revinfo, f)
2577 return 'diff %s %s' % (revinfo, f)
2578
2578
2579 def isempty(fctx):
2579 def isempty(fctx):
2580 return fctx is None or fctx.size() == 0
2580 return fctx is None or fctx.size() == 0
2581
2581
2582 date1 = util.datestr(ctx1.date())
2582 date1 = util.datestr(ctx1.date())
2583 date2 = util.datestr(ctx2.date())
2583 date2 = util.datestr(ctx2.date())
2584
2584
2585 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
2585 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
2586
2586
2587 if relroot != '' and (repo.ui.configbool('devel', 'all-warnings')
2587 if relroot != '' and (repo.ui.configbool('devel', 'all-warnings')
2588 or repo.ui.configbool('devel', 'check-relroot')):
2588 or repo.ui.configbool('devel', 'check-relroot')):
2589 for f in modified + added + removed + list(copy) + list(copy.values()):
2589 for f in modified + added + removed + list(copy) + list(copy.values()):
2590 if f is not None and not f.startswith(relroot):
2590 if f is not None and not f.startswith(relroot):
2591 raise AssertionError(
2591 raise AssertionError(
2592 "file %s doesn't start with relroot %s" % (f, relroot))
2592 "file %s doesn't start with relroot %s" % (f, relroot))
2593
2593
2594 for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts):
2594 for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts):
2595 content1 = None
2595 content1 = None
2596 content2 = None
2596 content2 = None
2597 fctx1 = None
2597 fctx1 = None
2598 fctx2 = None
2598 fctx2 = None
2599 flag1 = None
2599 flag1 = None
2600 flag2 = None
2600 flag2 = None
2601 if f1:
2601 if f1:
2602 fctx1 = getfilectx(f1, ctx1)
2602 fctx1 = getfilectx(f1, ctx1)
2603 if opts.git or losedatafn:
2603 if opts.git or losedatafn:
2604 flag1 = ctx1.flags(f1)
2604 flag1 = ctx1.flags(f1)
2605 if f2:
2605 if f2:
2606 fctx2 = getfilectx(f2, ctx2)
2606 fctx2 = getfilectx(f2, ctx2)
2607 if opts.git or losedatafn:
2607 if opts.git or losedatafn:
2608 flag2 = ctx2.flags(f2)
2608 flag2 = ctx2.flags(f2)
2609 # if binary is True, output "summary" or "base85", but not "text diff"
2609 # if binary is True, output "summary" or "base85", but not "text diff"
2610 binary = not opts.text and any(f.isbinary()
2610 binary = not opts.text and any(f.isbinary()
2611 for f in [fctx1, fctx2] if f is not None)
2611 for f in [fctx1, fctx2] if f is not None)
2612
2612
2613 if losedatafn and not opts.git:
2613 if losedatafn and not opts.git:
2614 if (binary or
2614 if (binary or
2615 # copy/rename
2615 # copy/rename
2616 f2 in copy or
2616 f2 in copy or
2617 # empty file creation
2617 # empty file creation
2618 (not f1 and isempty(fctx2)) or
2618 (not f1 and isempty(fctx2)) or
2619 # empty file deletion
2619 # empty file deletion
2620 (isempty(fctx1) and not f2) or
2620 (isempty(fctx1) and not f2) or
2621 # create with flags
2621 # create with flags
2622 (not f1 and flag2) or
2622 (not f1 and flag2) or
2623 # change flags
2623 # change flags
2624 (f1 and f2 and flag1 != flag2)):
2624 (f1 and f2 and flag1 != flag2)):
2625 losedatafn(f2 or f1)
2625 losedatafn(f2 or f1)
2626
2626
2627 path1 = f1 or f2
2627 path1 = f1 or f2
2628 path2 = f2 or f1
2628 path2 = f2 or f1
2629 path1 = posixpath.join(prefix, path1[len(relroot):])
2629 path1 = posixpath.join(prefix, path1[len(relroot):])
2630 path2 = posixpath.join(prefix, path2[len(relroot):])
2630 path2 = posixpath.join(prefix, path2[len(relroot):])
2631 header = []
2631 header = []
2632 if opts.git:
2632 if opts.git:
2633 header.append('diff --git %s%s %s%s' %
2633 header.append('diff --git %s%s %s%s' %
2634 (aprefix, path1, bprefix, path2))
2634 (aprefix, path1, bprefix, path2))
2635 if not f1: # added
2635 if not f1: # added
2636 header.append('new file mode %s' % gitmode[flag2])
2636 header.append('new file mode %s' % gitmode[flag2])
2637 elif not f2: # removed
2637 elif not f2: # removed
2638 header.append('deleted file mode %s' % gitmode[flag1])
2638 header.append('deleted file mode %s' % gitmode[flag1])
2639 else: # modified/copied/renamed
2639 else: # modified/copied/renamed
2640 mode1, mode2 = gitmode[flag1], gitmode[flag2]
2640 mode1, mode2 = gitmode[flag1], gitmode[flag2]
2641 if mode1 != mode2:
2641 if mode1 != mode2:
2642 header.append('old mode %s' % mode1)
2642 header.append('old mode %s' % mode1)
2643 header.append('new mode %s' % mode2)
2643 header.append('new mode %s' % mode2)
2644 if copyop is not None:
2644 if copyop is not None:
2645 if opts.showsimilarity:
2645 if opts.showsimilarity:
2646 sim = similar.score(ctx1[path1], ctx2[path2]) * 100
2646 sim = similar.score(ctx1[path1], ctx2[path2]) * 100
2647 header.append('similarity index %d%%' % sim)
2647 header.append('similarity index %d%%' % sim)
2648 header.append('%s from %s' % (copyop, path1))
2648 header.append('%s from %s' % (copyop, path1))
2649 header.append('%s to %s' % (copyop, path2))
2649 header.append('%s to %s' % (copyop, path2))
2650 elif revs and not repo.ui.quiet:
2650 elif revs and not repo.ui.quiet:
2651 header.append(diffline(path1, revs))
2651 header.append(diffline(path1, revs))
2652
2652
2653 # fctx.is | diffopts | what to | is fctx.data()
2653 # fctx.is | diffopts | what to | is fctx.data()
2654 # binary() | text nobinary git index | output? | outputted?
2654 # binary() | text nobinary git index | output? | outputted?
2655 # ------------------------------------|----------------------------
2655 # ------------------------------------|----------------------------
2656 # yes | no no no * | summary | no
2656 # yes | no no no * | summary | no
2657 # yes | no no yes * | base85 | yes
2657 # yes | no no yes * | base85 | yes
2658 # yes | no yes no * | summary | no
2658 # yes | no yes no * | summary | no
2659 # yes | no yes yes 0 | summary | no
2659 # yes | no yes yes 0 | summary | no
2660 # yes | no yes yes >0 | summary | semi [1]
2660 # yes | no yes yes >0 | summary | semi [1]
2661 # yes | yes * * * | text diff | yes
2661 # yes | yes * * * | text diff | yes
2662 # no | * * * * | text diff | yes
2662 # no | * * * * | text diff | yes
2663 # [1]: hash(fctx.data()) is outputted, so fctx.data() cannot be faked
2663 # [1]: hash(fctx.data()) is outputted, so fctx.data() cannot be faked
2664 if binary and (not opts.git or (opts.git and opts.nobinary and not
2664 if binary and (not opts.git or (opts.git and opts.nobinary and not
2665 opts.index)):
2665 opts.index)):
2666 # fast path: no binary content will be displayed, content1 and
2666 # fast path: no binary content will be displayed, content1 and
2667 # content2 are only used for an equivalence test. cmp() could have a
2667 # content2 are only used for an equivalence test. cmp() could have a
2668 # fast path.
2668 # fast path.
2669 if fctx1 is not None:
2669 if fctx1 is not None:
2670 content1 = b'\0'
2670 content1 = b'\0'
2671 if fctx2 is not None:
2671 if fctx2 is not None:
2672 if fctx1 is not None and not fctx1.cmp(fctx2):
2672 if fctx1 is not None and not fctx1.cmp(fctx2):
2673 content2 = b'\0' # not different
2673 content2 = b'\0' # not different
2674 else:
2674 else:
2675 content2 = b'\0\0'
2675 content2 = b'\0\0'
2676 else:
2676 else:
2677 # normal path: load contents
2677 # normal path: load contents
2678 if fctx1 is not None:
2678 if fctx1 is not None:
2679 content1 = fctx1.data()
2679 content1 = fctx1.data()
2680 if fctx2 is not None:
2680 if fctx2 is not None:
2681 content2 = fctx2.data()
2681 content2 = fctx2.data()
2682
2682
2683 if binary and opts.git and not opts.nobinary:
2683 if binary and opts.git and not opts.nobinary:
2684 text = mdiff.b85diff(content1, content2)
2684 text = mdiff.b85diff(content1, content2)
2685 if text:
2685 if text:
2686 header.append('index %s..%s' %
2686 header.append('index %s..%s' %
2687 (gitindex(content1), gitindex(content2)))
2687 (gitindex(content1), gitindex(content2)))
2688 hunks = (None, [text]),
2688 hunks = (None, [text]),
2689 else:
2689 else:
2690 if opts.git and opts.index > 0:
2690 if opts.git and opts.index > 0:
2691 flag = flag1
2691 flag = flag1
2692 if flag is None:
2692 if flag is None:
2693 flag = flag2
2693 flag = flag2
2694 header.append('index %s..%s %s' %
2694 header.append('index %s..%s %s' %
2695 (gitindex(content1)[0:opts.index],
2695 (gitindex(content1)[0:opts.index],
2696 gitindex(content2)[0:opts.index],
2696 gitindex(content2)[0:opts.index],
2697 gitmode[flag]))
2697 gitmode[flag]))
2698
2698
2699 uheaders, hunks = mdiff.unidiff(content1, date1,
2699 uheaders, hunks = mdiff.unidiff(content1, date1,
2700 content2, date2,
2700 content2, date2,
2701 path1, path2, opts=opts)
2701 path1, path2, opts=opts)
2702 header.extend(uheaders)
2702 header.extend(uheaders)
2703 yield fctx1, fctx2, header, hunks
2703 yield fctx1, fctx2, header, hunks
2704
2704
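One detail worth calling out: the gitindex() helper defined at the top of trydiff() hashes content exactly the way git hashes a blob object ('blob <size>\0' followed by the data), so the 'index' lines match what git itself would show. A self-contained check (only hashlib assumed):

    import hashlib
    # An empty file hashes to git's well-known empty-blob id.
    empty = hashlib.sha1(b'blob 0\0')
    assert empty.hexdigest() == 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'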
2705 def diffstatsum(stats):
2705 def diffstatsum(stats):
2706 maxfile, maxtotal, addtotal, removetotal, binary = 0, 0, 0, 0, False
2706 maxfile, maxtotal, addtotal, removetotal, binary = 0, 0, 0, 0, False
2707 for f, a, r, b in stats:
2707 for f, a, r, b in stats:
2708 maxfile = max(maxfile, encoding.colwidth(f))
2708 maxfile = max(maxfile, encoding.colwidth(f))
2709 maxtotal = max(maxtotal, a + r)
2709 maxtotal = max(maxtotal, a + r)
2710 addtotal += a
2710 addtotal += a
2711 removetotal += r
2711 removetotal += r
2712 binary = binary or b
2712 binary = binary or b
2713
2713
2714 return maxfile, maxtotal, addtotal, removetotal, binary
2714 return maxfile, maxtotal, addtotal, removetotal, binary
2715
2715
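Since this is pure data folding, a tiny worked example shows the shape of the return value (made-up per-file stats):

    # Hedged sketch: two files, one of them binary.
    stats = [('a.txt', 3, 1, False),        # 3 additions, 1 removal
             ('img/logo.png', 0, 0, True)]
    diffstatsum(stats)
    # -> (12, 4, 3, 1, True): widest filename, widest add+remove total,
    #    total additions, total removals, and the "any binary" flag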
2716 def diffstatdata(lines):
2716 def diffstatdata(lines):
2717 diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$')
2717 diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$')
2718
2718
2719 results = []
2719 results = []
2720 filename, adds, removes, isbinary = None, 0, 0, False
2720 filename, adds, removes, isbinary = None, 0, 0, False
2721
2721
2722 def addresult():
2722 def addresult():
2723 if filename:
2723 if filename:
2724 results.append((filename, adds, removes, isbinary))
2724 results.append((filename, adds, removes, isbinary))
2725
2725
2726 # inheader is used to track if a line is in the
2726 # inheader is used to track if a line is in the
2727 # header portion of the diff. This helps properly account
2727 # header portion of the diff. This helps properly account
2728 # for lines that start with '--' or '++'
2728 # for lines that start with '--' or '++'
2729 inheader = False
2729 inheader = False
2730
2730
2731 for line in lines:
2731 for line in lines:
2732 if line.startswith('diff'):
2732 if line.startswith('diff'):
2733 addresult()
2733 addresult()
2734 # starting a new file diff
2734 # starting a new file diff
2735 # set numbers to 0 and reset inheader
2735 # set numbers to 0 and reset inheader
2736 inheader = True
2736 inheader = True
2737 adds, removes, isbinary = 0, 0, False
2737 adds, removes, isbinary = 0, 0, False
2738 if line.startswith('diff --git a/'):
2738 if line.startswith('diff --git a/'):
2739 filename = gitre.search(line).group(2)
2739 filename = gitre.search(line).group(2)
2740 elif line.startswith('diff -r'):
2740 elif line.startswith('diff -r'):
2741 # format: "diff -r ... -r ... filename"
2741 # format: "diff -r ... -r ... filename"
2742 filename = diffre.search(line).group(1)
2742 filename = diffre.search(line).group(1)
2743 elif line.startswith('@@'):
2743 elif line.startswith('@@'):
2744 inheader = False
2744 inheader = False
2745 elif line.startswith('+') and not inheader:
2745 elif line.startswith('+') and not inheader:
2746 adds += 1
2746 adds += 1
2747 elif line.startswith('-') and not inheader:
2747 elif line.startswith('-') and not inheader:
2748 removes += 1
2748 removes += 1
2749 elif (line.startswith('GIT binary patch') or
2749 elif (line.startswith('GIT binary patch') or
2750 line.startswith('Binary file')):
2750 line.startswith('Binary file')):
2751 isbinary = True
2751 isbinary = True
2752 addresult()
2752 addresult()
2753 return results
2753 return results
2754
2754
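A sketch of the parsing above; the 'diff -r' header form is used so the example does not depend on the gitre pattern defined elsewhere in this module:

    # Hedged sketch: one file with two added lines and one removed line.
    lines = ['diff -r aaaaaaaaaaaa -r bbbbbbbbbbbb foo.txt',
             '--- a/foo.txt',           # header lines are not counted ...
             '+++ b/foo.txt',
             '@@ -1,2 +1,3 @@',         # ... because '@@' ends the header
             ' 1',
             '-2',
             '+two',
             '+three']
    diffstatdata(lines)
    # -> [('foo.txt', 2, 1, False)]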
2755 def diffstat(lines, width=80):
2755 def diffstat(lines, width=80):
2756 output = []
2756 output = []
2757 stats = diffstatdata(lines)
2757 stats = diffstatdata(lines)
2758 maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)
2758 maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)
2759
2759
2760 countwidth = len(str(maxtotal))
2760 countwidth = len(str(maxtotal))
2761 if hasbinary and countwidth < 3:
2761 if hasbinary and countwidth < 3:
2762 countwidth = 3
2762 countwidth = 3
2763 graphwidth = width - countwidth - maxname - 6
2763 graphwidth = width - countwidth - maxname - 6
2764 if graphwidth < 10:
2764 if graphwidth < 10:
2765 graphwidth = 10
2765 graphwidth = 10
2766
2766
2767 def scale(i):
2767 def scale(i):
2768 if maxtotal <= graphwidth:
2768 if maxtotal <= graphwidth:
2769 return i
2769 return i
2770 # If diffstat runs out of room it doesn't print anything,
2770 # If diffstat runs out of room it doesn't print anything,
2771 # which isn't very useful, so always print at least one + or -
2771 # which isn't very useful, so always print at least one + or -
2772 # if there were at least some changes.
2772 # if there were at least some changes.
2773 return max(i * graphwidth // maxtotal, int(bool(i)))
2773 return max(i * graphwidth // maxtotal, int(bool(i)))
2774
2774
2775 for filename, adds, removes, isbinary in stats:
2775 for filename, adds, removes, isbinary in stats:
2776 if isbinary:
2776 if isbinary:
2777 count = 'Bin'
2777 count = 'Bin'
2778 else:
2778 else:
2779 count = '%d' % (adds + removes)
2779 count = '%d' % (adds + removes)
2780 pluses = '+' * scale(adds)
2780 pluses = '+' * scale(adds)
2781 minuses = '-' * scale(removes)
2781 minuses = '-' * scale(removes)
2782 output.append(' %s%s | %*s %s%s\n' %
2782 output.append(' %s%s | %*s %s%s\n' %
2783 (filename, ' ' * (maxname - encoding.colwidth(filename)),
2783 (filename, ' ' * (maxname - encoding.colwidth(filename)),
2784 countwidth, count, pluses, minuses))
2784 countwidth, count, pluses, minuses))
2785
2785
2786 if stats:
2786 if stats:
2787 output.append(_(' %d files changed, %d insertions(+), '
2787 output.append(_(' %d files changed, %d insertions(+), '
2788 '%d deletions(-)\n')
2788 '%d deletions(-)\n')
2789 % (len(stats), totaladds, totalremoves))
2789 % (len(stats), totaladds, totalremoves))
2790
2790
2791 return ''.join(output)
2791 return ''.join(output)
2792
2792
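Feeding the same kind of input through diffstat() yields the familiar summary block; continuing the sketch from the diffstatdata() example above:

    # Hedged sketch: prints roughly
    #  foo.txt | 3 ++-
    #  1 files changed, 2 insertions(+), 1 deletions(-)
    print(diffstat(lines, width=40))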
2793 def diffstatui(*args, **kw):
2793 def diffstatui(*args, **kw):
2794 '''like diffstat(), but yields 2-tuples of (output, label) for
2794 '''like diffstat(), but yields 2-tuples of (output, label) for
2795 ui.write()
2795 ui.write()
2796 '''
2796 '''
2797
2797
2798 for line in diffstat(*args, **kw).splitlines():
2798 for line in diffstat(*args, **kw).splitlines():
2799 if line and line[-1] in '+-':
2799 if line and line[-1] in '+-':
2800 name, graph = line.rsplit(' ', 1)
2800 name, graph = line.rsplit(' ', 1)
2801 yield (name + ' ', '')
2801 yield (name + ' ', '')
2802 m = re.search(br'\++', graph)
2802 m = re.search(br'\++', graph)
2803 if m:
2803 if m:
2804 yield (m.group(0), 'diffstat.inserted')
2804 yield (m.group(0), 'diffstat.inserted')
2805 m = re.search(br'-+', graph)
2805 m = re.search(br'-+', graph)
2806 if m:
2806 if m:
2807 yield (m.group(0), 'diffstat.deleted')
2807 yield (m.group(0), 'diffstat.deleted')
2808 else:
2808 else:
2809 yield (line, '')
2809 yield (line, '')
2810 yield ('\n', '')
2810 yield ('\n', '')
@@ -1,499 +1,423 b''
1 Revert interactive tests
1 Revert interactive tests
2 1 add and commit file f
2 1 add and commit file f
3 2 add and commit file folder1/g
3 2 add and commit file folder1/g
4 3 add and commit file folder2/h
4 3 add and commit file folder2/h
5 4 add and commit file folder1/i
5 4 add and commit file folder1/i
6 5 commit change to file f
6 5 commit change to file f
7 6 commit changes to files folder1/g folder2/h
7 6 commit changes to files folder1/g folder2/h
8 7 commit changes to files folder1/g folder2/h
8 7 commit changes to files folder1/g folder2/h
9 8 revert interactive to commit id 2 (line 3 above), check that folder1/i is removed and
9 8 revert interactive to commit id 2 (line 3 above), check that folder1/i is removed and
10 9 make workdir match 7
10 9 make workdir match 7
11 10 run the same test as 8 from within folder1 and check the same expectations
11 10 run the same test as 8 from within folder1 and check the same expectations
12
12
13 $ cat <<EOF >> $HGRCPATH
13 $ cat <<EOF >> $HGRCPATH
14 > [ui]
14 > [ui]
15 > interactive = true
15 > interactive = true
16 > [extensions]
16 > [extensions]
17 > record =
17 > record =
18 > purge =
18 > purge =
19 > EOF
19 > EOF
20
20
21
21
22 $ mkdir -p a/folder1 a/folder2
22 $ mkdir -p a/folder1 a/folder2
23 $ cd a
23 $ cd a
24 $ hg init
24 $ hg init
25 >>> open('f', 'wb').write("1\n2\n3\n4\n5\n")
25 >>> open('f', 'wb').write("1\n2\n3\n4\n5\n")
26 $ hg add f ; hg commit -m "adding f"
26 $ hg add f ; hg commit -m "adding f"
27 $ cat f > folder1/g ; hg add folder1/g ; hg commit -m "adding folder1/g"
27 $ cat f > folder1/g ; hg add folder1/g ; hg commit -m "adding folder1/g"
28 $ cat f > folder2/h ; hg add folder2/h ; hg commit -m "adding folder2/h"
28 $ cat f > folder2/h ; hg add folder2/h ; hg commit -m "adding folder2/h"
29 $ cat f > folder1/i ; hg add folder1/i ; hg commit -m "adding folder1/i"
29 $ cat f > folder1/i ; hg add folder1/i ; hg commit -m "adding folder1/i"
30 >>> open('f', 'wb').write("a\n1\n2\n3\n4\n5\nb\n")
30 >>> open('f', 'wb').write("a\n1\n2\n3\n4\n5\nb\n")
31 $ hg commit -m "modifying f"
31 $ hg commit -m "modifying f"
32 >>> open('folder1/g', 'wb').write("c\n1\n2\n3\n4\n5\nd\n")
32 >>> open('folder1/g', 'wb').write("c\n1\n2\n3\n4\n5\nd\n")
33 $ hg commit -m "modifying folder1/g"
33 $ hg commit -m "modifying folder1/g"
34 >>> open('folder2/h', 'wb').write("e\n1\n2\n3\n4\n5\nf\n")
34 >>> open('folder2/h', 'wb').write("e\n1\n2\n3\n4\n5\nf\n")
35 $ hg commit -m "modifying folder2/h"
35 $ hg commit -m "modifying folder2/h"
36 $ hg tip
36 $ hg tip
37 changeset: 6:59dd6e4ab63a
37 changeset: 6:59dd6e4ab63a
38 tag: tip
38 tag: tip
39 user: test
39 user: test
40 date: Thu Jan 01 00:00:00 1970 +0000
40 date: Thu Jan 01 00:00:00 1970 +0000
41 summary: modifying folder2/h
41 summary: modifying folder2/h
42
42
43 $ hg revert -i -r 2 --all -- << EOF
43 $ hg revert -i -r 2 --all -- << EOF
44 > y
44 > y
45 > y
45 > y
46 > y
46 > y
47 > y
47 > y
48 > y
48 > y
49 > ?
49 > ?
50 > y
50 > y
51 > n
51 > n
52 > n
52 > n
53 > EOF
53 > EOF
54 reverting f
54 reverting f
55 reverting folder1/g (glob)
55 reverting folder1/g (glob)
56 removing folder1/i (glob)
56 removing folder1/i (glob)
57 reverting folder2/h (glob)
57 reverting folder2/h (glob)
58 remove added file folder1/i (Yn)? y
58 remove added file folder1/i (Yn)? y
59 diff --git a/f b/f
59 diff --git a/f b/f
60 2 hunks, 2 lines changed
60 2 hunks, 2 lines changed
61 examine changes to 'f'? [Ynesfdaq?] y
61 examine changes to 'f'? [Ynesfdaq?] y
62
62
63 @@ -1,5 +1,6 @@
63 @@ -1,6 +1,5 @@
64 +a
64 -a
65 1
65 1
66 2
66 2
67 3
67 3
68 4
68 4
69 5
69 5
70 revert change 1/6 to 'f'? [Ynesfdaq?] y
70 apply change 1/6 to 'f'? [Ynesfdaq?] y
71
71
72 @@ -1,5 +2,6 @@
72 @@ -2,6 +1,5 @@
73 1
73 1
74 2
74 2
75 3
75 3
76 4
76 4
77 5
77 5
78 +b
78 -b
79 revert change 2/6 to 'f'? [Ynesfdaq?] y
79 apply change 2/6 to 'f'? [Ynesfdaq?] y
80
80
81 diff --git a/folder1/g b/folder1/g
81 diff --git a/folder1/g b/folder1/g
82 2 hunks, 2 lines changed
82 2 hunks, 2 lines changed
83 examine changes to 'folder1/g'? [Ynesfdaq?] y
83 examine changes to 'folder1/g'? [Ynesfdaq?] y
84
84
85 @@ -1,5 +1,6 @@
85 @@ -1,6 +1,5 @@
86 +c
86 -c
87 1
87 1
88 2
88 2
89 3
89 3
90 4
90 4
91 5
91 5
92 revert change 3/6 to 'folder1/g'? [Ynesfdaq?] ?
92 apply change 3/6 to 'folder1/g'? [Ynesfdaq?] ?
93
93
94 y - yes, revert this change
94 y - yes, apply this change
95 n - no, skip this change
95 n - no, skip this change
96 e - edit this change manually
96 e - edit this change manually
97 s - skip remaining changes to this file
97 s - skip remaining changes to this file
98 f - revert remaining changes to this file
98 f - apply remaining changes to this file
99 d - done, skip remaining changes and files
99 d - done, skip remaining changes and files
100 a - revert all changes to all remaining files
100 a - apply all changes to all remaining files
101 q - quit, reverting no changes
101 q - quit, applying no changes
102 ? - ? (display help)
102 ? - ? (display help)
103 revert change 3/6 to 'folder1/g'? [Ynesfdaq?] y
103 apply change 3/6 to 'folder1/g'? [Ynesfdaq?] y
104
104
105 @@ -1,5 +2,6 @@
105 @@ -2,6 +1,5 @@
106 1
106 1
107 2
107 2
108 3
108 3
109 4
109 4
110 5
110 5
111 +d
111 -d
112 revert change 4/6 to 'folder1/g'? [Ynesfdaq?] n
112 apply change 4/6 to 'folder1/g'? [Ynesfdaq?] n
113
113
114 diff --git a/folder2/h b/folder2/h
114 diff --git a/folder2/h b/folder2/h
115 2 hunks, 2 lines changed
115 2 hunks, 2 lines changed
116 examine changes to 'folder2/h'? [Ynesfdaq?] n
116 examine changes to 'folder2/h'? [Ynesfdaq?] n
117
117
118 $ cat f
118 $ cat f
119 1
119 1
120 2
120 2
121 3
121 3
122 4
122 4
123 5
123 5
124 $ cat folder1/g
124 $ cat folder1/g
125 1
125 1
126 2
126 2
127 3
127 3
128 4
128 4
129 5
129 5
130 d
130 d
131 $ cat folder2/h
131 $ cat folder2/h
132 e
132 e
133 1
133 1
134 2
134 2
135 3
135 3
136 4
136 4
137 5
137 5
138 f
138 f
139
139
140 Test that --interactive lifts the need for --all
140 Test that --interactive lifts the need for --all
141
141
142 $ echo q | hg revert -i -r 2
142 $ echo q | hg revert -i -r 2
143 reverting folder1/g (glob)
143 reverting folder1/g (glob)
144 reverting folder2/h (glob)
144 reverting folder2/h (glob)
145 diff --git a/folder1/g b/folder1/g
145 diff --git a/folder1/g b/folder1/g
146 1 hunks, 1 lines changed
146 1 hunks, 1 lines changed
147 examine changes to 'folder1/g'? [Ynesfdaq?] q
147 examine changes to 'folder1/g'? [Ynesfdaq?] q
148
148
149 abort: user quit
149 abort: user quit
150 [255]
150 [255]
151 $ ls folder1/
151 $ ls folder1/
152 g
152 g
153
153
154 Test that a noop revert doesn't do an unnecessary backup
154 Test that a noop revert doesn't do an unnecessary backup
155 $ (echo y; echo n) | hg revert -i -r 2 folder1/g
155 $ (echo y; echo n) | hg revert -i -r 2 folder1/g
156 diff --git a/folder1/g b/folder1/g
156 diff --git a/folder1/g b/folder1/g
157 1 hunks, 1 lines changed
157 1 hunks, 1 lines changed
158 examine changes to 'folder1/g'? [Ynesfdaq?] y
158 examine changes to 'folder1/g'? [Ynesfdaq?] y
159
159
160 @@ -3,3 +3,4 @@
160 @@ -3,4 +3,3 @@
161 3
161 3
162 4
162 4
163 5
163 5
164 +d
164 -d
165 revert this change to 'folder1/g'? [Ynesfdaq?] n
165 apply this change to 'folder1/g'? [Ynesfdaq?] n
166
166
167 $ ls folder1/
167 $ ls folder1/
168 g
168 g
169
169
170 Test --no-backup
170 Test --no-backup
171 $ (echo y; echo y) | hg revert -i -C -r 2 folder1/g
171 $ (echo y; echo y) | hg revert -i -C -r 2 folder1/g
172 diff --git a/folder1/g b/folder1/g
172 diff --git a/folder1/g b/folder1/g
173 1 hunks, 1 lines changed
173 1 hunks, 1 lines changed
174 examine changes to 'folder1/g'? [Ynesfdaq?] y
174 examine changes to 'folder1/g'? [Ynesfdaq?] y
175
175
176 @@ -3,3 +3,4 @@
176 @@ -3,4 +3,3 @@
177 3
177 3
178 4
178 4
179 5
179 5
180 +d
180 -d
181 revert this change to 'folder1/g'? [Ynesfdaq?] y
181 apply this change to 'folder1/g'? [Ynesfdaq?] y
182
182
183 $ ls folder1/
183 $ ls folder1/
184 g
184 g
185 >>> open('folder1/g', 'wb').write("1\n2\n3\n4\n5\nd\n")
185 >>> open('folder1/g', 'wb').write("1\n2\n3\n4\n5\nd\n")
186
186
187
187
188 $ hg update -C 6
188 $ hg update -C 6
189 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
189 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
190 $ hg revert -i -r 2 --all -- << EOF
190 $ hg revert -i -r 2 --all -- << EOF
191 > n
191 > n
192 > y
192 > y
193 > y
193 > y
194 > y
194 > y
195 > y
195 > y
196 > y
196 > y
197 > n
197 > n
198 > n
198 > n
199 > EOF
199 > EOF
200 reverting f
200 reverting f
201 reverting folder1/g (glob)
201 reverting folder1/g (glob)
202 removing folder1/i (glob)
202 removing folder1/i (glob)
203 reverting folder2/h (glob)
203 reverting folder2/h (glob)
204 remove added file folder1/i (Yn)? n
204 remove added file folder1/i (Yn)? n
205 diff --git a/f b/f
205 diff --git a/f b/f
206 2 hunks, 2 lines changed
206 2 hunks, 2 lines changed
207 examine changes to 'f'? [Ynesfdaq?] y
207 examine changes to 'f'? [Ynesfdaq?] y
208
208
209 @@ -1,5 +1,6 @@
209 @@ -1,6 +1,5 @@
210 +a
210 -a
211 1
211 1
212 2
212 2
213 3
213 3
214 4
214 4
215 5
215 5
216 revert change 1/6 to 'f'? [Ynesfdaq?] y
216 apply change 1/6 to 'f'? [Ynesfdaq?] y
217
217
218 @@ -1,5 +2,6 @@
218 @@ -2,6 +1,5 @@
219 1
219 1
220 2
220 2
221 3
221 3
222 4
222 4
223 5
223 5
224 +b
224 -b
225 revert change 2/6 to 'f'? [Ynesfdaq?] y
225 apply change 2/6 to 'f'? [Ynesfdaq?] y
226
226
227 diff --git a/folder1/g b/folder1/g
227 diff --git a/folder1/g b/folder1/g
228 2 hunks, 2 lines changed
228 2 hunks, 2 lines changed
229 examine changes to 'folder1/g'? [Ynesfdaq?] y
229 examine changes to 'folder1/g'? [Ynesfdaq?] y
230
230
231 @@ -1,5 +1,6 @@
231 @@ -1,6 +1,5 @@
232 +c
232 -c
233 1
233 1
234 2
234 2
235 3
235 3
236 4
236 4
237 5
237 5
238 revert change 3/6 to 'folder1/g'? [Ynesfdaq?] y
238 apply change 3/6 to 'folder1/g'? [Ynesfdaq?] y
239
239
240 @@ -1,5 +2,6 @@
240 @@ -2,6 +1,5 @@
241 1
241 1
242 2
242 2
243 3
243 3
244 4
244 4
245 5
245 5
246 +d
246 -d
247 revert change 4/6 to 'folder1/g'? [Ynesfdaq?] n
247 apply change 4/6 to 'folder1/g'? [Ynesfdaq?] n
248
248
249 diff --git a/folder2/h b/folder2/h
249 diff --git a/folder2/h b/folder2/h
250 2 hunks, 2 lines changed
250 2 hunks, 2 lines changed
251 examine changes to 'folder2/h'? [Ynesfdaq?] n
251 examine changes to 'folder2/h'? [Ynesfdaq?] n
252
252
253 $ cat f
253 $ cat f
254 1
254 1
255 2
255 2
256 3
256 3
257 4
257 4
258 5
258 5
259 $ cat folder1/g
259 $ cat folder1/g
260 1
260 1
261 2
261 2
262 3
262 3
263 4
263 4
264 5
264 5
265 d
265 d
266 $ cat folder2/h
266 $ cat folder2/h
267 e
267 e
268 1
268 1
269 2
269 2
270 3
270 3
271 4
271 4
272 5
272 5
273 f
273 f
274 $ hg st
274 $ hg st
275 M f
275 M f
276 M folder1/g
276 M folder1/g
277 $ hg revert --interactive f << EOF
277 $ hg revert --interactive f << EOF
278 > y
278 > y
279 > ?
279 > ?
280 > y
280 > y
281 > n
281 > n
282 > n
282 > n
283 > EOF
283 > EOF
284 diff --git a/f b/f
284 diff --git a/f b/f
285 2 hunks, 2 lines changed
285 2 hunks, 2 lines changed
286 examine changes to 'f'? [Ynesfdaq?] y
286 examine changes to 'f'? [Ynesfdaq?] y
287
287
288 @@ -1,6 +1,5 @@
288 @@ -1,6 +1,5 @@
289 -a
289 -a
290 1
290 1
291 2
291 2
292 3
292 3
293 4
293 4
294 5
294 5
295 discard change 1/2 to 'f'? [Ynesfdaq?] ?
295 discard change 1/2 to 'f'? [Ynesfdaq?] ?
296
296
297 y - yes, discard this change
297 y - yes, discard this change
298 n - no, skip this change
298 n - no, skip this change
299 e - edit this change manually
299 e - edit this change manually
300 s - skip remaining changes to this file
300 s - skip remaining changes to this file
301 f - discard remaining changes to this file
301 f - discard remaining changes to this file
302 d - done, skip remaining changes and files
302 d - done, skip remaining changes and files
303 a - discard all changes to all remaining files
303 a - discard all changes to all remaining files
304 q - quit, discarding no changes
304 q - quit, discarding no changes
305 ? - ? (display help)
305 ? - ? (display help)
306 discard change 1/2 to 'f'? [Ynesfdaq?] y
306 discard change 1/2 to 'f'? [Ynesfdaq?] y
307
307
308 @@ -2,6 +1,5 @@
308 @@ -2,6 +1,5 @@
309 1
309 1
310 2
310 2
311 3
311 3
312 4
312 4
313 5
313 5
314 -b
314 -b
315 discard change 2/2 to 'f'? [Ynesfdaq?] n
315 discard change 2/2 to 'f'? [Ynesfdaq?] n
316
316
317 $ hg st
317 $ hg st
318 M f
318 M f
319 M folder1/g
319 M folder1/g
320 ? f.orig
320 ? f.orig
321 $ cat f
321 $ cat f
322 a
322 a
323 1
323 1
324 2
324 2
325 3
325 3
326 4
326 4
327 5
327 5
328 $ cat f.orig
328 $ cat f.orig
329 1
329 1
330 2
330 2
331 3
331 3
332 4
332 4
333 5
333 5
334 $ rm f.orig
334 $ rm f.orig
335 $ hg update -C .
335 $ hg update -C .
336 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
336 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
337
337
338 Check editing files newly added by a revert
338 Check editing files newly added by a revert
339
339
340 1) Create a dummy editor changing 1 to 42
340 1) Create a dummy editor changing 1 to 42
341 $ cat > $TESTTMP/editor.sh << '__EOF__'
341 $ cat > $TESTTMP/editor.sh << '__EOF__'
342 > cat "$1" | sed "s/1/42/g" > tt
342 > cat "$1" | sed "s/1/42/g" > tt
343 > mv tt "$1"
343 > mv tt "$1"
344 > __EOF__
344 > __EOF__
345
345
346 2) Add k
346 2) Add k
347 $ printf "1\n" > k
347 $ printf "1\n" > k
348 $ hg add k
348 $ hg add k
349 $ hg commit -m "add k"
349 $ hg commit -m "add k"
350
350
351 3) Use interactive revert with editing (replacing +1 with +42):
351 3) Use interactive revert with editing (replacing +1 with +42):
352 $ printf "0\n2\n" > k
352 $ printf "0\n2\n" > k
353 $ HGEDITOR="\"sh\" \"${TESTTMP}/editor.sh\"" hg revert -i <<EOF
353 $ HGEDITOR="\"sh\" \"${TESTTMP}/editor.sh\"" hg revert -i <<EOF
354 > y
354 > y
355 > e
355 > e
356 > EOF
356 > EOF
357 reverting k
357 reverting k
358 diff --git a/k b/k
358 diff --git a/k b/k
359 1 hunks, 2 lines changed
359 1 hunks, 2 lines changed
360 examine changes to 'k'? [Ynesfdaq?] y
360 examine changes to 'k'? [Ynesfdaq?] y
361
361
362 @@ -1,1 +1,2 @@
362 @@ -1,1 +1,2 @@
363 -1
363 -1
364 +0
364 +0
365 +2
365 +2
366 discard this change to 'k'? [Ynesfdaq?] e
366 discard this change to 'k'? [Ynesfdaq?] e
367
367
368 $ cat k
368 $ cat k
369 42
369 42
370
370
371 Check the experimental config to invert the selection:
372 $ cat <<EOF >> $HGRCPATH
373 > [experimental]
374 > revertalternateinteractivemode=False
375 > EOF
376
377
378 $ hg up -C .
379 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
380 $ printf 'firstline\nc\n1\n2\n3\n 3\n5\nd\nlastline\n' > folder1/g
381 $ hg diff --nodates
382 diff -r a3d963a027aa folder1/g
383 --- a/folder1/g
384 +++ b/folder1/g
385 @@ -1,7 +1,9 @@
386 +firstline
387 c
388 1
389 2
390 3
391 -4
392 + 3
393 5
394 d
395 +lastline
396 $ hg revert -i <<EOF
397 > y
398 > y
399 > y
400 > n
401 > EOF
402 reverting folder1/g (glob)
403 diff --git a/folder1/g b/folder1/g
404 3 hunks, 3 lines changed
405 examine changes to 'folder1/g'? [Ynesfdaq?] y
406
407 @@ -1,4 +1,5 @@
408 +firstline
409 c
410 1
411 2
412 3
413 discard change 1/3 to 'folder1/g'? [Ynesfdaq?] y
414
415 @@ -1,7 +2,7 @@
416 c
417 1
418 2
419 3
420 -4
421 + 3
422 5
423 d
424 discard change 2/3 to 'folder1/g'? [Ynesfdaq?] y
425
426 @@ -6,2 +7,3 @@
427 5
428 d
429 +lastline
430 discard change 3/3 to 'folder1/g'? [Ynesfdaq?] n
431
432 $ hg diff --nodates
433 diff -r a3d963a027aa folder1/g
434 --- a/folder1/g
435 +++ b/folder1/g
436 @@ -5,3 +5,4 @@
437 4
438 5
439 d
440 +lastline
441
442 $ hg update -C .
371 $ hg update -C .
443 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
372 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
444 $ hg purge
373 $ hg purge
445 $ touch newfile
374 $ touch newfile
446 $ hg add newfile
375 $ hg add newfile
447 $ hg status
376 $ hg status
448 A newfile
377 A newfile
449 $ hg revert -i <<EOF
378 $ hg revert -i <<EOF
450 > n
379 > n
451 > EOF
380 > EOF
452 forgetting newfile
381 forgetting newfile
453 forget added file newfile (Yn)? n
382 forget added file newfile (Yn)? n
454 $ hg status
383 $ hg status
455 A newfile
384 A newfile
456 $ hg revert -i <<EOF
385 $ hg revert -i <<EOF
457 > y
386 > y
458 > EOF
387 > EOF
459 forgetting newfile
388 forgetting newfile
460 forget added file newfile (Yn)? y
389 forget added file newfile (Yn)? y
461 $ hg status
390 $ hg status
462 ? newfile
391 ? newfile
463
392
464 When a line without EOL is selected during "revert -i" (issue5651)
393 When a line without EOL is selected during "revert -i" (issue5651)
465
394
466 $ cat <<EOF >> $HGRCPATH
467 > [experimental]
468 > %unset revertalternateinteractivemode
469 > EOF
470
471 $ hg init $TESTTMP/revert-i-eol
395 $ hg init $TESTTMP/revert-i-eol
472 $ cd $TESTTMP/revert-i-eol
396 $ cd $TESTTMP/revert-i-eol
473 $ echo 0 > a
397 $ echo 0 > a
474 $ hg ci -qAm 0
398 $ hg ci -qAm 0
475 $ printf 1 >> a
399 $ printf 1 >> a
476 $ hg ci -qAm 1
400 $ hg ci -qAm 1
477 $ cat a
401 $ cat a
478 0
402 0
479 1 (no-eol)
403 1 (no-eol)
480
404
481 $ hg revert -ir'.^' <<EOF
405 $ hg revert -ir'.^' <<EOF
482 > y
406 > y
483 > y
407 > y
484 > EOF
408 > EOF
485 reverting a
409 reverting a
486 diff --git a/a b/a
410 diff --git a/a b/a
487 1 hunks, 1 lines changed
411 1 hunks, 1 lines changed
488 examine changes to 'a'? [Ynesfdaq?] y
412 examine changes to 'a'? [Ynesfdaq?] y
489
413
490 @@ -1,1 +1,2 @@
414 @@ -1,2 +1,1 @@
491 0
415 0
492 +1
416 -1
493 \ No newline at end of file
417 \ No newline at end of file
494 revert this change to 'a'? [Ynesfdaq?] y
418 apply this change to 'a'? [Ynesfdaq?] y
495
419
496 $ cat a
420 $ cat a
497 0
421 0
498
422
499 $ cd ..
423 $ cd ..