cat: don't prefetch files unless the output requires it...
Matt Harbison
r42678:561cd02c default
@@ -1,3398 +1,3408 @@
# cmdutil.py - help for command processing in mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import copy as copymod
import errno
import os
import re

from .i18n import _
from .node import (
    hex,
    nullid,
    nullrev,
    short,
)

from . import (
    bookmarks,
    changelog,
    copies,
    crecord as crecordmod,
    dirstateguard,
    encoding,
    error,
    formatter,
    logcmdutil,
    match as matchmod,
    merge as mergemod,
    mergeutil,
    obsolete,
    patch,
    pathutil,
    phases,
    pycompat,
    revlog,
    rewriteutil,
    scmutil,
    smartset,
    subrepoutil,
    templatekw,
    templater,
    util,
    vfs as vfsmod,
)

from .utils import (
    dateutil,
    stringutil,
)

stringio = util.stringio

# templates of common command options

dryrunopts = [
    ('n', 'dry-run', None,
     _('do not perform actions, just print output')),
]

confirmopts = [
    ('', 'confirm', None,
     _('ask before applying actions')),
]

remoteopts = [
    ('e', 'ssh', '',
     _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '',
     _('specify hg command to run on the remote side'), _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

formatteropts = [
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'binary', None, _('generate binary diffs in git mode (default)')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('Z', 'ignore-space-at-eol', None,
     _('ignore changes in whitespace at EOL')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool'), _('TOOL')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', '', _('open directory manifest')),
]

# special string such that everything below this line will be ignored in the
# editor text
_linebelow = "^HG: ------------------------ >8 ------------------------$"

def ishunk(x):
    hunkclasses = (crecordmod.uihunk, patch.recordhunk)
    return isinstance(x, hunkclasses)

def newandmodified(chunks, originalchunks):
    newlyaddedandmodifiedfiles = set()
    for chunk in chunks:
        if (ishunk(chunk) and chunk.header.isnewfile() and chunk not in
            originalchunks):
            newlyaddedandmodifiedfiles.add(chunk.header.filename())
    return newlyaddedandmodifiedfiles

def parsealiases(cmd):
    return cmd.split("|")

def setupwrapcolorwrite(ui):
    # wrap ui.write so diff output can be labeled/colorized
    def wrapwrite(orig, *args, **kw):
        label = kw.pop(r'label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    setattr(ui, 'write', wrap)
    return oldwrite

def filterchunks(ui, originalhunks, usecurses, testfile, match,
                 operation=None):
    try:
        if usecurses:
            if testfile:
                recordfn = crecordmod.testdecorator(
                    testfile, crecordmod.testchunkselector)
            else:
                recordfn = crecordmod.chunkselector

            return crecordmod.filterpatch(ui, originalhunks, recordfn,
                                          operation)
    except crecordmod.fallbackerror as e:
        ui.warn('%s\n' % e.message)
        ui.warn(_('falling back to text mode\n'))

    return patch.filterpatch(ui, originalhunks, match, operation)

def recordfilter(ui, originalhunks, match, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used to build ui messages indicating to the user what
    kind of filtering they are doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest')
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, match, operation)
    finally:
        ui.write = oldwrite
    return newchunks, newopts

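# Illustrative usage sketch for recordfilter() (assumes a 'ui' object, hunks
# obtained from patch.parsepatch(), and a matcher 'm' are already in hand;
# illustration only, not part of the module's control flow):
#
#   chunks, newopts = recordfilter(ui, hunks, m, operation='commit')
#
# dorecord() below invokes its 'filterfn' argument in exactly this way.
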
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    opts = pycompat.byteskwargs(opts)
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is the generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare the working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by the non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match = matchmod.badmatch(match, fail)
            match.explicitdir = vdirs.append

        status = repo.status(match=match)

        overrides = {(b'ui', b'commitsubrepos'): True}

        with repo.ui.configoverride(overrides, b'record'):
            # subrepoutil.precommit() modifies the status
            tmpstatus = scmutil.status(copymod.copy(status[0]),
                                       copymod.copy(status[1]),
                                       copymod.copy(status[2]),
                                       copymod.copy(status[3]),
                                       copymod.copy(status[4]),
                                       copymod.copy(status[5]),
                                       copymod.copy(status[6]))

            # Force allows -X subrepo to skip the subrepo.
            subs, commitsubs, newstate = subrepoutil.precommit(
                repo.ui, wctx, tmpstatus, match, force=True)
            for s in subs:
                if s in commitsubs:
                    dirtyreason = wctx.sub(s).dirtyreason(True)
                    raise error.Abort(dirtyreason)

        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True,
                                         section='commands',
                                         configprefix='commit.interactive.')
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)
        match = scmutil.match(repo[None], pats)

        # 1. filter patch, since we are intending to apply a subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks, match)
        except error.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in
                        newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.vfs.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = pycompat.mkstemp(prefix=f.replace('/', '_') + '.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        action="diff",
                                        repopath=repo.path)
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(), branchmerge=False,
                                force=True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except error.PatchError as err:
                    raise error.Abort(pycompat.bytestr(err))
            del fp

            # 4. We prepared working directory according to filtered
            # patch. Now is the time to delegate the job to
            # commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **pycompat.strkwargs(opts))
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open into thinking that we haven't
                    # modified it.
                    #
                    # Also note that this is racy as an editor could notice
                    # the file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                pass

    def recordinwlock(ui, repo, message, match, opts):
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)

class dirnode(object):
    """
    Represent a directory in the user's working copy with information required
    for the purpose of tersing its status.

    path is the path to the directory, without a trailing '/'

    statuses is a set of statuses of all files in this directory (this includes
    all the files in all the subdirectories too)

    files is a list of files which are direct children of this directory

    subdirs is a dictionary of sub-directory name as the key and its own
    dirnode object as the value
    """

    def __init__(self, dirpath):
        self.path = dirpath
        self.statuses = set()
        self.files = []
        self.subdirs = {}

    def _addfileindir(self, filename, status):
        """Add a file in this directory as a direct child."""
        self.files.append((filename, status))

    def addfile(self, filename, status):
        """
        Add a file to this directory or to its direct parent directory.

        If the file is not a direct child of this directory, we traverse to
        the directory of which this file is a direct child and add the file
        there.
        """

        # if the filename contains a path separator, it is not a direct child
        # of this directory
        if '/' in filename:
            subdir, filep = filename.split('/', 1)

            # does the dirnode object for subdir exist?
            if subdir not in self.subdirs:
                subdirpath = pathutil.join(self.path, subdir)
                self.subdirs[subdir] = dirnode(subdirpath)

            # try adding the file in subdir
            self.subdirs[subdir].addfile(filep, status)

        else:
            self._addfileindir(filename, status)

        if status not in self.statuses:
            self.statuses.add(status)

    def iterfilepaths(self):
        """Yield (status, path) for files directly under this directory."""
        for f, st in self.files:
            yield st, pathutil.join(self.path, f)

    def tersewalk(self, terseargs):
        """
        Yield (status, path) obtained by processing the status of this
        dirnode.

        terseargs is the string of arguments passed by the user with the
        `--terse` flag.

        Following are the cases which can happen:

        1) All the files in the directory (including all the files in its
        subdirectories) share the same status and the user has asked us to
        terse that status. -> yield (status, dirpath). dirpath will end in '/'.

        2) Otherwise, we do the following:

            a) Yield (status, filepath) for all the files which are in this
            directory (only the ones in this directory, not the subdirs)

            b) Recurse the function on all the subdirectories of this
            directory
        """

        if len(self.statuses) == 1:
            onlyst = self.statuses.pop()

            # Making sure we terse only when the status abbreviation is
            # passed as terse argument
            if onlyst in terseargs:
                yield onlyst, self.path + '/'
                return

        # add the files to status list
        for st, fpath in self.iterfilepaths():
            yield st, fpath

        # recurse on the subdirs
        for dirobj in self.subdirs.values():
            for st, fpath in dirobj.tersewalk(terseargs):
                yield st, fpath

def tersedir(statuslist, terseargs):
    """
    Terse the status if all the files in a directory share the same status.

    statuslist is a scmutil.status() object which contains a list of files for
    each status.
    terseargs is the string passed by the user as the argument to the
    `--terse` flag.

    The function makes a tree of objects of dirnode class, and at each node it
    stores the information required to know whether we can terse a certain
    directory or not.
    """
    # the order matters here as that is used to produce the final list
    allst = ('m', 'a', 'r', 'd', 'u', 'i', 'c')

    # checking the argument validity
    for s in pycompat.bytestr(terseargs):
        if s not in allst:
            raise error.Abort(_("'%s' not recognized") % s)

    # creating a dirnode object for the root of the repo
    rootobj = dirnode('')
    pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
               'ignored', 'removed')

    tersedict = {}
    for attrname in pstatus:
        statuschar = attrname[0:1]
        for f in getattr(statuslist, attrname):
            rootobj.addfile(f, statuschar)
        tersedict[statuschar] = []

    # we won't be tersing the root dir, so add files in it
    for st, fpath in rootobj.iterfilepaths():
        tersedict[st].append(fpath)

    # process each sub-directory and build tersedict
    for subdir in rootobj.subdirs.values():
        for st, f in subdir.tersewalk(terseargs):
            tersedict[st].append(f)

    tersedlist = []
    for st in allst:
        tersedict[st].sort()
        tersedlist.append(tersedict[st])

    return tersedlist

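# Illustrative sketch of the terse logic above (hypothetical file names, for
# illustration only): if the status list contains exactly 'docs/a.txt' and
# 'docs/sub/b.txt', both clean ('c'), and 'c' appears in terseargs, then the
# dirnode tree collapses them and tersedir() reports the single entry
# 'docs/'; with any other mix of statuses under 'docs/' the individual files
# are listed instead.
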
def _commentlines(raw):
    '''Surround lines with a comment char and a new line'''
    lines = raw.splitlines()
    commentedlines = ['# %s' % line for line in lines]
    return '\n'.join(commentedlines) + '\n'

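# Doctest-style sketch of _commentlines() (illustration only):
#
#   >>> _commentlines('one\ntwo')
#   '# one\n# two\n'
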
def _conflictsmsg(repo):
    mergestate = mergemod.mergestate.read(repo)
    if not mergestate.active():
        return

    m = scmutil.match(repo[None])
    unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
    if unresolvedlist:
        mergeliststr = '\n'.join(
            [' %s' % util.pathto(repo.root, encoding.getcwd(), path)
             for path in sorted(unresolvedlist)])
        msg = _('''Unresolved merge conflicts:

%s

To mark files as resolved: hg resolve --mark FILE''') % mergeliststr
    else:
        msg = _('No unresolved merge conflicts.')

    return _commentlines(msg)

def _helpmessage(continuecmd, abortcmd):
    msg = _('To continue: %s\n'
            'To abort: %s') % (continuecmd, abortcmd)
    return _commentlines(msg)

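# Sketch of the output of _helpmessage() (assumes the untranslated C-locale
# text; illustration only):
#
#   >>> _helpmessage('hg rebase --continue', 'hg rebase --abort')
#   '# To continue: hg rebase --continue\n# To abort: hg rebase --abort\n'
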
def _rebasemsg():
    return _helpmessage('hg rebase --continue', 'hg rebase --abort')

def _histeditmsg():
    return _helpmessage('hg histedit --continue', 'hg histedit --abort')

def _unshelvemsg():
    return _helpmessage('hg unshelve --continue', 'hg unshelve --abort')

def _graftmsg():
    return _helpmessage('hg graft --continue', 'hg graft --abort')

def _mergemsg():
    return _helpmessage('hg commit', 'hg merge --abort')

def _bisectmsg():
    msg = _('To mark the changeset good: hg bisect --good\n'
            'To mark the changeset bad: hg bisect --bad\n'
            'To abort: hg bisect --reset\n')
    return _commentlines(msg)

def fileexistspredicate(filename):
    return lambda repo: repo.vfs.exists(filename)

def _mergepredicate(repo):
    return len(repo[None].parents()) > 1

STATES = (
    # (state, predicate to detect states, helpful message function)
    ('histedit', fileexistspredicate('histedit-state'), _histeditmsg),
    ('bisect', fileexistspredicate('bisect.state'), _bisectmsg),
    ('graft', fileexistspredicate('graftstate'), _graftmsg),
    ('unshelve', fileexistspredicate('shelvedstate'), _unshelvemsg),
    ('rebase', fileexistspredicate('rebasestate'), _rebasemsg),
    # The merge state is part of a list that will be iterated over.
    # It needs to be last because some of the other unfinished states may also
    # be in a merge or update state (e.g. rebase, histedit, graft, etc).
    # We want those to have priority.
    ('merge', _mergepredicate, _mergemsg),
)

def _getrepostate(repo):
    # experimental config: commands.status.skipstates
    skip = set(repo.ui.configlist('commands', 'status.skipstates'))
    for state, statedetectionpredicate, msgfn in STATES:
        if state in skip:
            continue
        if statedetectionpredicate(repo):
            return (state, statedetectionpredicate, msgfn)

def morestatus(repo, fm):
    statetuple = _getrepostate(repo)
    label = 'status.morestatus'
    if statetuple:
        state, statedetectionpredicate, helpfulmsg = statetuple
        statemsg = _('The repository is in an unfinished *%s* state.') % state
        fm.plain('%s\n' % _commentlines(statemsg), label=label)
        conmsg = _conflictsmsg(repo)
        if conmsg:
            fm.plain('%s\n' % conmsg, label=label)
        if helpfulmsg:
            helpmsg = helpfulmsg()
            fm.plain('%s\n' % helpmsg, label=label)

def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for e in keys:
        aliases = parsealiases(e)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds

def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = sorted(choice)
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)

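# Sketch of the matching rules above with a hypothetical command table
# (illustration only):
#
#   >>> table = {"log|history": (None, []), "diff": (None, [])}
#   >>> sorted(findpossible("hist", table)[0])
#   ['history']
#
# With strict=False the unique prefix match on the 'history' alias is
# accepted; findcmd("hist", table, strict=True) would instead raise
# error.UnknownCommand, because strict matching requires an exact alias name.
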
def changebranch(ui, repo, revs, label):
    """ Change the branch name of given revs to label """

    with repo.wlock(), repo.lock(), repo.transaction('branches'):
        # abort in case of uncommitted merge or dirty wdir
        bailifchanged(repo)
        revs = scmutil.revrange(repo, revs)
        if not revs:
            raise error.Abort("empty revision set")
        roots = repo.revs('roots(%ld)', revs)
        if len(roots) > 1:
            raise error.Abort(_("cannot change branch of non-linear revisions"))
        rewriteutil.precheck(repo, revs, 'change branch of')

        root = repo[roots.first()]
        rpb = {parent.branch() for parent in root.parents()}
        if label not in rpb and label in repo.branchmap():
            raise error.Abort(_("a branch of the same name already exists"))

        if repo.revs('obsolete() and %ld', revs):
            raise error.Abort(_("cannot change branch of an obsolete changeset"))

        # make sure only topological heads
        if repo.revs('heads(%ld) - head()', revs):
            raise error.Abort(_("cannot change branch in middle of a stack"))

        replacements = {}
        # avoid import cycle mercurial.cmdutil -> mercurial.context ->
        # mercurial.subrepo -> mercurial.cmdutil
        from . import context
        for rev in revs:
            ctx = repo[rev]
            oldbranch = ctx.branch()
            # check if ctx has same branch
            if oldbranch == label:
                continue

            def filectxfn(repo, newctx, path):
                try:
                    return ctx[path]
                except error.ManifestLookupError:
                    return None

            ui.debug("changing branch of '%s' from '%s' to '%s'\n"
                     % (hex(ctx.node()), oldbranch, label))
            extra = ctx.extra()
            extra['branch_change'] = hex(ctx.node())
            # While changing branch of a set of linear commits, make sure that
            # we base our commits on the new parent rather than the old parent,
            # which was obsoleted while changing the branch
            p1 = ctx.p1().node()
            p2 = ctx.p2().node()
            if p1 in replacements:
                p1 = replacements[p1][0]
            if p2 in replacements:
                p2 = replacements[p2][0]

            mc = context.memctx(repo, (p1, p2),
                                ctx.description(),
                                ctx.files(),
                                filectxfn,
                                user=ctx.user(),
                                date=ctx.date(),
                                extra=extra,
                                branch=label)

            newnode = repo.commitctx(mc)
            replacements[ctx.node()] = (newnode,)
            ui.debug('new node id is %s\n' % hex(newnode))

        # create obsmarkers and move bookmarks
        scmutil.cleanupnodes(repo, replacements, 'branch-change', fixphase=True)

        # move the working copy too
        wctx = repo[None]
        # in-progress merge is a bit too complex for now.
        if len(wctx.parents()) == 1:
            newid = replacements.get(wctx.p1().node())
            if newid is not None:
                # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                # mercurial.cmdutil
                from . import hg
                hg.update(repo, newid[0], quietempty=True)

        ui.status(_("changed branch on %d changesets\n") % len(replacements))

def findrepo(p):
    while not os.path.isdir(os.path.join(p, ".hg")):
        oldp, p = p, os.path.dirname(p)
        if p == oldp:
            return None

    return p

def bailifchanged(repo, merge=True, hint=None):
    """ enforce the precondition that the working directory must be clean.

    'merge' can be set to false if a pending uncommitted merge should be
    ignored (such as when 'update --check' runs).

    'hint' is the usual hint given to Abort exception.
    """

    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise error.Abort(_('uncommitted changes'), hint=hint)
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged(hint=hint)

def logmessage(ui, opts):
    """ get the log message according to the -m and -l options """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if not message and logfile:
        try:
            if isstdiofilename(logfile):
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise error.Abort(_("can't read commit message '%s': %s") %
                              (logfile, encoding.strtolocal(inst.strerror)))
    return message

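# Sketch of logmessage() behaviour (assumes a 'ui' instance; illustration
# only):
#
#   >>> logmessage(ui, {'message': 'fix parser', 'logfile': ''})
#   'fix parser'
#
# With both 'message' and 'logfile' set it aborts; with only 'logfile' set it
# reads the message from that file, or from stdin when the file name is '-'.
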
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    a merge is being committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        if ctxorbool:
            return baseformname + ".merge"
    elif len(ctxorbool.parents()) > 1:
        return baseformname + ".merge"

    return baseformname + ".normal"

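# Doctest-style sketch of mergeeditform() with a boolean input (illustration
# only):
#
#   >>> mergeeditform(True, 'commit')
#   'commit.merge'
#   >>> mergeeditform(False, 'commit')
#   'commit.normal'
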
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get the appropriate commit message editor according to the '--edit' option

    'finishdesc' is a function to be called with the edited commit message
    (= 'description' of the new changeset) just after editing, but before
    checking emptiness. It should return the actual text to be stored in
    history. This allows the description to be changed before it is stored.

    'extramsg' is an extra message to be shown in the editor instead of the
    'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL are
    added automatically.

    'editform' is a dot-separated list of names, used to distinguish the
    purpose of the commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of 'edit' if one
    of 'finishdesc' or 'extramsg' is specified, because they are specific to
    usage in MQ.
    """
    if edit or finishdesc or extramsg:
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor
922
922
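# A typical caller wires the two helpers above together the same way
# tryimportone() does later in this module (sketch; 'message', 'user', 'date'
# and 'm' are assumed to be provided by the surrounding command):
#
#     editform = mergeeditform(repo[None], 'import.normal')
#     editor = getcommiteditor(editform=editform, **pycompat.strkwargs(opts))
#     repo.commit(message, user, date, match=m, editor=editor)
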
923 def _escapecommandtemplate(tmpl):
923 def _escapecommandtemplate(tmpl):
924 parts = []
924 parts = []
925 for typ, start, end in templater.scantemplate(tmpl, raw=True):
925 for typ, start, end in templater.scantemplate(tmpl, raw=True):
926 if typ == b'string':
926 if typ == b'string':
927 parts.append(stringutil.escapestr(tmpl[start:end]))
927 parts.append(stringutil.escapestr(tmpl[start:end]))
928 else:
928 else:
929 parts.append(tmpl[start:end])
929 parts.append(tmpl[start:end])
930 return b''.join(parts)
930 return b''.join(parts)
931
931
932 def rendercommandtemplate(ui, tmpl, props):
932 def rendercommandtemplate(ui, tmpl, props):
933 r"""Expand a literal template 'tmpl' in a way suitable for command line
933 r"""Expand a literal template 'tmpl' in a way suitable for command line
934
934
935 '\' in outermost string is not taken as an escape character because it
935 '\' in outermost string is not taken as an escape character because it
936 is a directory separator on Windows.
936 is a directory separator on Windows.
937
937
938 >>> from . import ui as uimod
938 >>> from . import ui as uimod
939 >>> ui = uimod.ui()
939 >>> ui = uimod.ui()
940 >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
940 >>> rendercommandtemplate(ui, b'c:\\{path}', {b'path': b'foo'})
941 'c:\\foo'
941 'c:\\foo'
942 >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
942 >>> rendercommandtemplate(ui, b'{"c:\\{path}"}', {'path': b'foo'})
943 'c:{path}'
943 'c:{path}'
944 """
944 """
945 if not tmpl:
945 if not tmpl:
946 return tmpl
946 return tmpl
947 t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
947 t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
948 return t.renderdefault(props)
948 return t.renderdefault(props)
949
949
def rendertemplate(ctx, tmpl, props=None):
    """Expand a literal template 'tmpl' byte-string against one changeset

    Each props item must be a stringify-able value or a callable returning
    such a value, i.e. no bare list or dict should be passed.
    """
    repo = ctx.repo()
    tres = formatter.templateresources(repo.ui, repo)
    t = formatter.maketemplater(repo.ui, tmpl, defaults=templatekw.keywords,
                                resources=tres)
    mapping = {'ctx': ctx}
    if props:
        mapping.update(props)
    return t.renderdefault(mapping)
964
964
965 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
965 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
966 r"""Convert old-style filename format string to template string
966 r"""Convert old-style filename format string to template string
967
967
968 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
968 >>> _buildfntemplate(b'foo-%b-%n.patch', seqno=0)
969 'foo-{reporoot|basename}-{seqno}.patch'
969 'foo-{reporoot|basename}-{seqno}.patch'
970 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
970 >>> _buildfntemplate(b'%R{tags % "{tag}"}%H')
971 '{rev}{tags % "{tag}"}{node}'
971 '{rev}{tags % "{tag}"}{node}'
972
972
973 '\' in outermost strings has to be escaped because it is a directory
973 '\' in outermost strings has to be escaped because it is a directory
974 separator on Windows:
974 separator on Windows:
975
975
976 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
976 >>> _buildfntemplate(b'c:\\tmp\\%R\\%n.patch', seqno=0)
977 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
977 'c:\\\\tmp\\\\{rev}\\\\{seqno}.patch'
978 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
978 >>> _buildfntemplate(b'\\\\foo\\bar.patch')
979 '\\\\\\\\foo\\\\bar.patch'
979 '\\\\\\\\foo\\\\bar.patch'
980 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
980 >>> _buildfntemplate(b'\\{tags % "{tag}"}')
981 '\\\\{tags % "{tag}"}'
981 '\\\\{tags % "{tag}"}'
982
982
983 but inner strings follow the template rules (i.e. '\' is taken as an
983 but inner strings follow the template rules (i.e. '\' is taken as an
984 escape character):
984 escape character):
985
985
986 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
986 >>> _buildfntemplate(br'{"c:\tmp"}', seqno=0)
987 '{"c:\\tmp"}'
987 '{"c:\\tmp"}'
988 """
988 """
989 expander = {
989 expander = {
990 b'H': b'{node}',
990 b'H': b'{node}',
991 b'R': b'{rev}',
991 b'R': b'{rev}',
992 b'h': b'{node|short}',
992 b'h': b'{node|short}',
993 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
993 b'm': br'{sub(r"[^\w]", "_", desc|firstline)}',
994 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
994 b'r': b'{if(revwidth, pad(rev, revwidth, "0", left=True), rev)}',
995 b'%': b'%',
995 b'%': b'%',
996 b'b': b'{reporoot|basename}',
996 b'b': b'{reporoot|basename}',
997 }
997 }
998 if total is not None:
998 if total is not None:
999 expander[b'N'] = b'{total}'
999 expander[b'N'] = b'{total}'
1000 if seqno is not None:
1000 if seqno is not None:
1001 expander[b'n'] = b'{seqno}'
1001 expander[b'n'] = b'{seqno}'
1002 if total is not None and seqno is not None:
1002 if total is not None and seqno is not None:
1003 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
1003 expander[b'n'] = b'{pad(seqno, total|stringify|count, "0", left=True)}'
1004 if pathname is not None:
1004 if pathname is not None:
1005 expander[b's'] = b'{pathname|basename}'
1005 expander[b's'] = b'{pathname|basename}'
1006 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
1006 expander[b'd'] = b'{if(pathname|dirname, pathname|dirname, ".")}'
1007 expander[b'p'] = b'{pathname}'
1007 expander[b'p'] = b'{pathname}'
1008
1008
1009 newname = []
1009 newname = []
1010 for typ, start, end in templater.scantemplate(pat, raw=True):
1010 for typ, start, end in templater.scantemplate(pat, raw=True):
1011 if typ != b'string':
1011 if typ != b'string':
1012 newname.append(pat[start:end])
1012 newname.append(pat[start:end])
1013 continue
1013 continue
1014 i = start
1014 i = start
1015 while i < end:
1015 while i < end:
1016 n = pat.find(b'%', i, end)
1016 n = pat.find(b'%', i, end)
1017 if n < 0:
1017 if n < 0:
1018 newname.append(stringutil.escapestr(pat[i:end]))
1018 newname.append(stringutil.escapestr(pat[i:end]))
1019 break
1019 break
1020 newname.append(stringutil.escapestr(pat[i:n]))
1020 newname.append(stringutil.escapestr(pat[i:n]))
1021 if n + 2 > end:
1021 if n + 2 > end:
1022 raise error.Abort(_("incomplete format spec in output "
1022 raise error.Abort(_("incomplete format spec in output "
1023 "filename"))
1023 "filename"))
1024 c = pat[n + 1:n + 2]
1024 c = pat[n + 1:n + 2]
1025 i = n + 2
1025 i = n + 2
1026 try:
1026 try:
1027 newname.append(expander[c])
1027 newname.append(expander[c])
1028 except KeyError:
1028 except KeyError:
1029 raise error.Abort(_("invalid format spec '%%%s' in output "
1029 raise error.Abort(_("invalid format spec '%%%s' in output "
1030 "filename") % c)
1030 "filename") % c)
1031 return ''.join(newname)
1031 return ''.join(newname)
1032
1032
1033 def makefilename(ctx, pat, **props):
1033 def makefilename(ctx, pat, **props):
1034 if not pat:
1034 if not pat:
1035 return pat
1035 return pat
1036 tmpl = _buildfntemplate(pat, **props)
1036 tmpl = _buildfntemplate(pat, **props)
1037 # BUG: alias expansion shouldn't be made against template fragments
1037 # BUG: alias expansion shouldn't be made against template fragments
1038 # rewritten from %-format strings, but we have no easy way to partially
1038 # rewritten from %-format strings, but we have no easy way to partially
1039 # disable the expansion.
1039 # disable the expansion.
1040 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
1040 return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
1041
1041
1042 def isstdiofilename(pat):
1042 def isstdiofilename(pat):
1043 """True if the given pat looks like a filename denoting stdin/stdout"""
1043 """True if the given pat looks like a filename denoting stdin/stdout"""
1044 return not pat or pat == '-'
1044 return not pat or pat == '-'
1045
1045
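# For example (sketch):
#
#     isstdiofilename('-')         -> True   (use stdin/stdout)
#     isstdiofilename('')          -> True
#     isstdiofilename('out.diff')  -> False
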
1046 class _unclosablefile(object):
1046 class _unclosablefile(object):
1047 def __init__(self, fp):
1047 def __init__(self, fp):
1048 self._fp = fp
1048 self._fp = fp
1049
1049
1050 def close(self):
1050 def close(self):
1051 pass
1051 pass
1052
1052
1053 def __iter__(self):
1053 def __iter__(self):
1054 return iter(self._fp)
1054 return iter(self._fp)
1055
1055
1056 def __getattr__(self, attr):
1056 def __getattr__(self, attr):
1057 return getattr(self._fp, attr)
1057 return getattr(self._fp, attr)
1058
1058
1059 def __enter__(self):
1059 def __enter__(self):
1060 return self
1060 return self
1061
1061
1062 def __exit__(self, exc_type, exc_value, exc_tb):
1062 def __exit__(self, exc_type, exc_value, exc_tb):
1063 pass
1063 pass
1064
1064
1065 def makefileobj(ctx, pat, mode='wb', **props):
1065 def makefileobj(ctx, pat, mode='wb', **props):
1066 writable = mode not in ('r', 'rb')
1066 writable = mode not in ('r', 'rb')
1067
1067
1068 if isstdiofilename(pat):
1068 if isstdiofilename(pat):
1069 repo = ctx.repo()
1069 repo = ctx.repo()
1070 if writable:
1070 if writable:
1071 fp = repo.ui.fout
1071 fp = repo.ui.fout
1072 else:
1072 else:
1073 fp = repo.ui.fin
1073 fp = repo.ui.fin
1074 return _unclosablefile(fp)
1074 return _unclosablefile(fp)
1075 fn = makefilename(ctx, pat, **props)
1075 fn = makefilename(ctx, pat, **props)
1076 return open(fn, mode)
1076 return open(fn, mode)
1077
1077
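# Putting these pieces together (an illustrative sketch; 'pat', 'abs' and
# 'data' are assumptions): code that writes to a user-supplied output pattern
# goes through makefileobj(), so a pattern of '-' transparently becomes the
# never-closed ui.fout wrapper above:
#
#     with makefileobj(ctx, pat, mode='wb', pathname=abs) as fp:
#         fp.write(data)
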
1078 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
1078 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
1079 """opens the changelog, manifest, a filelog or a given revlog"""
1079 """opens the changelog, manifest, a filelog or a given revlog"""
1080 cl = opts['changelog']
1080 cl = opts['changelog']
1081 mf = opts['manifest']
1081 mf = opts['manifest']
1082 dir = opts['dir']
1082 dir = opts['dir']
1083 msg = None
1083 msg = None
1084 if cl and mf:
1084 if cl and mf:
1085 msg = _('cannot specify --changelog and --manifest at the same time')
1085 msg = _('cannot specify --changelog and --manifest at the same time')
1086 elif cl and dir:
1086 elif cl and dir:
1087 msg = _('cannot specify --changelog and --dir at the same time')
1087 msg = _('cannot specify --changelog and --dir at the same time')
1088 elif cl or mf or dir:
1088 elif cl or mf or dir:
1089 if file_:
1089 if file_:
1090 msg = _('cannot specify filename with --changelog or --manifest')
1090 msg = _('cannot specify filename with --changelog or --manifest')
1091 elif not repo:
1091 elif not repo:
1092 msg = _('cannot specify --changelog or --manifest or --dir '
1092 msg = _('cannot specify --changelog or --manifest or --dir '
1093 'without a repository')
1093 'without a repository')
1094 if msg:
1094 if msg:
1095 raise error.Abort(msg)
1095 raise error.Abort(msg)
1096
1096
1097 r = None
1097 r = None
1098 if repo:
1098 if repo:
1099 if cl:
1099 if cl:
1100 r = repo.unfiltered().changelog
1100 r = repo.unfiltered().changelog
1101 elif dir:
1101 elif dir:
1102 if 'treemanifest' not in repo.requirements:
1102 if 'treemanifest' not in repo.requirements:
1103 raise error.Abort(_("--dir can only be used on repos with "
1103 raise error.Abort(_("--dir can only be used on repos with "
1104 "treemanifest enabled"))
1104 "treemanifest enabled"))
1105 if not dir.endswith('/'):
1105 if not dir.endswith('/'):
1106 dir = dir + '/'
1106 dir = dir + '/'
1107 dirlog = repo.manifestlog.getstorage(dir)
1107 dirlog = repo.manifestlog.getstorage(dir)
1108 if len(dirlog):
1108 if len(dirlog):
1109 r = dirlog
1109 r = dirlog
1110 elif mf:
1110 elif mf:
1111 r = repo.manifestlog.getstorage(b'')
1111 r = repo.manifestlog.getstorage(b'')
1112 elif file_:
1112 elif file_:
1113 filelog = repo.file(file_)
1113 filelog = repo.file(file_)
1114 if len(filelog):
1114 if len(filelog):
1115 r = filelog
1115 r = filelog
1116
1116
1117 # Not all storage may be revlogs. If requested, try to return an actual
1117 # Not all storage may be revlogs. If requested, try to return an actual
1118 # revlog instance.
1118 # revlog instance.
1119 if returnrevlog:
1119 if returnrevlog:
1120 if isinstance(r, revlog.revlog):
1120 if isinstance(r, revlog.revlog):
1121 pass
1121 pass
1122 elif util.safehasattr(r, '_revlog'):
1122 elif util.safehasattr(r, '_revlog'):
1123 r = r._revlog
1123 r = r._revlog
1124 elif r is not None:
1124 elif r is not None:
1125 raise error.Abort(_('%r does not appear to be a revlog') % r)
1125 raise error.Abort(_('%r does not appear to be a revlog') % r)
1126
1126
1127 if not r:
1127 if not r:
1128 if not returnrevlog:
1128 if not returnrevlog:
1129 raise error.Abort(_('cannot give path to non-revlog'))
1129 raise error.Abort(_('cannot give path to non-revlog'))
1130
1130
1131 if not file_:
1131 if not file_:
1132 raise error.CommandError(cmd, _('invalid arguments'))
1132 raise error.CommandError(cmd, _('invalid arguments'))
1133 if not os.path.isfile(file_):
1133 if not os.path.isfile(file_):
1134 raise error.Abort(_("revlog '%s' not found") % file_)
1134 raise error.Abort(_("revlog '%s' not found") % file_)
1135 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
1135 r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
1136 file_[:-2] + ".i")
1136 file_[:-2] + ".i")
1137 return r
1137 return r
1138
1138
def openrevlog(repo, cmd, file_, opts):
    """Obtain the revlog backing storage of an item.

    This is similar to ``openstorage()`` except it always returns a revlog.

    In most cases, a caller cares about the main storage object - not the
    revlog backing it. Therefore, this function should only be used by code
    that needs to examine low-level revlog implementation details, e.g. debug
    commands.
    """
    return openstorage(repo, cmd, file_, opts, returnrevlog=True)
1150
1150
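# Sketch of how a debug command might call this (the option keys mirror those
# read in openstorage() above; the command name is only illustrative):
#
#     opts = {'changelog': False, 'manifest': False, 'dir': ''}
#     r = openrevlog(repo, 'debugindex', file_, opts)
#     numrevs = len(r)   # revision count of the backing revlog
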
1151 def copy(ui, repo, pats, opts, rename=False):
1151 def copy(ui, repo, pats, opts, rename=False):
1152 # called with the repo lock held
1152 # called with the repo lock held
1153 #
1153 #
1154 # hgsep => pathname that uses "/" to separate directories
1154 # hgsep => pathname that uses "/" to separate directories
1155 # ossep => pathname that uses os.sep to separate directories
1155 # ossep => pathname that uses os.sep to separate directories
1156 cwd = repo.getcwd()
1156 cwd = repo.getcwd()
1157 targets = {}
1157 targets = {}
1158 after = opts.get("after")
1158 after = opts.get("after")
1159 dryrun = opts.get("dry_run")
1159 dryrun = opts.get("dry_run")
1160 wctx = repo[None]
1160 wctx = repo[None]
1161
1161
1162 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1162 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1163 def walkpat(pat):
1163 def walkpat(pat):
1164 srcs = []
1164 srcs = []
1165 if after:
1165 if after:
1166 badstates = '?'
1166 badstates = '?'
1167 else:
1167 else:
1168 badstates = '?r'
1168 badstates = '?r'
1169 m = scmutil.match(wctx, [pat], opts, globbed=True)
1169 m = scmutil.match(wctx, [pat], opts, globbed=True)
1170 for abs in wctx.walk(m):
1170 for abs in wctx.walk(m):
1171 state = repo.dirstate[abs]
1171 state = repo.dirstate[abs]
1172 rel = uipathfn(abs)
1172 rel = uipathfn(abs)
1173 exact = m.exact(abs)
1173 exact = m.exact(abs)
1174 if state in badstates:
1174 if state in badstates:
1175 if exact and state == '?':
1175 if exact and state == '?':
1176 ui.warn(_('%s: not copying - file is not managed\n') % rel)
1176 ui.warn(_('%s: not copying - file is not managed\n') % rel)
1177 if exact and state == 'r':
1177 if exact and state == 'r':
1178 ui.warn(_('%s: not copying - file has been marked for'
1178 ui.warn(_('%s: not copying - file has been marked for'
1179 ' remove\n') % rel)
1179 ' remove\n') % rel)
1180 continue
1180 continue
1181 # abs: hgsep
1181 # abs: hgsep
1182 # rel: ossep
1182 # rel: ossep
1183 srcs.append((abs, rel, exact))
1183 srcs.append((abs, rel, exact))
1184 return srcs
1184 return srcs
1185
1185
1186 # abssrc: hgsep
1186 # abssrc: hgsep
1187 # relsrc: ossep
1187 # relsrc: ossep
1188 # otarget: ossep
1188 # otarget: ossep
1189 def copyfile(abssrc, relsrc, otarget, exact):
1189 def copyfile(abssrc, relsrc, otarget, exact):
1190 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1190 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
1191 if '/' in abstarget:
1191 if '/' in abstarget:
1192 # We cannot normalize abstarget itself, this would prevent
1192 # We cannot normalize abstarget itself, this would prevent
1193 # case only renames, like a => A.
1193 # case only renames, like a => A.
1194 abspath, absname = abstarget.rsplit('/', 1)
1194 abspath, absname = abstarget.rsplit('/', 1)
1195 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
1195 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
1196 reltarget = repo.pathto(abstarget, cwd)
1196 reltarget = repo.pathto(abstarget, cwd)
1197 target = repo.wjoin(abstarget)
1197 target = repo.wjoin(abstarget)
1198 src = repo.wjoin(abssrc)
1198 src = repo.wjoin(abssrc)
1199 state = repo.dirstate[abstarget]
1199 state = repo.dirstate[abstarget]
1200
1200
1201 scmutil.checkportable(ui, abstarget)
1201 scmutil.checkportable(ui, abstarget)
1202
1202
1203 # check for collisions
1203 # check for collisions
1204 prevsrc = targets.get(abstarget)
1204 prevsrc = targets.get(abstarget)
1205 if prevsrc is not None:
1205 if prevsrc is not None:
1206 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1206 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1207 (reltarget, repo.pathto(abssrc, cwd),
1207 (reltarget, repo.pathto(abssrc, cwd),
1208 repo.pathto(prevsrc, cwd)))
1208 repo.pathto(prevsrc, cwd)))
1209 return True # report a failure
1209 return True # report a failure
1210
1210
1211 # check for overwrites
1211 # check for overwrites
1212 exists = os.path.lexists(target)
1212 exists = os.path.lexists(target)
1213 samefile = False
1213 samefile = False
1214 if exists and abssrc != abstarget:
1214 if exists and abssrc != abstarget:
1215 if (repo.dirstate.normalize(abssrc) ==
1215 if (repo.dirstate.normalize(abssrc) ==
1216 repo.dirstate.normalize(abstarget)):
1216 repo.dirstate.normalize(abstarget)):
1217 if not rename:
1217 if not rename:
1218 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1218 ui.warn(_("%s: can't copy - same file\n") % reltarget)
1219 return True # report a failure
1219 return True # report a failure
1220 exists = False
1220 exists = False
1221 samefile = True
1221 samefile = True
1222
1222
1223 if not after and exists or after and state in 'mn':
1223 if not after and exists or after and state in 'mn':
1224 if not opts['force']:
1224 if not opts['force']:
1225 if state in 'mn':
1225 if state in 'mn':
1226 msg = _('%s: not overwriting - file already committed\n')
1226 msg = _('%s: not overwriting - file already committed\n')
1227 if after:
1227 if after:
1228 flags = '--after --force'
1228 flags = '--after --force'
1229 else:
1229 else:
1230 flags = '--force'
1230 flags = '--force'
1231 if rename:
1231 if rename:
1232 hint = _("('hg rename %s' to replace the file by "
1232 hint = _("('hg rename %s' to replace the file by "
1233 'recording a rename)\n') % flags
1233 'recording a rename)\n') % flags
1234 else:
1234 else:
1235 hint = _("('hg copy %s' to replace the file by "
1235 hint = _("('hg copy %s' to replace the file by "
1236 'recording a copy)\n') % flags
1236 'recording a copy)\n') % flags
1237 else:
1237 else:
1238 msg = _('%s: not overwriting - file exists\n')
1238 msg = _('%s: not overwriting - file exists\n')
1239 if rename:
1239 if rename:
1240 hint = _("('hg rename --after' to record the rename)\n")
1240 hint = _("('hg rename --after' to record the rename)\n")
1241 else:
1241 else:
1242 hint = _("('hg copy --after' to record the copy)\n")
1242 hint = _("('hg copy --after' to record the copy)\n")
1243 ui.warn(msg % reltarget)
1243 ui.warn(msg % reltarget)
1244 ui.warn(hint)
1244 ui.warn(hint)
1245 return True # report a failure
1245 return True # report a failure
1246
1246
1247 if after:
1247 if after:
1248 if not exists:
1248 if not exists:
1249 if rename:
1249 if rename:
1250 ui.warn(_('%s: not recording move - %s does not exist\n') %
1250 ui.warn(_('%s: not recording move - %s does not exist\n') %
1251 (relsrc, reltarget))
1251 (relsrc, reltarget))
1252 else:
1252 else:
1253 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1253 ui.warn(_('%s: not recording copy - %s does not exist\n') %
1254 (relsrc, reltarget))
1254 (relsrc, reltarget))
1255 return True # report a failure
1255 return True # report a failure
1256 elif not dryrun:
1256 elif not dryrun:
1257 try:
1257 try:
1258 if exists:
1258 if exists:
1259 os.unlink(target)
1259 os.unlink(target)
1260 targetdir = os.path.dirname(target) or '.'
1260 targetdir = os.path.dirname(target) or '.'
1261 if not os.path.isdir(targetdir):
1261 if not os.path.isdir(targetdir):
1262 os.makedirs(targetdir)
1262 os.makedirs(targetdir)
1263 if samefile:
1263 if samefile:
1264 tmp = target + "~hgrename"
1264 tmp = target + "~hgrename"
1265 os.rename(src, tmp)
1265 os.rename(src, tmp)
1266 os.rename(tmp, target)
1266 os.rename(tmp, target)
1267 else:
1267 else:
1268 # Preserve stat info on renames, not on copies; this matches
1268 # Preserve stat info on renames, not on copies; this matches
1269 # Linux CLI behavior.
1269 # Linux CLI behavior.
1270 util.copyfile(src, target, copystat=rename)
1270 util.copyfile(src, target, copystat=rename)
1271 srcexists = True
1271 srcexists = True
1272 except IOError as inst:
1272 except IOError as inst:
1273 if inst.errno == errno.ENOENT:
1273 if inst.errno == errno.ENOENT:
1274 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1274 ui.warn(_('%s: deleted in working directory\n') % relsrc)
1275 srcexists = False
1275 srcexists = False
1276 else:
1276 else:
1277 ui.warn(_('%s: cannot copy - %s\n') %
1277 ui.warn(_('%s: cannot copy - %s\n') %
1278 (relsrc, encoding.strtolocal(inst.strerror)))
1278 (relsrc, encoding.strtolocal(inst.strerror)))
1279 return True # report a failure
1279 return True # report a failure
1280
1280
1281 if ui.verbose or not exact:
1281 if ui.verbose or not exact:
1282 if rename:
1282 if rename:
1283 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1283 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
1284 else:
1284 else:
1285 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1285 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1286
1286
1287 targets[abstarget] = abssrc
1287 targets[abstarget] = abssrc
1288
1288
1289 # fix up dirstate
1289 # fix up dirstate
1290 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1290 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
1291 dryrun=dryrun, cwd=cwd)
1291 dryrun=dryrun, cwd=cwd)
1292 if rename and not dryrun:
1292 if rename and not dryrun:
1293 if not after and srcexists and not samefile:
1293 if not after and srcexists and not samefile:
1294 rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
1294 rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
1295 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1295 repo.wvfs.unlinkpath(abssrc, rmdir=rmdir)
1296 wctx.forget([abssrc])
1296 wctx.forget([abssrc])
1297
1297
    # pat: ossep
    # dest: ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
1302 def targetpathfn(pat, dest, srcs):
1302 def targetpathfn(pat, dest, srcs):
1303 if os.path.isdir(pat):
1303 if os.path.isdir(pat):
1304 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1304 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1305 abspfx = util.localpath(abspfx)
1305 abspfx = util.localpath(abspfx)
1306 if destdirexists:
1306 if destdirexists:
1307 striplen = len(os.path.split(abspfx)[0])
1307 striplen = len(os.path.split(abspfx)[0])
1308 else:
1308 else:
1309 striplen = len(abspfx)
1309 striplen = len(abspfx)
1310 if striplen:
1310 if striplen:
1311 striplen += len(pycompat.ossep)
1311 striplen += len(pycompat.ossep)
1312 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1312 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
1313 elif destdirexists:
1313 elif destdirexists:
1314 res = lambda p: os.path.join(dest,
1314 res = lambda p: os.path.join(dest,
1315 os.path.basename(util.localpath(p)))
1315 os.path.basename(util.localpath(p)))
1316 else:
1316 else:
1317 res = lambda p: dest
1317 res = lambda p: dest
1318 return res
1318 return res
1319
1319
    # pat: ossep
    # dest: ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
1324 def targetpathafterfn(pat, dest, srcs):
1324 def targetpathafterfn(pat, dest, srcs):
1325 if matchmod.patkind(pat):
1325 if matchmod.patkind(pat):
1326 # a mercurial pattern
1326 # a mercurial pattern
1327 res = lambda p: os.path.join(dest,
1327 res = lambda p: os.path.join(dest,
1328 os.path.basename(util.localpath(p)))
1328 os.path.basename(util.localpath(p)))
1329 else:
1329 else:
1330 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1330 abspfx = pathutil.canonpath(repo.root, cwd, pat)
1331 if len(abspfx) < len(srcs[0][0]):
1331 if len(abspfx) < len(srcs[0][0]):
1332 # A directory. Either the target path contains the last
1332 # A directory. Either the target path contains the last
1333 # component of the source path or it does not.
1333 # component of the source path or it does not.
1334 def evalpath(striplen):
1334 def evalpath(striplen):
1335 score = 0
1335 score = 0
1336 for s in srcs:
1336 for s in srcs:
1337 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1337 t = os.path.join(dest, util.localpath(s[0])[striplen:])
1338 if os.path.lexists(t):
1338 if os.path.lexists(t):
1339 score += 1
1339 score += 1
1340 return score
1340 return score
1341
1341
1342 abspfx = util.localpath(abspfx)
1342 abspfx = util.localpath(abspfx)
1343 striplen = len(abspfx)
1343 striplen = len(abspfx)
1344 if striplen:
1344 if striplen:
1345 striplen += len(pycompat.ossep)
1345 striplen += len(pycompat.ossep)
1346 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1346 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1347 score = evalpath(striplen)
1347 score = evalpath(striplen)
1348 striplen1 = len(os.path.split(abspfx)[0])
1348 striplen1 = len(os.path.split(abspfx)[0])
1349 if striplen1:
1349 if striplen1:
1350 striplen1 += len(pycompat.ossep)
1350 striplen1 += len(pycompat.ossep)
1351 if evalpath(striplen1) > score:
1351 if evalpath(striplen1) > score:
1352 striplen = striplen1
1352 striplen = striplen1
1353 res = lambda p: os.path.join(dest,
1353 res = lambda p: os.path.join(dest,
1354 util.localpath(p)[striplen:])
1354 util.localpath(p)[striplen:])
1355 else:
1355 else:
1356 # a file
1356 # a file
1357 if destdirexists:
1357 if destdirexists:
1358 res = lambda p: os.path.join(dest,
1358 res = lambda p: os.path.join(dest,
1359 os.path.basename(util.localpath(p)))
1359 os.path.basename(util.localpath(p)))
1360 else:
1360 else:
1361 res = lambda p: dest
1361 res = lambda p: dest
1362 return res
1362 return res
1363
1363
1364 pats = scmutil.expandpats(pats)
1364 pats = scmutil.expandpats(pats)
1365 if not pats:
1365 if not pats:
1366 raise error.Abort(_('no source or destination specified'))
1366 raise error.Abort(_('no source or destination specified'))
1367 if len(pats) == 1:
1367 if len(pats) == 1:
1368 raise error.Abort(_('no destination specified'))
1368 raise error.Abort(_('no destination specified'))
1369 dest = pats.pop()
1369 dest = pats.pop()
1370 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1370 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
1371 if not destdirexists:
1371 if not destdirexists:
1372 if len(pats) > 1 or matchmod.patkind(pats[0]):
1372 if len(pats) > 1 or matchmod.patkind(pats[0]):
1373 raise error.Abort(_('with multiple sources, destination must be an '
1373 raise error.Abort(_('with multiple sources, destination must be an '
1374 'existing directory'))
1374 'existing directory'))
1375 if util.endswithsep(dest):
1375 if util.endswithsep(dest):
1376 raise error.Abort(_('destination %s is not a directory') % dest)
1376 raise error.Abort(_('destination %s is not a directory') % dest)
1377
1377
1378 tfn = targetpathfn
1378 tfn = targetpathfn
1379 if after:
1379 if after:
1380 tfn = targetpathafterfn
1380 tfn = targetpathafterfn
1381 copylist = []
1381 copylist = []
1382 for pat in pats:
1382 for pat in pats:
1383 srcs = walkpat(pat)
1383 srcs = walkpat(pat)
1384 if not srcs:
1384 if not srcs:
1385 continue
1385 continue
1386 copylist.append((tfn(pat, dest, srcs), srcs))
1386 copylist.append((tfn(pat, dest, srcs), srcs))
1387 if not copylist:
1387 if not copylist:
1388 raise error.Abort(_('no files to copy'))
1388 raise error.Abort(_('no files to copy'))
1389
1389
1390 errors = 0
1390 errors = 0
1391 for targetpath, srcs in copylist:
1391 for targetpath, srcs in copylist:
1392 for abssrc, relsrc, exact in srcs:
1392 for abssrc, relsrc, exact in srcs:
1393 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1393 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
1394 errors += 1
1394 errors += 1
1395
1395
1396 return errors != 0
1396 return errors != 0
1397
1397
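# Note on the hgsep/ossep convention used throughout copy() above
# (illustrative):
#
#     hgsep path: 'foo/bar/baz.txt'     - '/'-separated, as Mercurial stores it
#     ossep path: 'foo\\bar\\baz.txt'   - os.sep-separated; differs only on
#                                         Windows
#
# util.localpath() converts an hgsep path into an ossep path.
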
## facility to let extensions process additional data into an import patch
# list of identifiers to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' functions are run before the commit is made and are provided the
# following arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from the patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, which would allow
# mutation of the in-memory commit and more. Feel free to rework the code to
# get there.
extrapreimportmap = {}
# 'postimport' functions are run after the commit is made and are provided the
# following argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
1418
1418
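# A third-party extension would typically hook in like this (sketch; the
# extension name and extra key are made up for illustration):
#
#     def _recordsource(repo, patchdata, extra, opts):
#         extra['myext:patchfile'] = patchdata.get('filename', '')
#
#     cmdutil.extrapreimport.append('myext')
#     cmdutil.extrapreimportmap['myext'] = _recordsource
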
def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :patchdata: a dictionary containing parsed patch data (such as from
                ``patch.extract()``)
    :parents: nodes that will be the parents of the created commit
    :opts: the full dict of options passed to the import command
    :msgs: list to save the commit message to
           (used in case we need to save it when failing)
    :updatefunc: a function that updates a repo to a given node
                 updatefunc(<repo>, <node>)
    """
1437 # avoid cycle context -> subrepo -> cmdutil
1437 # avoid cycle context -> subrepo -> cmdutil
1438 from . import context
1438 from . import context
1439
1439
1440 tmpname = patchdata.get('filename')
1440 tmpname = patchdata.get('filename')
1441 message = patchdata.get('message')
1441 message = patchdata.get('message')
1442 user = opts.get('user') or patchdata.get('user')
1442 user = opts.get('user') or patchdata.get('user')
1443 date = opts.get('date') or patchdata.get('date')
1443 date = opts.get('date') or patchdata.get('date')
1444 branch = patchdata.get('branch')
1444 branch = patchdata.get('branch')
1445 nodeid = patchdata.get('nodeid')
1445 nodeid = patchdata.get('nodeid')
1446 p1 = patchdata.get('p1')
1446 p1 = patchdata.get('p1')
1447 p2 = patchdata.get('p2')
1447 p2 = patchdata.get('p2')
1448
1448
1449 nocommit = opts.get('no_commit')
1449 nocommit = opts.get('no_commit')
1450 importbranch = opts.get('import_branch')
1450 importbranch = opts.get('import_branch')
1451 update = not opts.get('bypass')
1451 update = not opts.get('bypass')
1452 strip = opts["strip"]
1452 strip = opts["strip"]
1453 prefix = opts["prefix"]
1453 prefix = opts["prefix"]
1454 sim = float(opts.get('similarity') or 0)
1454 sim = float(opts.get('similarity') or 0)
1455
1455
1456 if not tmpname:
1456 if not tmpname:
1457 return None, None, False
1457 return None, None, False
1458
1458
1459 rejects = False
1459 rejects = False
1460
1460
1461 cmdline_message = logmessage(ui, opts)
1461 cmdline_message = logmessage(ui, opts)
1462 if cmdline_message:
1462 if cmdline_message:
1463 # pickup the cmdline msg
1463 # pickup the cmdline msg
1464 message = cmdline_message
1464 message = cmdline_message
1465 elif message:
1465 elif message:
1466 # pickup the patch msg
1466 # pickup the patch msg
1467 message = message.strip()
1467 message = message.strip()
1468 else:
1468 else:
1469 # launch the editor
1469 # launch the editor
1470 message = None
1470 message = None
1471 ui.debug('message:\n%s\n' % (message or ''))
1471 ui.debug('message:\n%s\n' % (message or ''))
1472
1472
1473 if len(parents) == 1:
1473 if len(parents) == 1:
1474 parents.append(repo[nullid])
1474 parents.append(repo[nullid])
1475 if opts.get('exact'):
1475 if opts.get('exact'):
1476 if not nodeid or not p1:
1476 if not nodeid or not p1:
1477 raise error.Abort(_('not a Mercurial patch'))
1477 raise error.Abort(_('not a Mercurial patch'))
1478 p1 = repo[p1]
1478 p1 = repo[p1]
1479 p2 = repo[p2 or nullid]
1479 p2 = repo[p2 or nullid]
1480 elif p2:
1480 elif p2:
1481 try:
1481 try:
1482 p1 = repo[p1]
1482 p1 = repo[p1]
1483 p2 = repo[p2]
1483 p2 = repo[p2]
1484 # Without any options, consider p2 only if the
1484 # Without any options, consider p2 only if the
1485 # patch is being applied on top of the recorded
1485 # patch is being applied on top of the recorded
1486 # first parent.
1486 # first parent.
1487 if p1 != parents[0]:
1487 if p1 != parents[0]:
1488 p1 = parents[0]
1488 p1 = parents[0]
1489 p2 = repo[nullid]
1489 p2 = repo[nullid]
1490 except error.RepoError:
1490 except error.RepoError:
1491 p1, p2 = parents
1491 p1, p2 = parents
1492 if p2.node() == nullid:
1492 if p2.node() == nullid:
1493 ui.warn(_("warning: import the patch as a normal revision\n"
1493 ui.warn(_("warning: import the patch as a normal revision\n"
1494 "(use --exact to import the patch as a merge)\n"))
1494 "(use --exact to import the patch as a merge)\n"))
1495 else:
1495 else:
1496 p1, p2 = parents
1496 p1, p2 = parents
1497
1497
1498 n = None
1498 n = None
1499 if update:
1499 if update:
1500 if p1 != parents[0]:
1500 if p1 != parents[0]:
1501 updatefunc(repo, p1.node())
1501 updatefunc(repo, p1.node())
1502 if p2 != parents[1]:
1502 if p2 != parents[1]:
1503 repo.setparents(p1.node(), p2.node())
1503 repo.setparents(p1.node(), p2.node())
1504
1504
1505 if opts.get('exact') or importbranch:
1505 if opts.get('exact') or importbranch:
1506 repo.dirstate.setbranch(branch or 'default')
1506 repo.dirstate.setbranch(branch or 'default')
1507
1507
1508 partial = opts.get('partial', False)
1508 partial = opts.get('partial', False)
1509 files = set()
1509 files = set()
1510 try:
1510 try:
1511 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1511 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1512 files=files, eolmode=None, similarity=sim / 100.0)
1512 files=files, eolmode=None, similarity=sim / 100.0)
1513 except error.PatchError as e:
1513 except error.PatchError as e:
1514 if not partial:
1514 if not partial:
1515 raise error.Abort(pycompat.bytestr(e))
1515 raise error.Abort(pycompat.bytestr(e))
1516 if partial:
1516 if partial:
1517 rejects = True
1517 rejects = True
1518
1518
1519 files = list(files)
1519 files = list(files)
1520 if nocommit:
1520 if nocommit:
1521 if message:
1521 if message:
1522 msgs.append(message)
1522 msgs.append(message)
1523 else:
1523 else:
1524 if opts.get('exact') or p2:
1524 if opts.get('exact') or p2:
1525 # If you got here, you either use --force and know what
1525 # If you got here, you either use --force and know what
1526 # you are doing or used --exact or a merge patch while
1526 # you are doing or used --exact or a merge patch while
1527 # being updated to its first parent.
1527 # being updated to its first parent.
1528 m = None
1528 m = None
1529 else:
1529 else:
1530 m = scmutil.matchfiles(repo, files or [])
1530 m = scmutil.matchfiles(repo, files or [])
1531 editform = mergeeditform(repo[None], 'import.normal')
1531 editform = mergeeditform(repo[None], 'import.normal')
1532 if opts.get('exact'):
1532 if opts.get('exact'):
1533 editor = None
1533 editor = None
1534 else:
1534 else:
1535 editor = getcommiteditor(editform=editform,
1535 editor = getcommiteditor(editform=editform,
1536 **pycompat.strkwargs(opts))
1536 **pycompat.strkwargs(opts))
1537 extra = {}
1537 extra = {}
1538 for idfunc in extrapreimport:
1538 for idfunc in extrapreimport:
1539 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1539 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
1540 overrides = {}
1540 overrides = {}
1541 if partial:
1541 if partial:
1542 overrides[('ui', 'allowemptycommit')] = True
1542 overrides[('ui', 'allowemptycommit')] = True
1543 with repo.ui.configoverride(overrides, 'import'):
1543 with repo.ui.configoverride(overrides, 'import'):
1544 n = repo.commit(message, user,
1544 n = repo.commit(message, user,
1545 date, match=m,
1545 date, match=m,
1546 editor=editor, extra=extra)
1546 editor=editor, extra=extra)
1547 for idfunc in extrapostimport:
1547 for idfunc in extrapostimport:
1548 extrapostimportmap[idfunc](repo[n])
1548 extrapostimportmap[idfunc](repo[n])
1549 else:
1549 else:
1550 if opts.get('exact') or importbranch:
1550 if opts.get('exact') or importbranch:
1551 branch = branch or 'default'
1551 branch = branch or 'default'
1552 else:
1552 else:
1553 branch = p1.branch()
1553 branch = p1.branch()
1554 store = patch.filestore()
1554 store = patch.filestore()
1555 try:
1555 try:
1556 files = set()
1556 files = set()
1557 try:
1557 try:
1558 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1558 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1559 files, eolmode=None)
1559 files, eolmode=None)
1560 except error.PatchError as e:
1560 except error.PatchError as e:
1561 raise error.Abort(stringutil.forcebytestr(e))
1561 raise error.Abort(stringutil.forcebytestr(e))
1562 if opts.get('exact'):
1562 if opts.get('exact'):
1563 editor = None
1563 editor = None
1564 else:
1564 else:
1565 editor = getcommiteditor(editform='import.bypass')
1565 editor = getcommiteditor(editform='import.bypass')
1566 memctx = context.memctx(repo, (p1.node(), p2.node()),
1566 memctx = context.memctx(repo, (p1.node(), p2.node()),
1567 message,
1567 message,
1568 files=files,
1568 files=files,
1569 filectxfn=store,
1569 filectxfn=store,
1570 user=user,
1570 user=user,
1571 date=date,
1571 date=date,
1572 branch=branch,
1572 branch=branch,
1573 editor=editor)
1573 editor=editor)
1574 n = memctx.commit()
1574 n = memctx.commit()
1575 finally:
1575 finally:
1576 store.close()
1576 store.close()
1577 if opts.get('exact') and nocommit:
1577 if opts.get('exact') and nocommit:
1578 # --exact with --no-commit is still useful in that it does merge
1578 # --exact with --no-commit is still useful in that it does merge
1579 # and branch bits
1579 # and branch bits
1580 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1580 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1581 elif opts.get('exact') and (not n or hex(n) != nodeid):
1581 elif opts.get('exact') and (not n or hex(n) != nodeid):
1582 raise error.Abort(_('patch is damaged or loses information'))
1582 raise error.Abort(_('patch is damaged or loses information'))
1583 msg = _('applied to working directory')
1583 msg = _('applied to working directory')
1584 if n:
1584 if n:
1585 # i18n: refers to a short changeset id
1585 # i18n: refers to a short changeset id
1586 msg = _('created %s') % short(n)
1586 msg = _('created %s') % short(n)
1587 return msg, n, rejects
1587 return msg, n, rejects
1588
1588
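# Calling pattern, roughly as used by commands.import (a simplified sketch;
# 'patchpath', 'parents', 'msgs' and 'updatefunc' are assumed to be set up by
# the caller, and updatefunc(repo, node) updates the working directory):
#
#     with open(patchpath, 'rb') as fp:
#         patchdata = patch.extract(ui, fp)
#     msg, node, rejects = tryimportone(ui, repo, patchdata, parents,
#                                       opts, msgs, updatefunc)
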
# facility to let extensions include additional data in an exported patch
# list of identifiers to be executed in order
extraexport = []
# mapping from identifier to actual export function
# the function has to return a string to be added to the header, or None
# it is given two arguments (sequencenumber, changectx)
extraexportmap = {}
1596
1596
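# An extension can add its own '# ...' header line to exported patches like
# this (sketch; the names are illustrative only):
#
#     def _topicheader(seqno, ctx):
#         topic = ctx.extra().get('topic')
#         return 'Topic %s' % topic if topic else None
#
#     cmdutil.extraexport.append('myext')
#     cmdutil.extraexportmap['myext'] = _topicheader
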
1597 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
1597 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
1598 node = scmutil.binnode(ctx)
1598 node = scmutil.binnode(ctx)
1599 parents = [p.node() for p in ctx.parents() if p]
1599 parents = [p.node() for p in ctx.parents() if p]
1600 branch = ctx.branch()
1600 branch = ctx.branch()
1601 if switch_parent:
1601 if switch_parent:
1602 parents.reverse()
1602 parents.reverse()
1603
1603
1604 if parents:
1604 if parents:
1605 prev = parents[0]
1605 prev = parents[0]
1606 else:
1606 else:
1607 prev = nullid
1607 prev = nullid
1608
1608
1609 fm.context(ctx=ctx)
1609 fm.context(ctx=ctx)
1610 fm.plain('# HG changeset patch\n')
1610 fm.plain('# HG changeset patch\n')
1611 fm.write('user', '# User %s\n', ctx.user())
1611 fm.write('user', '# User %s\n', ctx.user())
1612 fm.plain('# Date %d %d\n' % ctx.date())
1612 fm.plain('# Date %d %d\n' % ctx.date())
1613 fm.write('date', '# %s\n', fm.formatdate(ctx.date()))
1613 fm.write('date', '# %s\n', fm.formatdate(ctx.date()))
1614 fm.condwrite(branch and branch != 'default',
1614 fm.condwrite(branch and branch != 'default',
1615 'branch', '# Branch %s\n', branch)
1615 'branch', '# Branch %s\n', branch)
1616 fm.write('node', '# Node ID %s\n', hex(node))
1616 fm.write('node', '# Node ID %s\n', hex(node))
1617 fm.plain('# Parent %s\n' % hex(prev))
1617 fm.plain('# Parent %s\n' % hex(prev))
1618 if len(parents) > 1:
1618 if len(parents) > 1:
1619 fm.plain('# Parent %s\n' % hex(parents[1]))
1619 fm.plain('# Parent %s\n' % hex(parents[1]))
1620 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name='node'))
1620 fm.data(parents=fm.formatlist(pycompat.maplist(hex, parents), name='node'))
1621
1621
1622 # TODO: redesign extraexportmap function to support formatter
1622 # TODO: redesign extraexportmap function to support formatter
1623 for headerid in extraexport:
1623 for headerid in extraexport:
1624 header = extraexportmap[headerid](seqno, ctx)
1624 header = extraexportmap[headerid](seqno, ctx)
1625 if header is not None:
1625 if header is not None:
1626 fm.plain('# %s\n' % header)
1626 fm.plain('# %s\n' % header)
1627
1627
1628 fm.write('desc', '%s\n', ctx.description().rstrip())
1628 fm.write('desc', '%s\n', ctx.description().rstrip())
1629 fm.plain('\n')
1629 fm.plain('\n')
1630
1630
1631 if fm.isplain():
1631 if fm.isplain():
1632 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
1632 chunkiter = patch.diffui(repo, prev, node, match, opts=diffopts)
1633 for chunk, label in chunkiter:
1633 for chunk, label in chunkiter:
1634 fm.plain(chunk, label=label)
1634 fm.plain(chunk, label=label)
1635 else:
1635 else:
1636 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
1636 chunkiter = patch.diff(repo, prev, node, match, opts=diffopts)
1637 # TODO: make it structured?
1637 # TODO: make it structured?
1638 fm.data(diff=b''.join(chunkiter))
1638 fm.data(diff=b''.join(chunkiter))
1639
1639
1640 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
1640 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
1641 """Export changesets to stdout or a single file"""
1641 """Export changesets to stdout or a single file"""
1642 for seqno, rev in enumerate(revs, 1):
1642 for seqno, rev in enumerate(revs, 1):
1643 ctx = repo[rev]
1643 ctx = repo[rev]
1644 if not dest.startswith('<'):
1644 if not dest.startswith('<'):
1645 repo.ui.note("%s\n" % dest)
1645 repo.ui.note("%s\n" % dest)
1646 fm.startitem()
1646 fm.startitem()
1647 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
1647 _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
1648
1648
1649 def _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, diffopts,
1649 def _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, diffopts,
1650 match):
1650 match):
1651 """Export changesets to possibly multiple files"""
1651 """Export changesets to possibly multiple files"""
1652 total = len(revs)
1652 total = len(revs)
1653 revwidth = max(len(str(rev)) for rev in revs)
1653 revwidth = max(len(str(rev)) for rev in revs)
1654 filemap = util.sortdict() # filename: [(seqno, rev), ...]
1654 filemap = util.sortdict() # filename: [(seqno, rev), ...]
1655
1655
1656 for seqno, rev in enumerate(revs, 1):
1656 for seqno, rev in enumerate(revs, 1):
1657 ctx = repo[rev]
1657 ctx = repo[rev]
1658 dest = makefilename(ctx, fntemplate,
1658 dest = makefilename(ctx, fntemplate,
1659 total=total, seqno=seqno, revwidth=revwidth)
1659 total=total, seqno=seqno, revwidth=revwidth)
1660 filemap.setdefault(dest, []).append((seqno, rev))
1660 filemap.setdefault(dest, []).append((seqno, rev))
1661
1661
1662 for dest in filemap:
1662 for dest in filemap:
1663 with formatter.maybereopen(basefm, dest) as fm:
1663 with formatter.maybereopen(basefm, dest) as fm:
1664 repo.ui.note("%s\n" % dest)
1664 repo.ui.note("%s\n" % dest)
1665 for seqno, rev in filemap[dest]:
1665 for seqno, rev in filemap[dest]:
1666 fm.startitem()
1666 fm.startitem()
1667 ctx = repo[rev]
1667 ctx = repo[rev]
1668 _exportsingle(repo, ctx, fm, match, switch_parent, seqno,
1668 _exportsingle(repo, ctx, fm, match, switch_parent, seqno,
1669 diffopts)
1669 diffopts)
1670
1670
1671 def _prefetchchangedfiles(repo, revs, match):
1671 def _prefetchchangedfiles(repo, revs, match):
1672 allfiles = set()
1672 allfiles = set()
1673 for rev in revs:
1673 for rev in revs:
1674 for file in repo[rev].files():
1674 for file in repo[rev].files():
1675 if not match or match(file):
1675 if not match or match(file):
1676 allfiles.add(file)
1676 allfiles.add(file)
1677 scmutil.prefetchfiles(repo, revs, scmutil.matchfiles(repo, allfiles))
1677 scmutil.prefetchfiles(repo, revs, scmutil.matchfiles(repo, allfiles))
1678
1678
1679 def export(repo, revs, basefm, fntemplate='hg-%h.patch', switch_parent=False,
1679 def export(repo, revs, basefm, fntemplate='hg-%h.patch', switch_parent=False,
1680 opts=None, match=None):
1680 opts=None, match=None):
1681 '''export changesets as hg patches
1681 '''export changesets as hg patches
1682
1682
1683 Args:
1683 Args:
1684 repo: The repository from which we're exporting revisions.
1684 repo: The repository from which we're exporting revisions.
1685 revs: A list of revisions to export as revision numbers.
1685 revs: A list of revisions to export as revision numbers.
1686 basefm: A formatter to which patches should be written.
1686 basefm: A formatter to which patches should be written.
1687 fntemplate: An optional string to use for generating patch file names.
1687 fntemplate: An optional string to use for generating patch file names.
1688 switch_parent: If True, show diffs against second parent when not nullid.
1688 switch_parent: If True, show diffs against second parent when not nullid.
1689 Default is false, which always shows diff against p1.
1689 Default is false, which always shows diff against p1.
1690 opts: diff options to use for generating the patch.
1690 opts: diff options to use for generating the patch.
1691 match: If specified, only export changes to files matching this matcher.
1691 match: If specified, only export changes to files matching this matcher.
1692
1692
1693 Returns:
1693 Returns:
1694 Nothing.
1694 Nothing.
1695
1695
1696 Side Effect:
1696 Side Effect:
1697 "HG Changeset Patch" data is emitted to one of the following
1697 "HG Changeset Patch" data is emitted to one of the following
1698 destinations:
1698 destinations:
1699 fntemplate specified: Each rev is written to a unique file named using
1699 fntemplate specified: Each rev is written to a unique file named using
1700 the given template.
1700 the given template.
1701 Otherwise: All revs will be written to basefm.
1701 Otherwise: All revs will be written to basefm.
1702 '''
1702 '''
1703 _prefetchchangedfiles(repo, revs, match)
1703 _prefetchchangedfiles(repo, revs, match)
1704
1704
1705 if not fntemplate:
1705 if not fntemplate:
1706 _exportfile(repo, revs, basefm, '<unnamed>', switch_parent, opts, match)
1706 _exportfile(repo, revs, basefm, '<unnamed>', switch_parent, opts, match)
1707 else:
1707 else:
1708 _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, opts,
1708 _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, opts,
1709 match)
1709 match)
1710
1710
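# Usage sketch (revs and the formatter come from the caller; the filename
# template here is only an example):
#
#     export(repo, revs, basefm)                             # all revs to basefm
#     export(repo, revs, basefm, fntemplate='%b-%R.patch')   # one file per rev
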
1711 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
1711 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
1712 """Export changesets to the given file stream"""
1712 """Export changesets to the given file stream"""
1713 _prefetchchangedfiles(repo, revs, match)
1713 _prefetchchangedfiles(repo, revs, match)
1714
1714
1715 dest = getattr(fp, 'name', '<unnamed>')
1715 dest = getattr(fp, 'name', '<unnamed>')
1716 with formatter.formatter(repo.ui, fp, 'export', {}) as fm:
1716 with formatter.formatter(repo.ui, fp, 'export', {}) as fm:
1717 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
1717 _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
1718
1718
def showmarker(fm, marker, index=None):
    """utility function to display an obsolescence marker in a readable way

    To be used by debug functions."""
1723 if index is not None:
1723 if index is not None:
1724 fm.write('index', '%i ', index)
1724 fm.write('index', '%i ', index)
1725 fm.write('prednode', '%s ', hex(marker.prednode()))
1725 fm.write('prednode', '%s ', hex(marker.prednode()))
1726 succs = marker.succnodes()
1726 succs = marker.succnodes()
1727 fm.condwrite(succs, 'succnodes', '%s ',
1727 fm.condwrite(succs, 'succnodes', '%s ',
1728 fm.formatlist(map(hex, succs), name='node'))
1728 fm.formatlist(map(hex, succs), name='node'))
1729 fm.write('flag', '%X ', marker.flags())
1729 fm.write('flag', '%X ', marker.flags())
1730 parents = marker.parentnodes()
1730 parents = marker.parentnodes()
1731 if parents is not None:
1731 if parents is not None:
1732 fm.write('parentnodes', '{%s} ',
1732 fm.write('parentnodes', '{%s} ',
1733 fm.formatlist(map(hex, parents), name='node', sep=', '))
1733 fm.formatlist(map(hex, parents), name='node', sep=', '))
1734 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1734 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1735 meta = marker.metadata().copy()
1735 meta = marker.metadata().copy()
1736 meta.pop('date', None)
1736 meta.pop('date', None)
1737 smeta = pycompat.rapply(pycompat.maybebytestr, meta)
1737 smeta = pycompat.rapply(pycompat.maybebytestr, meta)
1738 fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
1738 fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
1739 fm.plain('\n')
1739 fm.plain('\n')
1740
1740
1741 def finddate(ui, repo, date):
1741 def finddate(ui, repo, date):
1742 """Find the tipmost changeset that matches the given date spec"""
1742 """Find the tipmost changeset that matches the given date spec"""
1743
1743
1744 df = dateutil.matchdate(date)
1744 df = dateutil.matchdate(date)
1745 m = scmutil.matchall(repo)
1745 m = scmutil.matchall(repo)
1746 results = {}
1746 results = {}
1747
1747
1748 def prep(ctx, fns):
1748 def prep(ctx, fns):
1749 d = ctx.date()
1749 d = ctx.date()
1750 if df(d[0]):
1750 if df(d[0]):
1751 results[ctx.rev()] = d
1751 results[ctx.rev()] = d
1752
1752
1753 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1753 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1754 rev = ctx.rev()
1754 rev = ctx.rev()
1755 if rev in results:
1755 if rev in results:
1756 ui.status(_("found revision %s from %s\n") %
1756 ui.status(_("found revision %s from %s\n") %
1757 (rev, dateutil.datestr(results[rev])))
1757 (rev, dateutil.datestr(results[rev])))
1758 return '%d' % rev
1758 return '%d' % rev
1759
1759
1760 raise error.Abort(_("revision matching date not found"))
1760 raise error.Abort(_("revision matching date not found"))
1761
1761
1762 def increasingwindows(windowsize=8, sizelimit=512):
1762 def increasingwindows(windowsize=8, sizelimit=512):
1763 while True:
1763 while True:
1764 yield windowsize
1764 yield windowsize
1765 if windowsize < sizelimit:
1765 if windowsize < sizelimit:
1766 windowsize *= 2
1766 windowsize *= 2
1767
1767
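# An illustrative, self-contained sketch (not part of cmdutil.py): how a
# caller consumes revisions in the doubling windows produced by
# increasingwindows() above, which is the pattern walkchangerevs() relies on
# to avoid touching the whole history at once.  The helper name and the
# sample input are invented for illustration only.
def _demo_windowed_walk(revs, windowsize=8, sizelimit=512):
    """Yield `revs` in chunks whose size doubles up to `sizelimit`."""
    it = iter(revs)
    while True:
        # take at most `windowsize` items from the iterator
        window = [rev for _, rev in zip(range(windowsize), it)]
        if not window:
            return
        yield window
        if windowsize < sizelimit:
            windowsize *= 2

# list(_demo_windowed_walk(range(30), windowsize=4))
# -> [[0, 1, 2, 3], [4, ..., 11], [12, ..., 27], [28, 29]]
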
1768 def _walkrevs(repo, opts):
1768 def _walkrevs(repo, opts):
1769 # Default --rev value depends on --follow but --follow behavior
1769 # Default --rev value depends on --follow but --follow behavior
1770 # depends on revisions resolved from --rev...
1770 # depends on revisions resolved from --rev...
1771 follow = opts.get('follow') or opts.get('follow_first')
1771 follow = opts.get('follow') or opts.get('follow_first')
1772 if opts.get('rev'):
1772 if opts.get('rev'):
1773 revs = scmutil.revrange(repo, opts['rev'])
1773 revs = scmutil.revrange(repo, opts['rev'])
1774 elif follow and repo.dirstate.p1() == nullid:
1774 elif follow and repo.dirstate.p1() == nullid:
1775 revs = smartset.baseset()
1775 revs = smartset.baseset()
1776 elif follow:
1776 elif follow:
1777 revs = repo.revs('reverse(:.)')
1777 revs = repo.revs('reverse(:.)')
1778 else:
1778 else:
1779 revs = smartset.spanset(repo)
1779 revs = smartset.spanset(repo)
1780 revs.reverse()
1780 revs.reverse()
1781 return revs
1781 return revs
1782
1782
1783 class FileWalkError(Exception):
1783 class FileWalkError(Exception):
1784 pass
1784 pass
1785
1785
1786 def walkfilerevs(repo, match, follow, revs, fncache):
1786 def walkfilerevs(repo, match, follow, revs, fncache):
1787 '''Walks the file history for the matched files.
1787 '''Walks the file history for the matched files.
1788
1788
1789 Returns the changeset revs that are involved in the file history.
1789 Returns the changeset revs that are involved in the file history.
1790
1790
1791 Throws FileWalkError if the file history can't be walked using
1791 Throws FileWalkError if the file history can't be walked using
1792 filelogs alone.
1792 filelogs alone.
1793 '''
1793 '''
1794 wanted = set()
1794 wanted = set()
1795 copies = []
1795 copies = []
1796 minrev, maxrev = min(revs), max(revs)
1796 minrev, maxrev = min(revs), max(revs)
1797 def filerevs(filelog, last):
1797 def filerevs(filelog, last):
1798 """
1798 """
1799 Only files, no patterns. Check the history of each file.
1799 Only files, no patterns. Check the history of each file.
1800
1800
1801 Examines filelog entries within minrev, maxrev linkrev range
1801 Examines filelog entries within minrev, maxrev linkrev range
1802 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1802 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1803 tuples in backwards order
1803 tuples in backwards order
1804 """
1804 """
1805 cl_count = len(repo)
1805 cl_count = len(repo)
1806 revs = []
1806 revs = []
1807 for j in pycompat.xrange(0, last + 1):
1807 for j in pycompat.xrange(0, last + 1):
1808 linkrev = filelog.linkrev(j)
1808 linkrev = filelog.linkrev(j)
1809 if linkrev < minrev:
1809 if linkrev < minrev:
1810 continue
1810 continue
1811 # only yield revs for which we have the changelog; this can
1811 # only yield revs for which we have the changelog; this can
1812 # happen while doing "hg log" during a pull or commit
1812 # happen while doing "hg log" during a pull or commit
1813 if linkrev >= cl_count:
1813 if linkrev >= cl_count:
1814 break
1814 break
1815
1815
1816 parentlinkrevs = []
1816 parentlinkrevs = []
1817 for p in filelog.parentrevs(j):
1817 for p in filelog.parentrevs(j):
1818 if p != nullrev:
1818 if p != nullrev:
1819 parentlinkrevs.append(filelog.linkrev(p))
1819 parentlinkrevs.append(filelog.linkrev(p))
1820 n = filelog.node(j)
1820 n = filelog.node(j)
1821 revs.append((linkrev, parentlinkrevs,
1821 revs.append((linkrev, parentlinkrevs,
1822 follow and filelog.renamed(n)))
1822 follow and filelog.renamed(n)))
1823
1823
1824 return reversed(revs)
1824 return reversed(revs)
1825 def iterfiles():
1825 def iterfiles():
1826 pctx = repo['.']
1826 pctx = repo['.']
1827 for filename in match.files():
1827 for filename in match.files():
1828 if follow:
1828 if follow:
1829 if filename not in pctx:
1829 if filename not in pctx:
1830 raise error.Abort(_('cannot follow file not in parent '
1830 raise error.Abort(_('cannot follow file not in parent '
1831 'revision: "%s"') % filename)
1831 'revision: "%s"') % filename)
1832 yield filename, pctx[filename].filenode()
1832 yield filename, pctx[filename].filenode()
1833 else:
1833 else:
1834 yield filename, None
1834 yield filename, None
1835 for filename_node in copies:
1835 for filename_node in copies:
1836 yield filename_node
1836 yield filename_node
1837
1837
1838 for file_, node in iterfiles():
1838 for file_, node in iterfiles():
1839 filelog = repo.file(file_)
1839 filelog = repo.file(file_)
1840 if not len(filelog):
1840 if not len(filelog):
1841 if node is None:
1841 if node is None:
1842 # A zero count may be a directory or deleted file, so
1842 # A zero count may be a directory or deleted file, so
1843 # try to find matching entries on the slow path.
1843 # try to find matching entries on the slow path.
1844 if follow:
1844 if follow:
1845 raise error.Abort(
1845 raise error.Abort(
1846 _('cannot follow nonexistent file: "%s"') % file_)
1846 _('cannot follow nonexistent file: "%s"') % file_)
1847 raise FileWalkError("Cannot walk via filelog")
1847 raise FileWalkError("Cannot walk via filelog")
1848 else:
1848 else:
1849 continue
1849 continue
1850
1850
1851 if node is None:
1851 if node is None:
1852 last = len(filelog) - 1
1852 last = len(filelog) - 1
1853 else:
1853 else:
1854 last = filelog.rev(node)
1854 last = filelog.rev(node)
1855
1855
1856 # keep track of all ancestors of the file
1856 # keep track of all ancestors of the file
1857 ancestors = {filelog.linkrev(last)}
1857 ancestors = {filelog.linkrev(last)}
1858
1858
1859 # iterate from latest to oldest revision
1859 # iterate from latest to oldest revision
1860 for rev, flparentlinkrevs, copied in filerevs(filelog, last):
1860 for rev, flparentlinkrevs, copied in filerevs(filelog, last):
1861 if not follow:
1861 if not follow:
1862 if rev > maxrev:
1862 if rev > maxrev:
1863 continue
1863 continue
1864 else:
1864 else:
1865 # Note that last might not be the first interesting
1865 # Note that last might not be the first interesting
1866 # rev to us:
1866 # rev to us:
1867 # if the file has been changed after maxrev, we'll
1867 # if the file has been changed after maxrev, we'll
1868 # have linkrev(last) > maxrev, and we still need
1868 # have linkrev(last) > maxrev, and we still need
1869 # to explore the file graph
1869 # to explore the file graph
1870 if rev not in ancestors:
1870 if rev not in ancestors:
1871 continue
1871 continue
1872 # XXX insert 1327 fix here
1872 # XXX insert 1327 fix here
1873 if flparentlinkrevs:
1873 if flparentlinkrevs:
1874 ancestors.update(flparentlinkrevs)
1874 ancestors.update(flparentlinkrevs)
1875
1875
1876 fncache.setdefault(rev, []).append(file_)
1876 fncache.setdefault(rev, []).append(file_)
1877 wanted.add(rev)
1877 wanted.add(rev)
1878 if copied:
1878 if copied:
1879 copies.append(copied)
1879 copies.append(copied)
1880
1880
1881 return wanted
1881 return wanted
1882
1882
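# An illustrative, self-contained sketch (not from Mercurial): the linkrev
# filtering idea that walkfilerevs()/filerevs() above apply to real filelogs,
# shown here on a toy "filelog" -- a list whose entry j is
# (linkrev, parent_filelog_revs).  All names and data below are invented.
def _toy_filerevs(filelog, last, minrev, cl_count):
    revs = []
    for j in range(last + 1):
        linkrev, parents = filelog[j]
        if linkrev < minrev:
            continue             # introduced before the range we care about
        if linkrev >= cl_count:
            break                # changelog entry not present (yet)
        parentlinkrevs = [filelog[p][0] for p in parents]
        revs.append((linkrev, parentlinkrevs))
    return list(reversed(revs))  # newest first, as filerevs() yields them

# _toy_filerevs([(0, []), (2, [0]), (5, [1])], last=2, minrev=1, cl_count=6)
# -> [(5, [2]), (2, [0])]
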
1883 class _followfilter(object):
1883 class _followfilter(object):
1884 def __init__(self, repo, onlyfirst=False):
1884 def __init__(self, repo, onlyfirst=False):
1885 self.repo = repo
1885 self.repo = repo
1886 self.startrev = nullrev
1886 self.startrev = nullrev
1887 self.roots = set()
1887 self.roots = set()
1888 self.onlyfirst = onlyfirst
1888 self.onlyfirst = onlyfirst
1889
1889
1890 def match(self, rev):
1890 def match(self, rev):
1891 def realparents(rev):
1891 def realparents(rev):
1892 if self.onlyfirst:
1892 if self.onlyfirst:
1893 return self.repo.changelog.parentrevs(rev)[0:1]
1893 return self.repo.changelog.parentrevs(rev)[0:1]
1894 else:
1894 else:
1895 return filter(lambda x: x != nullrev,
1895 return filter(lambda x: x != nullrev,
1896 self.repo.changelog.parentrevs(rev))
1896 self.repo.changelog.parentrevs(rev))
1897
1897
1898 if self.startrev == nullrev:
1898 if self.startrev == nullrev:
1899 self.startrev = rev
1899 self.startrev = rev
1900 return True
1900 return True
1901
1901
1902 if rev > self.startrev:
1902 if rev > self.startrev:
1903 # forward: all descendants
1903 # forward: all descendants
1904 if not self.roots:
1904 if not self.roots:
1905 self.roots.add(self.startrev)
1905 self.roots.add(self.startrev)
1906 for parent in realparents(rev):
1906 for parent in realparents(rev):
1907 if parent in self.roots:
1907 if parent in self.roots:
1908 self.roots.add(rev)
1908 self.roots.add(rev)
1909 return True
1909 return True
1910 else:
1910 else:
1911 # backwards: all parents
1911 # backwards: all parents
1912 if not self.roots:
1912 if not self.roots:
1913 self.roots.update(realparents(self.startrev))
1913 self.roots.update(realparents(self.startrev))
1914 if rev in self.roots:
1914 if rev in self.roots:
1915 self.roots.remove(rev)
1915 self.roots.remove(rev)
1916 self.roots.update(realparents(rev))
1916 self.roots.update(realparents(rev))
1917 return True
1917 return True
1918
1918
1919 return False
1919 return False
1920
1920
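# An illustrative, self-contained sketch (not from Mercurial): the "forward:
# all descendants" half of _followfilter above, on a toy DAG expressed as
# {rev: (p1, p2)} with -1 standing in for the null revision.  Everything
# here is invented for illustration.
def _toy_descendants(parents, startrev, later_revs_ascending):
    roots = {startrev}
    kept = set()
    for rev in later_revs_ascending:
        # a revision is kept when one of its parents is already kept
        if any(p in roots for p in parents[rev] if p != -1):
            roots.add(rev)
            kept.add(rev)
    return kept

# parents = {0: (-1, -1), 1: (0, -1), 2: (0, -1), 3: (1, 2)}
# _toy_descendants(parents, 1, [2, 3]) -> {3}
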
1921 def walkchangerevs(repo, match, opts, prepare):
1921 def walkchangerevs(repo, match, opts, prepare):
1922 '''Iterate over files and the revs in which they changed.
1922 '''Iterate over files and the revs in which they changed.
1923
1923
1924 Callers most commonly need to iterate backwards over the history
1924 Callers most commonly need to iterate backwards over the history
1925 in which they are interested. Doing so has awful (quadratic-looking)
1925 in which they are interested. Doing so has awful (quadratic-looking)
1926 performance, so we use iterators in a "windowed" way.
1926 performance, so we use iterators in a "windowed" way.
1927
1927
1928 We walk a window of revisions in the desired order. Within the
1928 We walk a window of revisions in the desired order. Within the
1929 window, we first walk forwards to gather data, then in the desired
1929 window, we first walk forwards to gather data, then in the desired
1930 order (usually backwards) to display it.
1930 order (usually backwards) to display it.
1931
1931
1932 This function returns an iterator yielding contexts. Before
1932 This function returns an iterator yielding contexts. Before
1933 yielding each context, the iterator will first call the prepare
1933 yielding each context, the iterator will first call the prepare
1934 function on each context in the window in forward order.'''
1934 function on each context in the window in forward order.'''
1935
1935
1936 allfiles = opts.get('all_files')
1936 allfiles = opts.get('all_files')
1937 follow = opts.get('follow') or opts.get('follow_first')
1937 follow = opts.get('follow') or opts.get('follow_first')
1938 revs = _walkrevs(repo, opts)
1938 revs = _walkrevs(repo, opts)
1939 if not revs:
1939 if not revs:
1940 return []
1940 return []
1941 wanted = set()
1941 wanted = set()
1942 slowpath = match.anypats() or (not match.always() and opts.get('removed'))
1942 slowpath = match.anypats() or (not match.always() and opts.get('removed'))
1943 fncache = {}
1943 fncache = {}
1944 change = repo.__getitem__
1944 change = repo.__getitem__
1945
1945
1946 # First step is to fill wanted, the set of revisions that we want to yield.
1946 # First step is to fill wanted, the set of revisions that we want to yield.
1947 # When it does not induce extra cost, we also fill fncache for revisions in
1947 # When it does not induce extra cost, we also fill fncache for revisions in
1948 # wanted: a cache of filenames that were changed (ctx.files()) and that
1948 # wanted: a cache of filenames that were changed (ctx.files()) and that
1949 # match the file filtering conditions.
1949 # match the file filtering conditions.
1950
1950
1951 if match.always() or allfiles:
1951 if match.always() or allfiles:
1952 # No files, no patterns. Display all revs.
1952 # No files, no patterns. Display all revs.
1953 wanted = revs
1953 wanted = revs
1954 elif not slowpath:
1954 elif not slowpath:
1955 # We only have to read through the filelog to find wanted revisions
1955 # We only have to read through the filelog to find wanted revisions
1956
1956
1957 try:
1957 try:
1958 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1958 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1959 except FileWalkError:
1959 except FileWalkError:
1960 slowpath = True
1960 slowpath = True
1961
1961
1962 # We decided to fall back to the slowpath because at least one
1962 # We decided to fall back to the slowpath because at least one
1963 # of the paths was not a file. Check to see if at least one of them
1963 # of the paths was not a file. Check to see if at least one of them
1964 # existed in history, otherwise simply return
1964 # existed in history, otherwise simply return
1965 for path in match.files():
1965 for path in match.files():
1966 if path == '.' or path in repo.store:
1966 if path == '.' or path in repo.store:
1967 break
1967 break
1968 else:
1968 else:
1969 return []
1969 return []
1970
1970
1971 if slowpath:
1971 if slowpath:
1972 # We have to read the changelog to match filenames against
1972 # We have to read the changelog to match filenames against
1973 # changed files
1973 # changed files
1974
1974
1975 if follow:
1975 if follow:
1976 raise error.Abort(_('can only follow copies/renames for explicit '
1976 raise error.Abort(_('can only follow copies/renames for explicit '
1977 'filenames'))
1977 'filenames'))
1978
1978
1979 # The slow path checks files modified in every changeset.
1979 # The slow path checks files modified in every changeset.
1980 # This is really slow on large repos, so compute the set lazily.
1980 # This is really slow on large repos, so compute the set lazily.
1981 class lazywantedset(object):
1981 class lazywantedset(object):
1982 def __init__(self):
1982 def __init__(self):
1983 self.set = set()
1983 self.set = set()
1984 self.revs = set(revs)
1984 self.revs = set(revs)
1985
1985
1986 # No need to worry about locality here because it will be accessed
1986 # No need to worry about locality here because it will be accessed
1987 # in the same order as the increasing window below.
1987 # in the same order as the increasing window below.
1988 def __contains__(self, value):
1988 def __contains__(self, value):
1989 if value in self.set:
1989 if value in self.set:
1990 return True
1990 return True
1991 elif value not in self.revs:
1991 elif value not in self.revs:
1992 return False
1992 return False
1993 else:
1993 else:
1994 self.revs.discard(value)
1994 self.revs.discard(value)
1995 ctx = change(value)
1995 ctx = change(value)
1996 if allfiles:
1996 if allfiles:
1997 matches = list(ctx.manifest().walk(match))
1997 matches = list(ctx.manifest().walk(match))
1998 else:
1998 else:
1999 matches = [f for f in ctx.files() if match(f)]
1999 matches = [f for f in ctx.files() if match(f)]
2000 if matches:
2000 if matches:
2001 fncache[value] = matches
2001 fncache[value] = matches
2002 self.set.add(value)
2002 self.set.add(value)
2003 return True
2003 return True
2004 return False
2004 return False
2005
2005
2006 def discard(self, value):
2006 def discard(self, value):
2007 self.revs.discard(value)
2007 self.revs.discard(value)
2008 self.set.discard(value)
2008 self.set.discard(value)
2009
2009
2010 wanted = lazywantedset()
2010 wanted = lazywantedset()
2011
2011
2012 # it might be worthwhile to do this in the iterator if the rev range
2012 # it might be worthwhile to do this in the iterator if the rev range
2013 # is descending and the prune args are all within that range
2013 # is descending and the prune args are all within that range
2014 for rev in opts.get('prune', ()):
2014 for rev in opts.get('prune', ()):
2015 rev = repo[rev].rev()
2015 rev = repo[rev].rev()
2016 ff = _followfilter(repo)
2016 ff = _followfilter(repo)
2017 stop = min(revs[0], revs[-1])
2017 stop = min(revs[0], revs[-1])
2018 for x in pycompat.xrange(rev, stop - 1, -1):
2018 for x in pycompat.xrange(rev, stop - 1, -1):
2019 if ff.match(x):
2019 if ff.match(x):
2020 wanted = wanted - [x]
2020 wanted = wanted - [x]
2021
2021
2022 # Now that wanted is correctly initialized, we can iterate over the
2022 # Now that wanted is correctly initialized, we can iterate over the
2023 # revision range, yielding only revisions in wanted.
2023 # revision range, yielding only revisions in wanted.
2024 def iterate():
2024 def iterate():
2025 if follow and match.always():
2025 if follow and match.always():
2026 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
2026 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
2027 def want(rev):
2027 def want(rev):
2028 return ff.match(rev) and rev in wanted
2028 return ff.match(rev) and rev in wanted
2029 else:
2029 else:
2030 def want(rev):
2030 def want(rev):
2031 return rev in wanted
2031 return rev in wanted
2032
2032
2033 it = iter(revs)
2033 it = iter(revs)
2034 stopiteration = False
2034 stopiteration = False
2035 for windowsize in increasingwindows():
2035 for windowsize in increasingwindows():
2036 nrevs = []
2036 nrevs = []
2037 for i in pycompat.xrange(windowsize):
2037 for i in pycompat.xrange(windowsize):
2038 rev = next(it, None)
2038 rev = next(it, None)
2039 if rev is None:
2039 if rev is None:
2040 stopiteration = True
2040 stopiteration = True
2041 break
2041 break
2042 elif want(rev):
2042 elif want(rev):
2043 nrevs.append(rev)
2043 nrevs.append(rev)
2044 for rev in sorted(nrevs):
2044 for rev in sorted(nrevs):
2045 fns = fncache.get(rev)
2045 fns = fncache.get(rev)
2046 ctx = change(rev)
2046 ctx = change(rev)
2047 if not fns:
2047 if not fns:
2048 def fns_generator():
2048 def fns_generator():
2049 if allfiles:
2049 if allfiles:
2050 fiter = iter(ctx)
2050 fiter = iter(ctx)
2051 else:
2051 else:
2052 fiter = ctx.files()
2052 fiter = ctx.files()
2053 for f in fiter:
2053 for f in fiter:
2054 if match(f):
2054 if match(f):
2055 yield f
2055 yield f
2056 fns = fns_generator()
2056 fns = fns_generator()
2057 prepare(ctx, fns)
2057 prepare(ctx, fns)
2058 for rev in nrevs:
2058 for rev in nrevs:
2059 yield change(rev)
2059 yield change(rev)
2060
2060
2061 if stopiteration:
2061 if stopiteration:
2062 break
2062 break
2063
2063
2064 return iterate()
2064 return iterate()
2065
2065
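# An illustrative, self-contained sketch (not part of cmdutil.py): a minimal
# lazily-evaluated membership set in the spirit of lazywantedset inside
# walkchangerevs() above -- the predicate runs only for revisions that are
# actually queried, and its result is cached.  The class name and predicate
# are invented for illustration.
class _LazySet(object):
    def __init__(self, candidates, predicate):
        self._pending = set(candidates)  # candidates not evaluated yet
        self._members = set()            # candidates known to match
        self._predicate = predicate

    def __contains__(self, value):
        if value in self._members:
            return True
        if value not in self._pending:
            return False
        self._pending.discard(value)
        if self._predicate(value):
            self._members.add(value)
            return True
        return False

# wanted = _LazySet(range(10), lambda rev: rev % 2 == 0)
# 4 in wanted  -> True  (predicate evaluated once, result cached)
# 11 in wanted -> False (never a candidate, predicate not called)
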
2066 def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
2066 def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
2067 bad = []
2067 bad = []
2068
2068
2069 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2069 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2070 names = []
2070 names = []
2071 wctx = repo[None]
2071 wctx = repo[None]
2072 cca = None
2072 cca = None
2073 abort, warn = scmutil.checkportabilityalert(ui)
2073 abort, warn = scmutil.checkportabilityalert(ui)
2074 if abort or warn:
2074 if abort or warn:
2075 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2075 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2076
2076
2077 match = repo.narrowmatch(match, includeexact=True)
2077 match = repo.narrowmatch(match, includeexact=True)
2078 badmatch = matchmod.badmatch(match, badfn)
2078 badmatch = matchmod.badmatch(match, badfn)
2079 dirstate = repo.dirstate
2079 dirstate = repo.dirstate
2080 # We don't want to just call wctx.walk here, since it would return a lot of
2080 # We don't want to just call wctx.walk here, since it would return a lot of
2081 # clean files, which we aren't interested in, and doing so takes time.
2081 # clean files, which we aren't interested in, and doing so takes time.
2082 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
2082 for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
2083 unknown=True, ignored=False, full=False)):
2083 unknown=True, ignored=False, full=False)):
2084 exact = match.exact(f)
2084 exact = match.exact(f)
2085 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2085 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2086 if cca:
2086 if cca:
2087 cca(f)
2087 cca(f)
2088 names.append(f)
2088 names.append(f)
2089 if ui.verbose or not exact:
2089 if ui.verbose or not exact:
2090 ui.status(_('adding %s\n') % uipathfn(f),
2090 ui.status(_('adding %s\n') % uipathfn(f),
2091 label='ui.addremove.added')
2091 label='ui.addremove.added')
2092
2092
2093 for subpath in sorted(wctx.substate):
2093 for subpath in sorted(wctx.substate):
2094 sub = wctx.sub(subpath)
2094 sub = wctx.sub(subpath)
2095 try:
2095 try:
2096 submatch = matchmod.subdirmatcher(subpath, match)
2096 submatch = matchmod.subdirmatcher(subpath, match)
2097 subprefix = repo.wvfs.reljoin(prefix, subpath)
2097 subprefix = repo.wvfs.reljoin(prefix, subpath)
2098 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2098 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2099 if opts.get(r'subrepos'):
2099 if opts.get(r'subrepos'):
2100 bad.extend(sub.add(ui, submatch, subprefix, subuipathfn, False,
2100 bad.extend(sub.add(ui, submatch, subprefix, subuipathfn, False,
2101 **opts))
2101 **opts))
2102 else:
2102 else:
2103 bad.extend(sub.add(ui, submatch, subprefix, subuipathfn, True,
2103 bad.extend(sub.add(ui, submatch, subprefix, subuipathfn, True,
2104 **opts))
2104 **opts))
2105 except error.LookupError:
2105 except error.LookupError:
2106 ui.status(_("skipping missing subrepository: %s\n")
2106 ui.status(_("skipping missing subrepository: %s\n")
2107 % uipathfn(subpath))
2107 % uipathfn(subpath))
2108
2108
2109 if not opts.get(r'dry_run'):
2109 if not opts.get(r'dry_run'):
2110 rejected = wctx.add(names, prefix)
2110 rejected = wctx.add(names, prefix)
2111 bad.extend(f for f in rejected if f in match.files())
2111 bad.extend(f for f in rejected if f in match.files())
2112 return bad
2112 return bad
2113
2113
2114 def addwebdirpath(repo, serverpath, webconf):
2114 def addwebdirpath(repo, serverpath, webconf):
2115 webconf[serverpath] = repo.root
2115 webconf[serverpath] = repo.root
2116 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2116 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2117
2117
2118 for r in repo.revs('filelog("path:.hgsub")'):
2118 for r in repo.revs('filelog("path:.hgsub")'):
2119 ctx = repo[r]
2119 ctx = repo[r]
2120 for subpath in ctx.substate:
2120 for subpath in ctx.substate:
2121 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2121 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2122
2122
2123 def forget(ui, repo, match, prefix, uipathfn, explicitonly, dryrun,
2123 def forget(ui, repo, match, prefix, uipathfn, explicitonly, dryrun,
2124 interactive):
2124 interactive):
2125 if dryrun and interactive:
2125 if dryrun and interactive:
2126 raise error.Abort(_("cannot specify both --dry-run and --interactive"))
2126 raise error.Abort(_("cannot specify both --dry-run and --interactive"))
2127 bad = []
2127 bad = []
2128 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2128 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2129 wctx = repo[None]
2129 wctx = repo[None]
2130 forgot = []
2130 forgot = []
2131
2131
2132 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2132 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2133 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2133 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2134 if explicitonly:
2134 if explicitonly:
2135 forget = [f for f in forget if match.exact(f)]
2135 forget = [f for f in forget if match.exact(f)]
2136
2136
2137 for subpath in sorted(wctx.substate):
2137 for subpath in sorted(wctx.substate):
2138 sub = wctx.sub(subpath)
2138 sub = wctx.sub(subpath)
2139 submatch = matchmod.subdirmatcher(subpath, match)
2139 submatch = matchmod.subdirmatcher(subpath, match)
2140 subprefix = repo.wvfs.reljoin(prefix, subpath)
2140 subprefix = repo.wvfs.reljoin(prefix, subpath)
2141 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2141 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2142 try:
2142 try:
2143 subbad, subforgot = sub.forget(submatch, subprefix, subuipathfn,
2143 subbad, subforgot = sub.forget(submatch, subprefix, subuipathfn,
2144 dryrun=dryrun,
2144 dryrun=dryrun,
2145 interactive=interactive)
2145 interactive=interactive)
2146 bad.extend([subpath + '/' + f for f in subbad])
2146 bad.extend([subpath + '/' + f for f in subbad])
2147 forgot.extend([subpath + '/' + f for f in subforgot])
2147 forgot.extend([subpath + '/' + f for f in subforgot])
2148 except error.LookupError:
2148 except error.LookupError:
2149 ui.status(_("skipping missing subrepository: %s\n")
2149 ui.status(_("skipping missing subrepository: %s\n")
2150 % uipathfn(subpath))
2150 % uipathfn(subpath))
2151
2151
2152 if not explicitonly:
2152 if not explicitonly:
2153 for f in match.files():
2153 for f in match.files():
2154 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2154 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2155 if f not in forgot:
2155 if f not in forgot:
2156 if repo.wvfs.exists(f):
2156 if repo.wvfs.exists(f):
2157 # Don't complain if the exact case match wasn't given.
2157 # Don't complain if the exact case match wasn't given.
2158 # But don't do this until after checking 'forgot', so
2158 # But don't do this until after checking 'forgot', so
2159 # that subrepo files aren't normalized, and this op is
2159 # that subrepo files aren't normalized, and this op is
2160 # purely from data cached by the status walk above.
2160 # purely from data cached by the status walk above.
2161 if repo.dirstate.normalize(f) in repo.dirstate:
2161 if repo.dirstate.normalize(f) in repo.dirstate:
2162 continue
2162 continue
2163 ui.warn(_('not removing %s: '
2163 ui.warn(_('not removing %s: '
2164 'file is already untracked\n')
2164 'file is already untracked\n')
2165 % uipathfn(f))
2165 % uipathfn(f))
2166 bad.append(f)
2166 bad.append(f)
2167
2167
2168 if interactive:
2168 if interactive:
2169 responses = _('[Ynsa?]'
2169 responses = _('[Ynsa?]'
2170 '$$ &Yes, forget this file'
2170 '$$ &Yes, forget this file'
2171 '$$ &No, skip this file'
2171 '$$ &No, skip this file'
2172 '$$ &Skip remaining files'
2172 '$$ &Skip remaining files'
2173 '$$ Include &all remaining files'
2173 '$$ Include &all remaining files'
2174 '$$ &? (display help)')
2174 '$$ &? (display help)')
2175 for filename in forget[:]:
2175 for filename in forget[:]:
2176 r = ui.promptchoice(_('forget %s %s') %
2176 r = ui.promptchoice(_('forget %s %s') %
2177 (uipathfn(filename), responses))
2177 (uipathfn(filename), responses))
2178 if r == 4: # ?
2178 if r == 4: # ?
2179 while r == 4:
2179 while r == 4:
2180 for c, t in ui.extractchoices(responses)[1]:
2180 for c, t in ui.extractchoices(responses)[1]:
2181 ui.write('%s - %s\n' % (c, encoding.lower(t)))
2181 ui.write('%s - %s\n' % (c, encoding.lower(t)))
2182 r = ui.promptchoice(_('forget %s %s') %
2182 r = ui.promptchoice(_('forget %s %s') %
2183 (uipathfn(filename), responses))
2183 (uipathfn(filename), responses))
2184 if r == 0: # yes
2184 if r == 0: # yes
2185 continue
2185 continue
2186 elif r == 1: # no
2186 elif r == 1: # no
2187 forget.remove(filename)
2187 forget.remove(filename)
2188 elif r == 2: # Skip
2188 elif r == 2: # Skip
2189 fnindex = forget.index(filename)
2189 fnindex = forget.index(filename)
2190 del forget[fnindex:]
2190 del forget[fnindex:]
2191 break
2191 break
2192 elif r == 3: # All
2192 elif r == 3: # All
2193 break
2193 break
2194
2194
2195 for f in forget:
2195 for f in forget:
2196 if ui.verbose or not match.exact(f) or interactive:
2196 if ui.verbose or not match.exact(f) or interactive:
2197 ui.status(_('removing %s\n') % uipathfn(f),
2197 ui.status(_('removing %s\n') % uipathfn(f),
2198 label='ui.addremove.removed')
2198 label='ui.addremove.removed')
2199
2199
2200 if not dryrun:
2200 if not dryrun:
2201 rejected = wctx.forget(forget, prefix)
2201 rejected = wctx.forget(forget, prefix)
2202 bad.extend(f for f in rejected if f in match.files())
2202 bad.extend(f for f in rejected if f in match.files())
2203 forgot.extend(f for f in forget if f not in rejected)
2203 forgot.extend(f for f in forget if f not in rejected)
2204 return bad, forgot
2204 return bad, forgot
2205
2205
2206 def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
2206 def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
2207 ret = 1
2207 ret = 1
2208
2208
2209 needsfctx = ui.verbose or {'size', 'flags'} & fm.datahint()
2209 needsfctx = ui.verbose or {'size', 'flags'} & fm.datahint()
2210 for f in ctx.matches(m):
2210 for f in ctx.matches(m):
2211 fm.startitem()
2211 fm.startitem()
2212 fm.context(ctx=ctx)
2212 fm.context(ctx=ctx)
2213 if needsfctx:
2213 if needsfctx:
2214 fc = ctx[f]
2214 fc = ctx[f]
2215 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2215 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2216 fm.data(path=f)
2216 fm.data(path=f)
2217 fm.plain(fmt % uipathfn(f))
2217 fm.plain(fmt % uipathfn(f))
2218 ret = 0
2218 ret = 0
2219
2219
2220 for subpath in sorted(ctx.substate):
2220 for subpath in sorted(ctx.substate):
2221 submatch = matchmod.subdirmatcher(subpath, m)
2221 submatch = matchmod.subdirmatcher(subpath, m)
2222 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2222 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2223 if (subrepos or m.exact(subpath) or any(submatch.files())):
2223 if (subrepos or m.exact(subpath) or any(submatch.files())):
2224 sub = ctx.sub(subpath)
2224 sub = ctx.sub(subpath)
2225 try:
2225 try:
2226 recurse = m.exact(subpath) or subrepos
2226 recurse = m.exact(subpath) or subrepos
2227 if sub.printfiles(ui, submatch, subuipathfn, fm, fmt,
2227 if sub.printfiles(ui, submatch, subuipathfn, fm, fmt,
2228 recurse) == 0:
2228 recurse) == 0:
2229 ret = 0
2229 ret = 0
2230 except error.LookupError:
2230 except error.LookupError:
2231 ui.status(_("skipping missing subrepository: %s\n")
2231 ui.status(_("skipping missing subrepository: %s\n")
2232 % uipathfn(subpath))
2232 % uipathfn(subpath))
2233
2233
2234 return ret
2234 return ret
2235
2235
2236 def remove(ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun,
2236 def remove(ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun,
2237 warnings=None):
2237 warnings=None):
2238 ret = 0
2238 ret = 0
2239 s = repo.status(match=m, clean=True)
2239 s = repo.status(match=m, clean=True)
2240 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2240 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2241
2241
2242 wctx = repo[None]
2242 wctx = repo[None]
2243
2243
2244 if warnings is None:
2244 if warnings is None:
2245 warnings = []
2245 warnings = []
2246 warn = True
2246 warn = True
2247 else:
2247 else:
2248 warn = False
2248 warn = False
2249
2249
2250 subs = sorted(wctx.substate)
2250 subs = sorted(wctx.substate)
2251 progress = ui.makeprogress(_('searching'), total=len(subs),
2251 progress = ui.makeprogress(_('searching'), total=len(subs),
2252 unit=_('subrepos'))
2252 unit=_('subrepos'))
2253 for subpath in subs:
2253 for subpath in subs:
2254 submatch = matchmod.subdirmatcher(subpath, m)
2254 submatch = matchmod.subdirmatcher(subpath, m)
2255 subprefix = repo.wvfs.reljoin(prefix, subpath)
2255 subprefix = repo.wvfs.reljoin(prefix, subpath)
2256 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2256 subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
2257 if subrepos or m.exact(subpath) or any(submatch.files()):
2257 if subrepos or m.exact(subpath) or any(submatch.files()):
2258 progress.increment()
2258 progress.increment()
2259 sub = wctx.sub(subpath)
2259 sub = wctx.sub(subpath)
2260 try:
2260 try:
2261 if sub.removefiles(submatch, subprefix, subuipathfn, after,
2261 if sub.removefiles(submatch, subprefix, subuipathfn, after,
2262 force, subrepos, dryrun, warnings):
2262 force, subrepos, dryrun, warnings):
2263 ret = 1
2263 ret = 1
2264 except error.LookupError:
2264 except error.LookupError:
2265 warnings.append(_("skipping missing subrepository: %s\n")
2265 warnings.append(_("skipping missing subrepository: %s\n")
2266 % uipathfn(subpath))
2266 % uipathfn(subpath))
2267 progress.complete()
2267 progress.complete()
2268
2268
2269 # warn about failure to delete explicit files/dirs
2269 # warn about failure to delete explicit files/dirs
2270 deleteddirs = util.dirs(deleted)
2270 deleteddirs = util.dirs(deleted)
2271 files = m.files()
2271 files = m.files()
2272 progress = ui.makeprogress(_('deleting'), total=len(files),
2272 progress = ui.makeprogress(_('deleting'), total=len(files),
2273 unit=_('files'))
2273 unit=_('files'))
2274 for f in files:
2274 for f in files:
2275 def insubrepo():
2275 def insubrepo():
2276 for subpath in wctx.substate:
2276 for subpath in wctx.substate:
2277 if f.startswith(subpath + '/'):
2277 if f.startswith(subpath + '/'):
2278 return True
2278 return True
2279 return False
2279 return False
2280
2280
2281 progress.increment()
2281 progress.increment()
2282 isdir = f in deleteddirs or wctx.hasdir(f)
2282 isdir = f in deleteddirs or wctx.hasdir(f)
2283 if (f in repo.dirstate or isdir or f == '.'
2283 if (f in repo.dirstate or isdir or f == '.'
2284 or insubrepo() or f in subs):
2284 or insubrepo() or f in subs):
2285 continue
2285 continue
2286
2286
2287 if repo.wvfs.exists(f):
2287 if repo.wvfs.exists(f):
2288 if repo.wvfs.isdir(f):
2288 if repo.wvfs.isdir(f):
2289 warnings.append(_('not removing %s: no tracked files\n')
2289 warnings.append(_('not removing %s: no tracked files\n')
2290 % uipathfn(f))
2290 % uipathfn(f))
2291 else:
2291 else:
2292 warnings.append(_('not removing %s: file is untracked\n')
2292 warnings.append(_('not removing %s: file is untracked\n')
2293 % uipathfn(f))
2293 % uipathfn(f))
2294 # missing files will generate a warning elsewhere
2294 # missing files will generate a warning elsewhere
2295 ret = 1
2295 ret = 1
2296 progress.complete()
2296 progress.complete()
2297
2297
2298 if force:
2298 if force:
2299 list = modified + deleted + clean + added
2299 list = modified + deleted + clean + added
2300 elif after:
2300 elif after:
2301 list = deleted
2301 list = deleted
2302 remaining = modified + added + clean
2302 remaining = modified + added + clean
2303 progress = ui.makeprogress(_('skipping'), total=len(remaining),
2303 progress = ui.makeprogress(_('skipping'), total=len(remaining),
2304 unit=_('files'))
2304 unit=_('files'))
2305 for f in remaining:
2305 for f in remaining:
2306 progress.increment()
2306 progress.increment()
2307 if ui.verbose or (f in files):
2307 if ui.verbose or (f in files):
2308 warnings.append(_('not removing %s: file still exists\n')
2308 warnings.append(_('not removing %s: file still exists\n')
2309 % uipathfn(f))
2309 % uipathfn(f))
2310 ret = 1
2310 ret = 1
2311 progress.complete()
2311 progress.complete()
2312 else:
2312 else:
2313 list = deleted + clean
2313 list = deleted + clean
2314 progress = ui.makeprogress(_('skipping'),
2314 progress = ui.makeprogress(_('skipping'),
2315 total=(len(modified) + len(added)),
2315 total=(len(modified) + len(added)),
2316 unit=_('files'))
2316 unit=_('files'))
2317 for f in modified:
2317 for f in modified:
2318 progress.increment()
2318 progress.increment()
2319 warnings.append(_('not removing %s: file is modified (use -f'
2319 warnings.append(_('not removing %s: file is modified (use -f'
2320 ' to force removal)\n') % uipathfn(f))
2320 ' to force removal)\n') % uipathfn(f))
2321 ret = 1
2321 ret = 1
2322 for f in added:
2322 for f in added:
2323 progress.increment()
2323 progress.increment()
2324 warnings.append(_("not removing %s: file has been marked for add"
2324 warnings.append(_("not removing %s: file has been marked for add"
2325 " (use 'hg forget' to undo add)\n") % uipathfn(f))
2325 " (use 'hg forget' to undo add)\n") % uipathfn(f))
2326 ret = 1
2326 ret = 1
2327 progress.complete()
2327 progress.complete()
2328
2328
2329 list = sorted(list)
2329 list = sorted(list)
2330 progress = ui.makeprogress(_('deleting'), total=len(list),
2330 progress = ui.makeprogress(_('deleting'), total=len(list),
2331 unit=_('files'))
2331 unit=_('files'))
2332 for f in list:
2332 for f in list:
2333 if ui.verbose or not m.exact(f):
2333 if ui.verbose or not m.exact(f):
2334 progress.increment()
2334 progress.increment()
2335 ui.status(_('removing %s\n') % uipathfn(f),
2335 ui.status(_('removing %s\n') % uipathfn(f),
2336 label='ui.addremove.removed')
2336 label='ui.addremove.removed')
2337 progress.complete()
2337 progress.complete()
2338
2338
2339 if not dryrun:
2339 if not dryrun:
2340 with repo.wlock():
2340 with repo.wlock():
2341 if not after:
2341 if not after:
2342 for f in list:
2342 for f in list:
2343 if f in added:
2343 if f in added:
2344 continue # we never unlink added files on remove
2344 continue # we never unlink added files on remove
2345 rmdir = repo.ui.configbool('experimental',
2345 rmdir = repo.ui.configbool('experimental',
2346 'removeemptydirs')
2346 'removeemptydirs')
2347 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2347 repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
2348 repo[None].forget(list)
2348 repo[None].forget(list)
2349
2349
2350 if warn:
2350 if warn:
2351 for warning in warnings:
2351 for warning in warnings:
2352 ui.warn(warning)
2352 ui.warn(warning)
2353
2353
2354 return ret
2354 return ret
2355
2355
2356 def _catfmtneedsdata(fm):
2357 return not fm.datahint() or 'data' in fm.datahint()
2358
2356 def _updatecatformatter(fm, ctx, matcher, path, decode):
2359 def _updatecatformatter(fm, ctx, matcher, path, decode):
2357 """Hook for adding data to the formatter used by ``hg cat``.
2360 """Hook for adding data to the formatter used by ``hg cat``.
2358
2361
2359 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2362 Extensions (e.g., lfs) can wrap this to inject keywords/data, but must call
2360 this method first."""
2363 this method first."""
2361 data = ctx[path].data()
2364
2362 if decode:
2365 # data() can be expensive to fetch (e.g. lfs), so don't fetch it if it
2363 data = ctx.repo().wwritedata(path, data)
2366 # wasn't requested.
2367 data = b''
2368 if _catfmtneedsdata(fm):
2369 data = ctx[path].data()
2370 if decode:
2371 data = ctx.repo().wwritedata(path, data)
2364 fm.startitem()
2372 fm.startitem()
2365 fm.context(ctx=ctx)
2373 fm.context(ctx=ctx)
2366 fm.write('data', '%s', data)
2374 fm.write('data', '%s', data)
2367 fm.data(path=path)
2375 fm.data(path=path)
2368
2376
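# An illustrative, self-contained sketch (not part of this changeset): the
# general pattern the fix above follows -- ask the formatter which fields
# the requested output actually uses, and skip the expensive content fetch
# when 'data' is not among them.  _HintedFormatter and expensive_read are
# invented stand-ins, not Mercurial APIs.
class _HintedFormatter(object):
    def __init__(self, fields):
        self._fields = set(fields)

    def datahint(self):
        # an empty hint means "unknown, assume everything is needed",
        # mirroring how _catfmtneedsdata() treats a falsy hint
        return self._fields

def _emit(fm, path, expensive_read):
    data = b''
    if not fm.datahint() or 'data' in fm.datahint():
        data = expensive_read(path)  # only pay the cost when required
    return {'path': path, 'data': data}

# _emit(_HintedFormatter({'path'}), 'f.txt', lambda p: b'...')  -> skips the read
# _emit(_HintedFormatter(set()), 'f.txt', lambda p: b'...')     -> performs the read
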
2369 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2377 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
2370 err = 1
2378 err = 1
2371 opts = pycompat.byteskwargs(opts)
2379 opts = pycompat.byteskwargs(opts)
2372
2380
2373 def write(path):
2381 def write(path):
2374 filename = None
2382 filename = None
2375 if fntemplate:
2383 if fntemplate:
2376 filename = makefilename(ctx, fntemplate,
2384 filename = makefilename(ctx, fntemplate,
2377 pathname=os.path.join(prefix, path))
2385 pathname=os.path.join(prefix, path))
2378 # attempt to create the directory if it does not already exist
2386 # attempt to create the directory if it does not already exist
2379 try:
2387 try:
2380 os.makedirs(os.path.dirname(filename))
2388 os.makedirs(os.path.dirname(filename))
2381 except OSError:
2389 except OSError:
2382 pass
2390 pass
2383 with formatter.maybereopen(basefm, filename) as fm:
2391 with formatter.maybereopen(basefm, filename) as fm:
2384 _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))
2392 _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))
2385
2393
2386 # Automation often uses hg cat on single files, so special case it
2394 # Automation often uses hg cat on single files, so special case it
2387 # for performance to avoid the cost of parsing the manifest.
2395 # for performance to avoid the cost of parsing the manifest.
2388 if len(matcher.files()) == 1 and not matcher.anypats():
2396 if len(matcher.files()) == 1 and not matcher.anypats():
2389 file = matcher.files()[0]
2397 file = matcher.files()[0]
2390 mfl = repo.manifestlog
2398 mfl = repo.manifestlog
2391 mfnode = ctx.manifestnode()
2399 mfnode = ctx.manifestnode()
2392 try:
2400 try:
2393 if mfnode and mfl[mfnode].find(file)[0]:
2401 if mfnode and mfl[mfnode].find(file)[0]:
2394 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2402 if _catfmtneedsdata(basefm):
2403 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2395 write(file)
2404 write(file)
2396 return 0
2405 return 0
2397 except KeyError:
2406 except KeyError:
2398 pass
2407 pass
2399
2408
2400 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2409 if _catfmtneedsdata(basefm):
2410 scmutil.prefetchfiles(repo, [ctx.rev()], matcher)
2401
2411
2402 for abs in ctx.walk(matcher):
2412 for abs in ctx.walk(matcher):
2403 write(abs)
2413 write(abs)
2404 err = 0
2414 err = 0
2405
2415
2406 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
2416 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
2407 for subpath in sorted(ctx.substate):
2417 for subpath in sorted(ctx.substate):
2408 sub = ctx.sub(subpath)
2418 sub = ctx.sub(subpath)
2409 try:
2419 try:
2410 submatch = matchmod.subdirmatcher(subpath, matcher)
2420 submatch = matchmod.subdirmatcher(subpath, matcher)
2411 subprefix = os.path.join(prefix, subpath)
2421 subprefix = os.path.join(prefix, subpath)
2412 if not sub.cat(submatch, basefm, fntemplate, subprefix,
2422 if not sub.cat(submatch, basefm, fntemplate, subprefix,
2413 **pycompat.strkwargs(opts)):
2423 **pycompat.strkwargs(opts)):
2414 err = 0
2424 err = 0
2415 except error.RepoLookupError:
2425 except error.RepoLookupError:
2416 ui.status(_("skipping missing subrepository: %s\n") %
2426 ui.status(_("skipping missing subrepository: %s\n") %
2417 uipathfn(subpath))
2427 uipathfn(subpath))
2418
2428
2419 return err
2429 return err
2420
2430
2421 def commit(ui, repo, commitfunc, pats, opts):
2431 def commit(ui, repo, commitfunc, pats, opts):
2422 '''commit the specified files or all outstanding changes'''
2432 '''commit the specified files or all outstanding changes'''
2423 date = opts.get('date')
2433 date = opts.get('date')
2424 if date:
2434 if date:
2425 opts['date'] = dateutil.parsedate(date)
2435 opts['date'] = dateutil.parsedate(date)
2426 message = logmessage(ui, opts)
2436 message = logmessage(ui, opts)
2427 matcher = scmutil.match(repo[None], pats, opts)
2437 matcher = scmutil.match(repo[None], pats, opts)
2428
2438
2429 dsguard = None
2439 dsguard = None
2430 # extract addremove carefully -- this function can be called from a command
2440 # extract addremove carefully -- this function can be called from a command
2431 # that doesn't support addremove
2441 # that doesn't support addremove
2432 if opts.get('addremove'):
2442 if opts.get('addremove'):
2433 dsguard = dirstateguard.dirstateguard(repo, 'commit')
2443 dsguard = dirstateguard.dirstateguard(repo, 'commit')
2434 with dsguard or util.nullcontextmanager():
2444 with dsguard or util.nullcontextmanager():
2435 if dsguard:
2445 if dsguard:
2436 relative = scmutil.anypats(pats, opts)
2446 relative = scmutil.anypats(pats, opts)
2437 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2447 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2438 if scmutil.addremove(repo, matcher, "", uipathfn, opts) != 0:
2448 if scmutil.addremove(repo, matcher, "", uipathfn, opts) != 0:
2439 raise error.Abort(
2449 raise error.Abort(
2440 _("failed to mark all new/missing files as added/removed"))
2450 _("failed to mark all new/missing files as added/removed"))
2441
2451
2442 return commitfunc(ui, repo, message, matcher, opts)
2452 return commitfunc(ui, repo, message, matcher, opts)
2443
2453
2444 def samefile(f, ctx1, ctx2):
2454 def samefile(f, ctx1, ctx2):
2445 if f in ctx1.manifest():
2455 if f in ctx1.manifest():
2446 a = ctx1.filectx(f)
2456 a = ctx1.filectx(f)
2447 if f in ctx2.manifest():
2457 if f in ctx2.manifest():
2448 b = ctx2.filectx(f)
2458 b = ctx2.filectx(f)
2449 return (not a.cmp(b)
2459 return (not a.cmp(b)
2450 and a.flags() == b.flags())
2460 and a.flags() == b.flags())
2451 else:
2461 else:
2452 return False
2462 return False
2453 else:
2463 else:
2454 return f not in ctx2.manifest()
2464 return f not in ctx2.manifest()
2455
2465
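# An illustrative, self-contained sketch (not part of cmdutil.py): the
# decision samefile() above encodes, with toy contexts that are plain
# {path: (data, flags)} dicts.  _ToyCtx is invented for illustration.
class _ToyCtx(dict):
    def manifest(self):
        return self

    def filectx(self, f):
        return self[f]

def _toy_samefile(f, ctx1, ctx2):
    if f in ctx1.manifest():
        if f in ctx2.manifest():
            # same only if both content and flags match
            return ctx1.filectx(f) == ctx2.filectx(f)
        return False
    return f not in ctx2.manifest()

# a = _ToyCtx({'x': (b'v1', '')}); b = _ToyCtx({'x': (b'v1', '')})
# _toy_samefile('x', a, b) -> True ; _toy_samefile('y', a, b) -> True
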
2456 def amend(ui, repo, old, extra, pats, opts):
2466 def amend(ui, repo, old, extra, pats, opts):
2457 # avoid cycle context -> subrepo -> cmdutil
2467 # avoid cycle context -> subrepo -> cmdutil
2458 from . import context
2468 from . import context
2459
2469
2460 # amend will reuse the existing user if not specified, but the obsolete
2470 # amend will reuse the existing user if not specified, but the obsolete
2461 # marker creation requires that the current user's name is specified.
2471 # marker creation requires that the current user's name is specified.
2462 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2472 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2463 ui.username() # raise exception if username not set
2473 ui.username() # raise exception if username not set
2464
2474
2465 ui.note(_('amending changeset %s\n') % old)
2475 ui.note(_('amending changeset %s\n') % old)
2466 base = old.p1()
2476 base = old.p1()
2467
2477
2468 with repo.wlock(), repo.lock(), repo.transaction('amend'):
2478 with repo.wlock(), repo.lock(), repo.transaction('amend'):
2469 # Participating changesets:
2479 # Participating changesets:
2470 #
2480 #
2471 # wctx o - workingctx that contains changes from working copy
2481 # wctx o - workingctx that contains changes from working copy
2472 # | to go into amending commit
2482 # | to go into amending commit
2473 # |
2483 # |
2474 # old o - changeset to amend
2484 # old o - changeset to amend
2475 # |
2485 # |
2476 # base o - first parent of the changeset to amend
2486 # base o - first parent of the changeset to amend
2477 wctx = repo[None]
2487 wctx = repo[None]
2478
2488
2479 # Copy to avoid mutating input
2489 # Copy to avoid mutating input
2480 extra = extra.copy()
2490 extra = extra.copy()
2481 # Update extra dict from amended commit (e.g. to preserve graft
2491 # Update extra dict from amended commit (e.g. to preserve graft
2482 # source)
2492 # source)
2483 extra.update(old.extra())
2493 extra.update(old.extra())
2484
2494
2485 # Also update it from the wctx
2495 # Also update it from the wctx
2486 extra.update(wctx.extra())
2496 extra.update(wctx.extra())
2487
2497
2488 user = opts.get('user') or old.user()
2498 user = opts.get('user') or old.user()
2489
2499
2490 datemaydiffer = False # date-only change should be ignored?
2500 datemaydiffer = False # date-only change should be ignored?
2491 if opts.get('date') and opts.get('currentdate'):
2501 if opts.get('date') and opts.get('currentdate'):
2492 raise error.Abort(_('--date and --currentdate are mutually '
2502 raise error.Abort(_('--date and --currentdate are mutually '
2493 'exclusive'))
2503 'exclusive'))
2494 if opts.get('date'):
2504 if opts.get('date'):
2495 date = dateutil.parsedate(opts.get('date'))
2505 date = dateutil.parsedate(opts.get('date'))
2496 elif opts.get('currentdate'):
2506 elif opts.get('currentdate'):
2497 date = dateutil.makedate()
2507 date = dateutil.makedate()
2498 elif (ui.configbool('rewrite', 'update-timestamp')
2508 elif (ui.configbool('rewrite', 'update-timestamp')
2499 and opts.get('currentdate') is None):
2509 and opts.get('currentdate') is None):
2500 date = dateutil.makedate()
2510 date = dateutil.makedate()
2501 datemaydiffer = True
2511 datemaydiffer = True
2502 else:
2512 else:
2503 date = old.date()
2513 date = old.date()
2504
2514
2505 if len(old.parents()) > 1:
2515 if len(old.parents()) > 1:
2506 # ctx.files() isn't reliable for merges, so fall back to the
2516 # ctx.files() isn't reliable for merges, so fall back to the
2507 # slower repo.status() method
2517 # slower repo.status() method
2508 files = {fn for st in base.status(old)[:3] for fn in st}
2518 files = {fn for st in base.status(old)[:3] for fn in st}
2509 else:
2519 else:
2510 files = set(old.files())
2520 files = set(old.files())
2511
2521
2512 # add/remove the files to the working copy if the "addremove" option
2522 # add/remove the files to the working copy if the "addremove" option
2513 # was specified.
2523 # was specified.
2514 matcher = scmutil.match(wctx, pats, opts)
2524 matcher = scmutil.match(wctx, pats, opts)
2515 relative = scmutil.anypats(pats, opts)
2525 relative = scmutil.anypats(pats, opts)
2516 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2526 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
2517 if (opts.get('addremove')
2527 if (opts.get('addremove')
2518 and scmutil.addremove(repo, matcher, "", uipathfn, opts)):
2528 and scmutil.addremove(repo, matcher, "", uipathfn, opts)):
2519 raise error.Abort(
2529 raise error.Abort(
2520 _("failed to mark all new/missing files as added/removed"))
2530 _("failed to mark all new/missing files as added/removed"))
2521
2531
2522 # Check subrepos. This depends on in-place wctx._status update in
2532 # Check subrepos. This depends on in-place wctx._status update in
2523 # subrepo.precommit(). To minimize the risk of this hack, we do
2533 # subrepo.precommit(). To minimize the risk of this hack, we do
2524 # nothing if .hgsub does not exist.
2534 # nothing if .hgsub does not exist.
2525 if '.hgsub' in wctx or '.hgsub' in old:
2535 if '.hgsub' in wctx or '.hgsub' in old:
2526 subs, commitsubs, newsubstate = subrepoutil.precommit(
2536 subs, commitsubs, newsubstate = subrepoutil.precommit(
2527 ui, wctx, wctx._status, matcher)
2537 ui, wctx, wctx._status, matcher)
2528 # amend should abort if commitsubrepos is enabled
2538 # amend should abort if commitsubrepos is enabled
2529 assert not commitsubs
2539 assert not commitsubs
2530 if subs:
2540 if subs:
2531 subrepoutil.writestate(repo, newsubstate)
2541 subrepoutil.writestate(repo, newsubstate)
2532
2542
2533 ms = mergemod.mergestate.read(repo)
2543 ms = mergemod.mergestate.read(repo)
2534 mergeutil.checkunresolved(ms)
2544 mergeutil.checkunresolved(ms)
2535
2545
2536 filestoamend = set(f for f in wctx.files() if matcher(f))
2546 filestoamend = set(f for f in wctx.files() if matcher(f))
2537
2547
2538 changes = (len(filestoamend) > 0)
2548 changes = (len(filestoamend) > 0)
2539 if changes:
2549 if changes:
2540 # Recompute copies (avoid recording a -> b -> a)
2550 # Recompute copies (avoid recording a -> b -> a)
2541 copied = copies.pathcopies(base, wctx, matcher)
2551 copied = copies.pathcopies(base, wctx, matcher)
2542 if old.p2().node() != nullid:
2552 if old.p2().node() != nullid:
2543 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2553 copied.update(copies.pathcopies(old.p2(), wctx, matcher))
2544
2554
2545 # Prune files which were reverted by the updates: if old
2555 # Prune files which were reverted by the updates: if old
2546 # introduced file X and the file was renamed in the working
2556 # introduced file X and the file was renamed in the working
2547 # copy, then those two files are the same and
2557 # copy, then those two files are the same and
2548 # we can discard X from our list of files. Likewise if X
2558 # we can discard X from our list of files. Likewise if X
2549 # was removed, it's no longer relevant. If X is missing (aka
2559 # was removed, it's no longer relevant. If X is missing (aka
2550 # deleted), old X must be preserved.
2560 # deleted), old X must be preserved.
2551 files.update(filestoamend)
2561 files.update(filestoamend)
2552 files = [f for f in files if (not samefile(f, wctx, base)
2562 files = [f for f in files if (not samefile(f, wctx, base)
2553 or f in wctx.deleted())]
2563 or f in wctx.deleted())]
2554
2564
2555 def filectxfn(repo, ctx_, path):
2565 def filectxfn(repo, ctx_, path):
2556 try:
2566 try:
2557 # If the file being considered is not amongst the files
2567 # If the file being considered is not amongst the files
2558 # to be amended, we should return the file context from the
2568 # to be amended, we should return the file context from the
2559 # old changeset. This avoids issues when only some files in
2569 # old changeset. This avoids issues when only some files in
2560 # the working copy are being amended but there are also
2570 # the working copy are being amended but there are also
2561 # changes to other files from the old changeset.
2571 # changes to other files from the old changeset.
2562 if path not in filestoamend:
2572 if path not in filestoamend:
2563 return old.filectx(path)
2573 return old.filectx(path)
2564
2574
2565 # Return None for removed files.
2575 # Return None for removed files.
2566 if path in wctx.removed():
2576 if path in wctx.removed():
2567 return None
2577 return None
2568
2578
2569 fctx = wctx[path]
2579 fctx = wctx[path]
2570 flags = fctx.flags()
2580 flags = fctx.flags()
2571 mctx = context.memfilectx(repo, ctx_,
2581 mctx = context.memfilectx(repo, ctx_,
2572 fctx.path(), fctx.data(),
2582 fctx.path(), fctx.data(),
2573 islink='l' in flags,
2583 islink='l' in flags,
2574 isexec='x' in flags,
2584 isexec='x' in flags,
2575 copysource=copied.get(path))
2585 copysource=copied.get(path))
2576 return mctx
2586 return mctx
2577 except KeyError:
2587 except KeyError:
2578 return None
2588 return None
2579 else:
2589 else:
2580 ui.note(_('copying changeset %s to %s\n') % (old, base))
2590 ui.note(_('copying changeset %s to %s\n') % (old, base))
2581
2591
2582 # Use version of files as in the old cset
2592 # Use version of files as in the old cset
2583 def filectxfn(repo, ctx_, path):
2593 def filectxfn(repo, ctx_, path):
2584 try:
2594 try:
2585 return old.filectx(path)
2595 return old.filectx(path)
2586 except KeyError:
2596 except KeyError:
2587 return None
2597 return None
2588
2598
2589 # See if we got a message from -m or -l, if not, open the editor with
2599 # See if we got a message from -m or -l, if not, open the editor with
2590 # the message of the changeset to amend.
2600 # the message of the changeset to amend.
2591 message = logmessage(ui, opts)
2601 message = logmessage(ui, opts)
2592
2602
2593 editform = mergeeditform(old, 'commit.amend')
2603 editform = mergeeditform(old, 'commit.amend')
2594
2604
2595 if not message:
2605 if not message:
2596 message = old.description()
2606 message = old.description()
2597 # Default if message isn't provided and --edit is not passed is to
2607 # Default if message isn't provided and --edit is not passed is to
2598 # invoke editor, but allow --no-edit. If somehow we don't have any
2608 # invoke editor, but allow --no-edit. If somehow we don't have any
2599 # description, let's always start the editor.
2609 # description, let's always start the editor.
2600 doedit = not message or opts.get('edit') in [True, None]
2610 doedit = not message or opts.get('edit') in [True, None]
2601 else:
2611 else:
2602 # Default if message is provided is to not invoke editor, but allow
2612 # Default if message is provided is to not invoke editor, but allow
2603 # --edit.
2613 # --edit.
2604 doedit = opts.get('edit') is True
2614 doedit = opts.get('edit') is True
2605 editor = getcommiteditor(edit=doedit, editform=editform)
2615 editor = getcommiteditor(edit=doedit, editform=editform)
2606
2616
2607 pureextra = extra.copy()
2617 pureextra = extra.copy()
2608 extra['amend_source'] = old.hex()
2618 extra['amend_source'] = old.hex()
2609
2619
2610 new = context.memctx(repo,
2620 new = context.memctx(repo,
2611 parents=[base.node(), old.p2().node()],
2621 parents=[base.node(), old.p2().node()],
2612 text=message,
2622 text=message,
2613 files=files,
2623 files=files,
2614 filectxfn=filectxfn,
2624 filectxfn=filectxfn,
2615 user=user,
2625 user=user,
2616 date=date,
2626 date=date,
2617 extra=extra,
2627 extra=extra,
2618 editor=editor)
2628 editor=editor)
2619
2629
2620 newdesc = changelog.stripdesc(new.description())
2630 newdesc = changelog.stripdesc(new.description())
2621 if ((not changes)
2631 if ((not changes)
2622 and newdesc == old.description()
2632 and newdesc == old.description()
2623 and user == old.user()
2633 and user == old.user()
2624 and (date == old.date() or datemaydiffer)
2634 and (date == old.date() or datemaydiffer)
2625 and pureextra == old.extra()):
2635 and pureextra == old.extra()):
2626 # nothing changed. continuing here would create a new node
2636 # nothing changed. continuing here would create a new node
2627 # anyway because of the amend_source noise.
2637 # anyway because of the amend_source noise.
2628 #
2638 #
2629 # This is not what we expect from amend.
2639 # This is not what we expect from amend.
2630 return old.node()
2640 return old.node()
2631
2641
2632 commitphase = None
2642 commitphase = None
2633 if opts.get('secret'):
2643 if opts.get('secret'):
2634 commitphase = phases.secret
2644 commitphase = phases.secret
2635 newid = repo.commitctx(new)
2645 newid = repo.commitctx(new)
2636
2646
2637 # Reroute the working copy parent to the new changeset
2647 # Reroute the working copy parent to the new changeset
2638 repo.setparents(newid, nullid)
2648 repo.setparents(newid, nullid)
2639 mapping = {old.node(): (newid,)}
2649 mapping = {old.node(): (newid,)}
2640 obsmetadata = None
2650 obsmetadata = None
2641 if opts.get('note'):
2651 if opts.get('note'):
2642 obsmetadata = {'note': encoding.fromlocal(opts['note'])}
2652 obsmetadata = {'note': encoding.fromlocal(opts['note'])}
2643 backup = ui.configbool('rewrite', 'backup-bundle')
2653 backup = ui.configbool('rewrite', 'backup-bundle')
2644 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata,
2654 scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata,
2645 fixphase=True, targetphase=commitphase,
2655 fixphase=True, targetphase=commitphase,
2646 backup=backup)
2656 backup=backup)
2647
2657
2648 # Fixing the dirstate because localrepo.commitctx does not update
2658 # Fixing the dirstate because localrepo.commitctx does not update
2649 # it. This is rather convenient because we did not need to update
2659 # it. This is rather convenient because we did not need to update
2650 # the dirstate for all the files in the new commit which commitctx
2660 # the dirstate for all the files in the new commit which commitctx
2651 # could have done if it updated the dirstate. Now, we can
2661 # could have done if it updated the dirstate. Now, we can
2652 # selectively update the dirstate only for the amended files.
2662 # selectively update the dirstate only for the amended files.
2653 dirstate = repo.dirstate
2663 dirstate = repo.dirstate
2654
2664
2655 # Update the state of the files which were added
2665 # Update the state of the files which were added
2656 # and modified in the amend to "normal" in the dirstate.
2666 # and modified in the amend to "normal" in the dirstate.
2657 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
2667 normalfiles = set(wctx.modified() + wctx.added()) & filestoamend
2658 for f in normalfiles:
2668 for f in normalfiles:
2659 dirstate.normal(f)
2669 dirstate.normal(f)
2660
2670
2661 # Update the state of files which were removed in the amend
2671 # Update the state of files which were removed in the amend
2662 # to "removed" in the dirstate.
2672 # to "removed" in the dirstate.
2663 removedfiles = set(wctx.removed()) & filestoamend
2673 removedfiles = set(wctx.removed()) & filestoamend
2664 for f in removedfiles:
2674 for f in removedfiles:
2665 dirstate.drop(f)
2675 dirstate.drop(f)
2666
2676
2667 return newid
2677 return newid
2668
2678
2669 def commiteditor(repo, ctx, subs, editform=''):
2679 def commiteditor(repo, ctx, subs, editform=''):
2670 if ctx.description():
2680 if ctx.description():
2671 return ctx.description()
2681 return ctx.description()
2672 return commitforceeditor(repo, ctx, subs, editform=editform,
2682 return commitforceeditor(repo, ctx, subs, editform=editform,
2673 unchangedmessagedetection=True)
2683 unchangedmessagedetection=True)
2674
2684
2675 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2685 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2676 editform='', unchangedmessagedetection=False):
2686 editform='', unchangedmessagedetection=False):
2677 if not extramsg:
2687 if not extramsg:
2678 extramsg = _("Leave message empty to abort commit.")
2688 extramsg = _("Leave message empty to abort commit.")
2679
2689
2680 forms = [e for e in editform.split('.') if e]
2690 forms = [e for e in editform.split('.') if e]
2681 forms.insert(0, 'changeset')
2691 forms.insert(0, 'changeset')
2682 templatetext = None
2692 templatetext = None
2683 while forms:
2693 while forms:
2684 ref = '.'.join(forms)
2694 ref = '.'.join(forms)
2685 if repo.ui.config('committemplate', ref):
2695 if repo.ui.config('committemplate', ref):
2686 templatetext = committext = buildcommittemplate(
2696 templatetext = committext = buildcommittemplate(
2687 repo, ctx, subs, extramsg, ref)
2697 repo, ctx, subs, extramsg, ref)
2688 break
2698 break
2689 forms.pop()
2699 forms.pop()
2690 else:
2700 else:
2691 committext = buildcommittext(repo, ctx, subs, extramsg)
2701 committext = buildcommittext(repo, ctx, subs, extramsg)
2692
2702
2693 # run editor in the repository root
2703 # run editor in the repository root
2694 olddir = encoding.getcwd()
2704 olddir = encoding.getcwd()
2695 os.chdir(repo.root)
2705 os.chdir(repo.root)
2696
2706
2697 # make in-memory changes visible to external process
2707 # make in-memory changes visible to external process
2698 tr = repo.currenttransaction()
2708 tr = repo.currenttransaction()
2699 repo.dirstate.write(tr)
2709 repo.dirstate.write(tr)
2700 pending = tr and tr.writepending() and repo.root
2710 pending = tr and tr.writepending() and repo.root
2701
2711
2702 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2712 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2703 editform=editform, pending=pending,
2713 editform=editform, pending=pending,
2704 repopath=repo.path, action='commit')
2714 repopath=repo.path, action='commit')
2705 text = editortext
2715 text = editortext
2706
2716
2707 # strip away anything below this special string (used for editors that want
2717 # strip away anything below this special string (used for editors that want
2708 # to display the diff)
2718 # to display the diff)
2709 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
2719 stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
2710 if stripbelow:
2720 if stripbelow:
2711 text = text[:stripbelow.start()]
2721 text = text[:stripbelow.start()]
2712
2722
2713 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2723 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2714 os.chdir(olddir)
2724 os.chdir(olddir)
2715
2725
2716 if finishdesc:
2726 if finishdesc:
2717 text = finishdesc(text)
2727 text = finishdesc(text)
2718 if not text.strip():
2728 if not text.strip():
2719 raise error.Abort(_("empty commit message"))
2729 raise error.Abort(_("empty commit message"))
2720 if unchangedmessagedetection and editortext == templatetext:
2730 if unchangedmessagedetection and editortext == templatetext:
2721 raise error.Abort(_("commit message unchanged"))
2731 raise error.Abort(_("commit message unchanged"))
2722
2732
2723 return text
2733 return text
2724
2734
2725 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
2735 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
2726 ui = repo.ui
2736 ui = repo.ui
2727 spec = formatter.templatespec(ref, None, None)
2737 spec = formatter.templatespec(ref, None, None)
2728 t = logcmdutil.changesettemplater(ui, repo, spec)
2738 t = logcmdutil.changesettemplater(ui, repo, spec)
2729 t.t.cache.update((k, templater.unquotestring(v))
2739 t.t.cache.update((k, templater.unquotestring(v))
2730 for k, v in repo.ui.configitems('committemplate'))
2740 for k, v in repo.ui.configitems('committemplate'))
2731
2741
2732 if not extramsg:
2742 if not extramsg:
2733 extramsg = '' # ensure that extramsg is string
2743 extramsg = '' # ensure that extramsg is string
2734
2744
2735 ui.pushbuffer()
2745 ui.pushbuffer()
2736 t.show(ctx, extramsg=extramsg)
2746 t.show(ctx, extramsg=extramsg)
2737 return ui.popbuffer()
2747 return ui.popbuffer()
2738
2748
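The lookup in commitforceeditor above walks the editform from most to least specific, so the editor scaffold can be overridden per command through the [committemplate] config section: for editform 'commit.amend' the candidate keys are changeset.commit.amend, then changeset.commit, then changeset. A minimal hgrc sketch (the template body is illustrative, not the shipped default):

    [committemplate]
    changeset.commit.amend = {desc}\n\n
        HG: {extramsg}
        HG: Amending a changeset -- lines beginning with 'HG:' are removed.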
2739 def hgprefix(msg):
2749 def hgprefix(msg):
2740 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2750 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2741
2751
2742 def buildcommittext(repo, ctx, subs, extramsg):
2752 def buildcommittext(repo, ctx, subs, extramsg):
2743 edittext = []
2753 edittext = []
2744 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2754 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2745 if ctx.description():
2755 if ctx.description():
2746 edittext.append(ctx.description())
2756 edittext.append(ctx.description())
2747 edittext.append("")
2757 edittext.append("")
2748 edittext.append("") # Empty line between message and comments.
2758 edittext.append("") # Empty line between message and comments.
2749 edittext.append(hgprefix(_("Enter commit message."
2759 edittext.append(hgprefix(_("Enter commit message."
2750 " Lines beginning with 'HG:' are removed.")))
2760 " Lines beginning with 'HG:' are removed.")))
2751 edittext.append(hgprefix(extramsg))
2761 edittext.append(hgprefix(extramsg))
2752 edittext.append("HG: --")
2762 edittext.append("HG: --")
2753 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2763 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2754 if ctx.p2():
2764 if ctx.p2():
2755 edittext.append(hgprefix(_("branch merge")))
2765 edittext.append(hgprefix(_("branch merge")))
2756 if ctx.branch():
2766 if ctx.branch():
2757 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2767 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2758 if bookmarks.isactivewdirparent(repo):
2768 if bookmarks.isactivewdirparent(repo):
2759 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2769 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2760 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2770 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2761 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2771 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2762 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2772 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2763 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2773 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2764 if not added and not modified and not removed:
2774 if not added and not modified and not removed:
2765 edittext.append(hgprefix(_("no files changed")))
2775 edittext.append(hgprefix(_("no files changed")))
2766 edittext.append("")
2776 edittext.append("")
2767
2777
2768 return "\n".join(edittext)
2778 return "\n".join(edittext)
2769
2779
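For a sense of what buildcommittext produces, the scaffold handed to the editor for a one-file change on the default branch would look roughly like the following (the user name and file name are invented for illustration):

    HG: Enter commit message. Lines beginning with 'HG:' are removed.
    HG: Leave message empty to abort commit.
    HG: --
    HG: user: Alice <alice@example.org>
    HG: branch 'default'
    HG: changed mercurial/cmdutil.py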
2770 def commitstatus(repo, node, branch, bheads=None, opts=None):
2780 def commitstatus(repo, node, branch, bheads=None, opts=None):
2771 if opts is None:
2781 if opts is None:
2772 opts = {}
2782 opts = {}
2773 ctx = repo[node]
2783 ctx = repo[node]
2774 parents = ctx.parents()
2784 parents = ctx.parents()
2775
2785
2776 if (not opts.get('amend') and bheads and node not in bheads and not
2786 if (not opts.get('amend') and bheads and node not in bheads and not
2777 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2787 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2778 repo.ui.status(_('created new head\n'))
2788 repo.ui.status(_('created new head\n'))
2779 # The message is not printed for initial roots. For the other
2789 # The message is not printed for initial roots. For the other
2780 # changesets, it is printed in the following situations:
2790 # changesets, it is printed in the following situations:
2781 #
2791 #
2782 # Par column: for the 2 parents with ...
2792 # Par column: for the 2 parents with ...
2783 # N: null or no parent
2793 # N: null or no parent
2784 # B: parent is on another named branch
2794 # B: parent is on another named branch
2785 # C: parent is a regular non head changeset
2795 # C: parent is a regular non head changeset
2786 # H: parent was a branch head of the current branch
2796 # H: parent was a branch head of the current branch
2787 # Msg column: whether we print "created new head" message
2797 # Msg column: whether we print "created new head" message
2788 # In the following, it is assumed that there already exists some
2798 # In the following, it is assumed that there already exists some
2789 # initial branch heads of the current branch, otherwise nothing is
2799 # initial branch heads of the current branch, otherwise nothing is
2790 # printed anyway.
2800 # printed anyway.
2791 #
2801 #
2792 # Par Msg Comment
2802 # Par Msg Comment
2793 # N N y additional topo root
2803 # N N y additional topo root
2794 #
2804 #
2795 # B N y additional branch root
2805 # B N y additional branch root
2796 # C N y additional topo head
2806 # C N y additional topo head
2797 # H N n usual case
2807 # H N n usual case
2798 #
2808 #
2799 # B B y weird additional branch root
2809 # B B y weird additional branch root
2800 # C B y branch merge
2810 # C B y branch merge
2801 # H B n merge with named branch
2811 # H B n merge with named branch
2802 #
2812 #
2803 # C C y additional head from merge
2813 # C C y additional head from merge
2804 # C H n merge with a head
2814 # C H n merge with a head
2805 #
2815 #
2806 # H H n head merge: head count decreases
2816 # H H n head merge: head count decreases
2807
2817
2808 if not opts.get('close_branch'):
2818 if not opts.get('close_branch'):
2809 for r in parents:
2819 for r in parents:
2810 if r.closesbranch() and r.branch() == branch:
2820 if r.closesbranch() and r.branch() == branch:
2811 repo.ui.status(_('reopening closed branch head %d\n') % r.rev())
2821 repo.ui.status(_('reopening closed branch head %d\n') % r.rev())
2812
2822
2813 if repo.ui.debugflag:
2823 if repo.ui.debugflag:
2814 repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx.hex()))
2824 repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx.hex()))
2815 elif repo.ui.verbose:
2825 elif repo.ui.verbose:
2816 repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx))
2826 repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx))
2817
2827
2818 def postcommitstatus(repo, pats, opts):
2828 def postcommitstatus(repo, pats, opts):
2819 return repo.status(match=scmutil.match(repo[None], pats, opts))
2829 return repo.status(match=scmutil.match(repo[None], pats, opts))
2820
2830
2821 def revert(ui, repo, ctx, parents, *pats, **opts):
2831 def revert(ui, repo, ctx, parents, *pats, **opts):
2822 opts = pycompat.byteskwargs(opts)
2832 opts = pycompat.byteskwargs(opts)
2823 parent, p2 = parents
2833 parent, p2 = parents
2824 node = ctx.node()
2834 node = ctx.node()
2825
2835
2826 mf = ctx.manifest()
2836 mf = ctx.manifest()
2827 if node == p2:
2837 if node == p2:
2828 parent = p2
2838 parent = p2
2829
2839
2830 # need all matching names in dirstate and manifest of target rev,
2840 # need all matching names in dirstate and manifest of target rev,
2831 # so have to walk both. do not print errors if files exist in one
2841 # so have to walk both. do not print errors if files exist in one
2832 # but not the other. in both cases, filesets should be evaluated against
2842 # but not the other. in both cases, filesets should be evaluated against
2833 # workingctx to get consistent result (issue4497). this means 'set:**'
2843 # workingctx to get consistent result (issue4497). this means 'set:**'
2834 # cannot be used to select missing files from target rev.
2844 # cannot be used to select missing files from target rev.
2835
2845
2836 # `names` is a mapping for all elements in working copy and target revision
2846 # `names` is a mapping for all elements in working copy and target revision
2837 # The mapping is in the form:
2847 # The mapping is in the form:
2838 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2848 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2839 names = {}
2849 names = {}
2840 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
2850 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
2841
2851
2842 with repo.wlock():
2852 with repo.wlock():
2843 ## filling of the `names` mapping
2853 ## filling of the `names` mapping
2844 # walk dirstate to fill `names`
2854 # walk dirstate to fill `names`
2845
2855
2846 interactive = opts.get('interactive', False)
2856 interactive = opts.get('interactive', False)
2847 wctx = repo[None]
2857 wctx = repo[None]
2848 m = scmutil.match(wctx, pats, opts)
2858 m = scmutil.match(wctx, pats, opts)
2849
2859
2850 # we'll need this later
2860 # we'll need this later
2851 targetsubs = sorted(s for s in wctx.substate if m(s))
2861 targetsubs = sorted(s for s in wctx.substate if m(s))
2852
2862
2853 if not m.always():
2863 if not m.always():
2854 matcher = matchmod.badmatch(m, lambda x, y: False)
2864 matcher = matchmod.badmatch(m, lambda x, y: False)
2855 for abs in wctx.walk(matcher):
2865 for abs in wctx.walk(matcher):
2856 names[abs] = m.exact(abs)
2866 names[abs] = m.exact(abs)
2857
2867
2858 # walk target manifest to fill `names`
2868 # walk target manifest to fill `names`
2859
2869
2860 def badfn(path, msg):
2870 def badfn(path, msg):
2861 if path in names:
2871 if path in names:
2862 return
2872 return
2863 if path in ctx.substate:
2873 if path in ctx.substate:
2864 return
2874 return
2865 path_ = path + '/'
2875 path_ = path + '/'
2866 for f in names:
2876 for f in names:
2867 if f.startswith(path_):
2877 if f.startswith(path_):
2868 return
2878 return
2869 ui.warn("%s: %s\n" % (uipathfn(path), msg))
2879 ui.warn("%s: %s\n" % (uipathfn(path), msg))
2870
2880
2871 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2881 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2872 if abs not in names:
2882 if abs not in names:
2873 names[abs] = m.exact(abs)
2883 names[abs] = m.exact(abs)
2874
2884
2875 # Find the status of all files in `names`.
2885 # Find the status of all files in `names`.
2876 m = scmutil.matchfiles(repo, names)
2886 m = scmutil.matchfiles(repo, names)
2877
2887
2878 changes = repo.status(node1=node, match=m,
2888 changes = repo.status(node1=node, match=m,
2879 unknown=True, ignored=True, clean=True)
2889 unknown=True, ignored=True, clean=True)
2880 else:
2890 else:
2881 changes = repo.status(node1=node, match=m)
2891 changes = repo.status(node1=node, match=m)
2882 for kind in changes:
2892 for kind in changes:
2883 for abs in kind:
2893 for abs in kind:
2884 names[abs] = m.exact(abs)
2894 names[abs] = m.exact(abs)
2885
2895
2886 m = scmutil.matchfiles(repo, names)
2896 m = scmutil.matchfiles(repo, names)
2887
2897
2888 modified = set(changes.modified)
2898 modified = set(changes.modified)
2889 added = set(changes.added)
2899 added = set(changes.added)
2890 removed = set(changes.removed)
2900 removed = set(changes.removed)
2891 _deleted = set(changes.deleted)
2901 _deleted = set(changes.deleted)
2892 unknown = set(changes.unknown)
2902 unknown = set(changes.unknown)
2893 unknown.update(changes.ignored)
2903 unknown.update(changes.ignored)
2894 clean = set(changes.clean)
2904 clean = set(changes.clean)
2895 modadded = set()
2905 modadded = set()
2896
2906
2897 # We need to account for the state of the file in the dirstate,
2907 # We need to account for the state of the file in the dirstate,
2898 # even when we revert against something other than the parent. This will
2908 # even when we revert against something other than the parent. This will
2899 # slightly alter the behavior of revert (doing a backup or not, delete
2909 # slightly alter the behavior of revert (doing a backup or not, delete
2900 # or just forget etc).
2910 # or just forget etc).
2901 if parent == node:
2911 if parent == node:
2902 dsmodified = modified
2912 dsmodified = modified
2903 dsadded = added
2913 dsadded = added
2904 dsremoved = removed
2914 dsremoved = removed
2905 # store all local modifications, useful later for rename detection
2915 # store all local modifications, useful later for rename detection
2906 localchanges = dsmodified | dsadded
2916 localchanges = dsmodified | dsadded
2907 modified, added, removed = set(), set(), set()
2917 modified, added, removed = set(), set(), set()
2908 else:
2918 else:
2909 changes = repo.status(node1=parent, match=m)
2919 changes = repo.status(node1=parent, match=m)
2910 dsmodified = set(changes.modified)
2920 dsmodified = set(changes.modified)
2911 dsadded = set(changes.added)
2921 dsadded = set(changes.added)
2912 dsremoved = set(changes.removed)
2922 dsremoved = set(changes.removed)
2913 # store all local modifications, useful later for rename detection
2923 # store all local modifications, useful later for rename detection
2914 localchanges = dsmodified | dsadded
2924 localchanges = dsmodified | dsadded
2915
2925
2916 # only take into account removes between wc and target
2926 # only take into account removes between wc and target
2917 clean |= dsremoved - removed
2927 clean |= dsremoved - removed
2918 dsremoved &= removed
2928 dsremoved &= removed
2919 # distinguish between dirstate removes and the others
2929 # distinguish between dirstate removes and the others
2920 removed -= dsremoved
2930 removed -= dsremoved
2921
2931
2922 modadded = added & dsmodified
2932 modadded = added & dsmodified
2923 added -= modadded
2933 added -= modadded
2924
2934
2925 # tell the newly modified files apart.
2935 # tell the newly modified files apart.
2926 dsmodified &= modified
2936 dsmodified &= modified
2927 dsmodified |= modified & dsadded # dirstate added may need backup
2937 dsmodified |= modified & dsadded # dirstate added may need backup
2928 modified -= dsmodified
2938 modified -= dsmodified
2929
2939
2930 # We need to wait for some post-processing to update this set
2940 # We need to wait for some post-processing to update this set
2931 # before making the distinction. The dirstate will be used for
2941 # before making the distinction. The dirstate will be used for
2932 # that purpose.
2942 # that purpose.
2933 dsadded = added
2943 dsadded = added
2934
2944
2935 # in case of merge, files that are actually added can be reported as
2945 # in case of merge, files that are actually added can be reported as
2936 # modified, so we need to post-process the result
2946 # modified, so we need to post-process the result
2937 if p2 != nullid:
2947 if p2 != nullid:
2938 mergeadd = set(dsmodified)
2948 mergeadd = set(dsmodified)
2939 for path in dsmodified:
2949 for path in dsmodified:
2940 if path in mf:
2950 if path in mf:
2941 mergeadd.remove(path)
2951 mergeadd.remove(path)
2942 dsadded |= mergeadd
2952 dsadded |= mergeadd
2943 dsmodified -= mergeadd
2953 dsmodified -= mergeadd
2944
2954
2945 # if f is a rename, update `names` to also revert the source
2955 # if f is a rename, update `names` to also revert the source
2946 for f in localchanges:
2956 for f in localchanges:
2947 src = repo.dirstate.copied(f)
2957 src = repo.dirstate.copied(f)
2948 # XXX should we check for rename down to target node?
2958 # XXX should we check for rename down to target node?
2949 if src and src not in names and repo.dirstate[src] == 'r':
2959 if src and src not in names and repo.dirstate[src] == 'r':
2950 dsremoved.add(src)
2960 dsremoved.add(src)
2951 names[src] = True
2961 names[src] = True
2952
2962
2953 # determine the exact nature of the deleted files
2963 # determine the exact nature of the deleted files
2954 deladded = set(_deleted)
2964 deladded = set(_deleted)
2955 for path in _deleted:
2965 for path in _deleted:
2956 if path in mf:
2966 if path in mf:
2957 deladded.remove(path)
2967 deladded.remove(path)
2958 deleted = _deleted - deladded
2968 deleted = _deleted - deladded
2959
2969
2960 # distinguish between files to forget and the others
2970 # distinguish between files to forget and the others
2961 added = set()
2971 added = set()
2962 for abs in dsadded:
2972 for abs in dsadded:
2963 if repo.dirstate[abs] != 'a':
2973 if repo.dirstate[abs] != 'a':
2964 added.add(abs)
2974 added.add(abs)
2965 dsadded -= added
2975 dsadded -= added
2966
2976
2967 for abs in deladded:
2977 for abs in deladded:
2968 if repo.dirstate[abs] == 'a':
2978 if repo.dirstate[abs] == 'a':
2969 dsadded.add(abs)
2979 dsadded.add(abs)
2970 deladded -= dsadded
2980 deladded -= dsadded
2971
2981
2972 # For files marked as removed, we check if an unknown file is present at
2982 # For files marked as removed, we check if an unknown file is present at
2973 # the same path. If such a file exists it may need to be backed up.
2983 # the same path. If such a file exists it may need to be backed up.
2974 # Making the distinction at this stage helps keep the backup
2984 # Making the distinction at this stage helps keep the backup
2975 # logic simpler.
2985 # logic simpler.
2976 removunk = set()
2986 removunk = set()
2977 for abs in removed:
2987 for abs in removed:
2978 target = repo.wjoin(abs)
2988 target = repo.wjoin(abs)
2979 if os.path.lexists(target):
2989 if os.path.lexists(target):
2980 removunk.add(abs)
2990 removunk.add(abs)
2981 removed -= removunk
2991 removed -= removunk
2982
2992
2983 dsremovunk = set()
2993 dsremovunk = set()
2984 for abs in dsremoved:
2994 for abs in dsremoved:
2985 target = repo.wjoin(abs)
2995 target = repo.wjoin(abs)
2986 if os.path.lexists(target):
2996 if os.path.lexists(target):
2987 dsremovunk.add(abs)
2997 dsremovunk.add(abs)
2988 dsremoved -= dsremovunk
2998 dsremoved -= dsremovunk
2989
2999
2990 # action to be actually performed by revert
3000 # action to be actually performed by revert
2991 # (<list of files>, <message>) tuple
3001 # (<list of files>, <message>) tuple
2992 actions = {'revert': ([], _('reverting %s\n')),
3002 actions = {'revert': ([], _('reverting %s\n')),
2993 'add': ([], _('adding %s\n')),
3003 'add': ([], _('adding %s\n')),
2994 'remove': ([], _('removing %s\n')),
3004 'remove': ([], _('removing %s\n')),
2995 'drop': ([], _('removing %s\n')),
3005 'drop': ([], _('removing %s\n')),
2996 'forget': ([], _('forgetting %s\n')),
3006 'forget': ([], _('forgetting %s\n')),
2997 'undelete': ([], _('undeleting %s\n')),
3007 'undelete': ([], _('undeleting %s\n')),
2998 'noop': (None, _('no changes needed to %s\n')),
3008 'noop': (None, _('no changes needed to %s\n')),
2999 'unknown': (None, _('file not managed: %s\n')),
3009 'unknown': (None, _('file not managed: %s\n')),
3000 }
3010 }
3001
3011
3002 # "constant" that convey the backup strategy.
3012 # "constant" that convey the backup strategy.
3003 # All set to `discard` if `no-backup` is set do avoid checking
3013 # All set to `discard` if `no-backup` is set do avoid checking
3004 # no_backup lower in the code.
3014 # no_backup lower in the code.
3005 # These values are ordered for comparison purposes
3015 # These values are ordered for comparison purposes
3006 backupinteractive = 3 # do backup if interactively modified
3016 backupinteractive = 3 # do backup if interactively modified
3007 backup = 2 # unconditionally do backup
3017 backup = 2 # unconditionally do backup
3008 check = 1 # check if the existing file differs from target
3018 check = 1 # check if the existing file differs from target
3009 discard = 0 # never do backup
3019 discard = 0 # never do backup
3010 if opts.get('no_backup'):
3020 if opts.get('no_backup'):
3011 backupinteractive = backup = check = discard
3021 backupinteractive = backup = check = discard
3012 if interactive:
3022 if interactive:
3013 dsmodifiedbackup = backupinteractive
3023 dsmodifiedbackup = backupinteractive
3014 else:
3024 else:
3015 dsmodifiedbackup = backup
3025 dsmodifiedbackup = backup
3016 tobackup = set()
3026 tobackup = set()
3017
3027
3018 backupanddel = actions['remove']
3028 backupanddel = actions['remove']
3019 if not opts.get('no_backup'):
3029 if not opts.get('no_backup'):
3020 backupanddel = actions['drop']
3030 backupanddel = actions['drop']
3021
3031
3022 disptable = (
3032 disptable = (
3023 # dispatch table:
3033 # dispatch table:
3024 # file state
3034 # file state
3025 # action
3035 # action
3026 # make backup
3036 # make backup
3027
3037
3028 ## Sets that result in changes to files on disk
3038 ## Sets that result in changes to files on disk
3029 # Modified compared to target, no local change
3039 # Modified compared to target, no local change
3030 (modified, actions['revert'], discard),
3040 (modified, actions['revert'], discard),
3031 # Modified compared to target, but local file is deleted
3041 # Modified compared to target, but local file is deleted
3032 (deleted, actions['revert'], discard),
3042 (deleted, actions['revert'], discard),
3033 # Modified compared to target, local change
3043 # Modified compared to target, local change
3034 (dsmodified, actions['revert'], dsmodifiedbackup),
3044 (dsmodified, actions['revert'], dsmodifiedbackup),
3035 # Added since target
3045 # Added since target
3036 (added, actions['remove'], discard),
3046 (added, actions['remove'], discard),
3037 # Added in working directory
3047 # Added in working directory
3038 (dsadded, actions['forget'], discard),
3048 (dsadded, actions['forget'], discard),
3039 # Added since target, have local modification
3049 # Added since target, have local modification
3040 (modadded, backupanddel, backup),
3050 (modadded, backupanddel, backup),
3041 # Added since target but file is missing in working directory
3051 # Added since target but file is missing in working directory
3042 (deladded, actions['drop'], discard),
3052 (deladded, actions['drop'], discard),
3043 # Removed since target, before working copy parent
3053 # Removed since target, before working copy parent
3044 (removed, actions['add'], discard),
3054 (removed, actions['add'], discard),
3045 # Same as `removed` but an unknown file exists at the same path
3055 # Same as `removed` but an unknown file exists at the same path
3046 (removunk, actions['add'], check),
3056 (removunk, actions['add'], check),
3047 # Removed since target, marked as such in working copy parent
3057 # Removed since target, marked as such in working copy parent
3048 (dsremoved, actions['undelete'], discard),
3058 (dsremoved, actions['undelete'], discard),
3049 # Same as `dsremoved` but an unknown file exists at the same path
3059 # Same as `dsremoved` but an unknown file exists at the same path
3050 (dsremovunk, actions['undelete'], check),
3060 (dsremovunk, actions['undelete'], check),
3051 ## the following sets do not result in any file changes
3061 ## the following sets do not result in any file changes
3052 # File with no modification
3062 # File with no modification
3053 (clean, actions['noop'], discard),
3063 (clean, actions['noop'], discard),
3054 # Existing file, not tracked anywhere
3064 # Existing file, not tracked anywhere
3055 (unknown, actions['unknown'], discard),
3065 (unknown, actions['unknown'], discard),
3056 )
3066 )
3057
3067
3058 for abs, exact in sorted(names.items()):
3068 for abs, exact in sorted(names.items()):
3059 # target file to be touched on disk (relative to cwd)
3069 # target file to be touched on disk (relative to cwd)
3060 target = repo.wjoin(abs)
3070 target = repo.wjoin(abs)
3061 # search the entry in the dispatch table.
3071 # search the entry in the dispatch table.
3062 # if the file is in any of these sets, it was touched in the working
3072 # if the file is in any of these sets, it was touched in the working
3063 # directory parent and we are sure it needs to be reverted.
3073 # directory parent and we are sure it needs to be reverted.
3064 for table, (xlist, msg), dobackup in disptable:
3074 for table, (xlist, msg), dobackup in disptable:
3065 if abs not in table:
3075 if abs not in table:
3066 continue
3076 continue
3067 if xlist is not None:
3077 if xlist is not None:
3068 xlist.append(abs)
3078 xlist.append(abs)
3069 if dobackup:
3079 if dobackup:
3070 # If in interactive mode, don't automatically create
3080 # If in interactive mode, don't automatically create
3071 # .orig files (issue4793)
3081 # .orig files (issue4793)
3072 if dobackup == backupinteractive:
3082 if dobackup == backupinteractive:
3073 tobackup.add(abs)
3083 tobackup.add(abs)
3074 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3084 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3075 absbakname = scmutil.backuppath(ui, repo, abs)
3085 absbakname = scmutil.backuppath(ui, repo, abs)
3076 bakname = os.path.relpath(absbakname,
3086 bakname = os.path.relpath(absbakname,
3077 start=repo.root)
3087 start=repo.root)
3078 ui.note(_('saving current version of %s as %s\n') %
3088 ui.note(_('saving current version of %s as %s\n') %
3079 (uipathfn(abs), uipathfn(bakname)))
3089 (uipathfn(abs), uipathfn(bakname)))
3080 if not opts.get('dry_run'):
3090 if not opts.get('dry_run'):
3081 if interactive:
3091 if interactive:
3082 util.copyfile(target, absbakname)
3092 util.copyfile(target, absbakname)
3083 else:
3093 else:
3084 util.rename(target, absbakname)
3094 util.rename(target, absbakname)
3085 if opts.get('dry_run'):
3095 if opts.get('dry_run'):
3086 if ui.verbose or not exact:
3096 if ui.verbose or not exact:
3087 ui.status(msg % uipathfn(abs))
3097 ui.status(msg % uipathfn(abs))
3088 elif exact:
3098 elif exact:
3089 ui.warn(msg % uipathfn(abs))
3099 ui.warn(msg % uipathfn(abs))
3090 break
3100 break
3091
3101
3092 if not opts.get('dry_run'):
3102 if not opts.get('dry_run'):
3093 needdata = ('revert', 'add', 'undelete')
3103 needdata = ('revert', 'add', 'undelete')
3094 oplist = [actions[name][0] for name in needdata]
3104 oplist = [actions[name][0] for name in needdata]
3095 prefetch = scmutil.prefetchfiles
3105 prefetch = scmutil.prefetchfiles
3096 matchfiles = scmutil.matchfiles
3106 matchfiles = scmutil.matchfiles
3097 prefetch(repo, [ctx.rev()],
3107 prefetch(repo, [ctx.rev()],
3098 matchfiles(repo,
3108 matchfiles(repo,
3099 [f for sublist in oplist for f in sublist]))
3109 [f for sublist in oplist for f in sublist]))
3100 match = scmutil.match(repo[None], pats)
3110 match = scmutil.match(repo[None], pats)
3101 _performrevert(repo, parents, ctx, names, uipathfn, actions,
3111 _performrevert(repo, parents, ctx, names, uipathfn, actions,
3102 match, interactive, tobackup)
3112 match, interactive, tobackup)
3103
3113
3104 if targetsubs:
3114 if targetsubs:
3105 # Revert the subrepos on the revert list
3115 # Revert the subrepos on the revert list
3106 for sub in targetsubs:
3116 for sub in targetsubs:
3107 try:
3117 try:
3108 wctx.sub(sub).revert(ctx.substate[sub], *pats,
3118 wctx.sub(sub).revert(ctx.substate[sub], *pats,
3109 **pycompat.strkwargs(opts))
3119 **pycompat.strkwargs(opts))
3110 except KeyError:
3120 except KeyError:
3111 raise error.Abort("subrepository '%s' does not exist in %s!"
3121 raise error.Abort("subrepository '%s' does not exist in %s!"
3112 % (sub, short(ctx.node())))
3122 % (sub, short(ctx.node())))
3113
3123
3114 def _performrevert(repo, parents, ctx, names, uipathfn, actions,
3124 def _performrevert(repo, parents, ctx, names, uipathfn, actions,
3115 match, interactive=False, tobackup=None):
3125 match, interactive=False, tobackup=None):
3116 """function that actually perform all the actions computed for revert
3126 """function that actually perform all the actions computed for revert
3117
3127
3118 This is an independent function to let extensions plug in and react to
3128 This is an independent function to let extensions plug in and react to
3119 the imminent revert.
3129 the imminent revert.
3120
3130
3121 Make sure you have the working directory locked when calling this function.
3131 Make sure you have the working directory locked when calling this function.
3122 """
3132 """
3123 parent, p2 = parents
3133 parent, p2 = parents
3124 node = ctx.node()
3134 node = ctx.node()
3125 excluded_files = []
3135 excluded_files = []
3126
3136
3127 def checkout(f):
3137 def checkout(f):
3128 fc = ctx[f]
3138 fc = ctx[f]
3129 repo.wwrite(f, fc.data(), fc.flags())
3139 repo.wwrite(f, fc.data(), fc.flags())
3130
3140
3131 def doremove(f):
3141 def doremove(f):
3132 try:
3142 try:
3133 rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
3143 rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
3134 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3144 repo.wvfs.unlinkpath(f, rmdir=rmdir)
3135 except OSError:
3145 except OSError:
3136 pass
3146 pass
3137 repo.dirstate.remove(f)
3147 repo.dirstate.remove(f)
3138
3148
3139 def prntstatusmsg(action, f):
3149 def prntstatusmsg(action, f):
3140 exact = names[f]
3150 exact = names[f]
3141 if repo.ui.verbose or not exact:
3151 if repo.ui.verbose or not exact:
3142 repo.ui.status(actions[action][1] % uipathfn(f))
3152 repo.ui.status(actions[action][1] % uipathfn(f))
3143
3153
3144 audit_path = pathutil.pathauditor(repo.root, cached=True)
3154 audit_path = pathutil.pathauditor(repo.root, cached=True)
3145 for f in actions['forget'][0]:
3155 for f in actions['forget'][0]:
3146 if interactive:
3156 if interactive:
3147 choice = repo.ui.promptchoice(
3157 choice = repo.ui.promptchoice(
3148 _("forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f))
3158 _("forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f))
3149 if choice == 0:
3159 if choice == 0:
3150 prntstatusmsg('forget', f)
3160 prntstatusmsg('forget', f)
3151 repo.dirstate.drop(f)
3161 repo.dirstate.drop(f)
3152 else:
3162 else:
3153 excluded_files.append(f)
3163 excluded_files.append(f)
3154 else:
3164 else:
3155 prntstatusmsg('forget', f)
3165 prntstatusmsg('forget', f)
3156 repo.dirstate.drop(f)
3166 repo.dirstate.drop(f)
3157 for f in actions['remove'][0]:
3167 for f in actions['remove'][0]:
3158 audit_path(f)
3168 audit_path(f)
3159 if interactive:
3169 if interactive:
3160 choice = repo.ui.promptchoice(
3170 choice = repo.ui.promptchoice(
3161 _("remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f))
3171 _("remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f))
3162 if choice == 0:
3172 if choice == 0:
3163 prntstatusmsg('remove', f)
3173 prntstatusmsg('remove', f)
3164 doremove(f)
3174 doremove(f)
3165 else:
3175 else:
3166 excluded_files.append(f)
3176 excluded_files.append(f)
3167 else:
3177 else:
3168 prntstatusmsg('remove', f)
3178 prntstatusmsg('remove', f)
3169 doremove(f)
3179 doremove(f)
3170 for f in actions['drop'][0]:
3180 for f in actions['drop'][0]:
3171 audit_path(f)
3181 audit_path(f)
3172 prntstatusmsg('drop', f)
3182 prntstatusmsg('drop', f)
3173 repo.dirstate.remove(f)
3183 repo.dirstate.remove(f)
3174
3184
3175 normal = None
3185 normal = None
3176 if node == parent:
3186 if node == parent:
3177 # We're reverting to our parent. If possible, we'd like status
3187 # We're reverting to our parent. If possible, we'd like status
3178 # to report the file as clean. We have to use normallookup for
3188 # to report the file as clean. We have to use normallookup for
3179 # merges to avoid losing information about merged/dirty files.
3189 # merges to avoid losing information about merged/dirty files.
3180 if p2 != nullid:
3190 if p2 != nullid:
3181 normal = repo.dirstate.normallookup
3191 normal = repo.dirstate.normallookup
3182 else:
3192 else:
3183 normal = repo.dirstate.normal
3193 normal = repo.dirstate.normal
3184
3194
3185 newlyaddedandmodifiedfiles = set()
3195 newlyaddedandmodifiedfiles = set()
3186 if interactive:
3196 if interactive:
3187 # Prompt the user for changes to revert
3197 # Prompt the user for changes to revert
3188 torevert = [f for f in actions['revert'][0] if f not in excluded_files]
3198 torevert = [f for f in actions['revert'][0] if f not in excluded_files]
3189 m = scmutil.matchfiles(repo, torevert)
3199 m = scmutil.matchfiles(repo, torevert)
3190 diffopts = patch.difffeatureopts(repo.ui, whitespace=True,
3200 diffopts = patch.difffeatureopts(repo.ui, whitespace=True,
3191 section='commands',
3201 section='commands',
3192 configprefix='revert.interactive.')
3202 configprefix='revert.interactive.')
3193 diffopts.nodates = True
3203 diffopts.nodates = True
3194 diffopts.git = True
3204 diffopts.git = True
3195 operation = 'apply'
3205 operation = 'apply'
3196 if node == parent:
3206 if node == parent:
3197 if repo.ui.configbool('experimental',
3207 if repo.ui.configbool('experimental',
3198 'revert.interactive.select-to-keep'):
3208 'revert.interactive.select-to-keep'):
3199 operation = 'keep'
3209 operation = 'keep'
3200 else:
3210 else:
3201 operation = 'discard'
3211 operation = 'discard'
3202
3212
3203 if operation == 'apply':
3213 if operation == 'apply':
3204 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3214 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3205 else:
3215 else:
3206 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3216 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3207 originalchunks = patch.parsepatch(diff)
3217 originalchunks = patch.parsepatch(diff)
3208
3218
3209 try:
3219 try:
3210
3220
3211 chunks, opts = recordfilter(repo.ui, originalchunks, match,
3221 chunks, opts = recordfilter(repo.ui, originalchunks, match,
3212 operation=operation)
3222 operation=operation)
3213 if operation == 'discard':
3223 if operation == 'discard':
3214 chunks = patch.reversehunks(chunks)
3224 chunks = patch.reversehunks(chunks)
3215
3225
3216 except error.PatchError as err:
3226 except error.PatchError as err:
3217 raise error.Abort(_('error parsing patch: %s') % err)
3227 raise error.Abort(_('error parsing patch: %s') % err)
3218
3228
3219 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3229 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3220 if tobackup is None:
3230 if tobackup is None:
3221 tobackup = set()
3231 tobackup = set()
3222 # Apply changes
3232 # Apply changes
3223 fp = stringio()
3233 fp = stringio()
3224 # chunks are serialized per file, but files aren't sorted
3234 # chunks are serialized per file, but files aren't sorted
3225 for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
3235 for f in sorted(set(c.header.filename() for c in chunks if ishunk(c))):
3226 prntstatusmsg('revert', f)
3236 prntstatusmsg('revert', f)
3227 files = set()
3237 files = set()
3228 for c in chunks:
3238 for c in chunks:
3229 if ishunk(c):
3239 if ishunk(c):
3230 abs = c.header.filename()
3240 abs = c.header.filename()
3231 # Create a backup file only if this hunk should be backed up
3241 # Create a backup file only if this hunk should be backed up
3232 if c.header.filename() in tobackup:
3242 if c.header.filename() in tobackup:
3233 target = repo.wjoin(abs)
3243 target = repo.wjoin(abs)
3234 bakname = scmutil.backuppath(repo.ui, repo, abs)
3244 bakname = scmutil.backuppath(repo.ui, repo, abs)
3235 util.copyfile(target, bakname)
3245 util.copyfile(target, bakname)
3236 tobackup.remove(abs)
3246 tobackup.remove(abs)
3237 if abs not in files:
3247 if abs not in files:
3238 files.add(abs)
3248 files.add(abs)
3239 if operation == 'keep':
3249 if operation == 'keep':
3240 checkout(abs)
3250 checkout(abs)
3241 c.write(fp)
3251 c.write(fp)
3242 dopatch = fp.tell()
3252 dopatch = fp.tell()
3243 fp.seek(0)
3253 fp.seek(0)
3244 if dopatch:
3254 if dopatch:
3245 try:
3255 try:
3246 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3256 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3247 except error.PatchError as err:
3257 except error.PatchError as err:
3248 raise error.Abort(pycompat.bytestr(err))
3258 raise error.Abort(pycompat.bytestr(err))
3249 del fp
3259 del fp
3250 else:
3260 else:
3251 for f in actions['revert'][0]:
3261 for f in actions['revert'][0]:
3252 prntstatusmsg('revert', f)
3262 prntstatusmsg('revert', f)
3253 checkout(f)
3263 checkout(f)
3254 if normal:
3264 if normal:
3255 normal(f)
3265 normal(f)
3256
3266
3257 for f in actions['add'][0]:
3267 for f in actions['add'][0]:
3258 # Don't checkout modified files; they are already created by the diff
3268 # Don't checkout modified files; they are already created by the diff
3259 if f not in newlyaddedandmodifiedfiles:
3269 if f not in newlyaddedandmodifiedfiles:
3260 prntstatusmsg('add', f)
3270 prntstatusmsg('add', f)
3261 checkout(f)
3271 checkout(f)
3262 repo.dirstate.add(f)
3272 repo.dirstate.add(f)
3263
3273
3264 normal = repo.dirstate.normallookup
3274 normal = repo.dirstate.normallookup
3265 if node == parent and p2 == nullid:
3275 if node == parent and p2 == nullid:
3266 normal = repo.dirstate.normal
3276 normal = repo.dirstate.normal
3267 for f in actions['undelete'][0]:
3277 for f in actions['undelete'][0]:
3268 if interactive:
3278 if interactive:
3269 choice = repo.ui.promptchoice(
3279 choice = repo.ui.promptchoice(
3270 _("add back removed file %s (Yn)?$$ &Yes $$ &No") % f)
3280 _("add back removed file %s (Yn)?$$ &Yes $$ &No") % f)
3271 if choice == 0:
3281 if choice == 0:
3272 prntstatusmsg('undelete', f)
3282 prntstatusmsg('undelete', f)
3273 checkout(f)
3283 checkout(f)
3274 normal(f)
3284 normal(f)
3275 else:
3285 else:
3276 excluded_files.append(f)
3286 excluded_files.append(f)
3277 else:
3287 else:
3278 prntstatusmsg('undelete', f)
3288 prntstatusmsg('undelete', f)
3279 checkout(f)
3289 checkout(f)
3280 normal(f)
3290 normal(f)
3281
3291
3282 copied = copies.pathcopies(repo[parent], ctx)
3292 copied = copies.pathcopies(repo[parent], ctx)
3283
3293
3284 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3294 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3285 if f in copied:
3295 if f in copied:
3286 repo.dirstate.copy(copied[f], f)
3296 repo.dirstate.copy(copied[f], f)
3287
3297
3288 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3298 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3289 # commands.outgoing. "missing" is the "missing" attribute of the result of
3299 # commands.outgoing. "missing" is the "missing" attribute of the result of
3290 # "findcommonoutgoing()"
3300 # "findcommonoutgoing()"
3291 outgoinghooks = util.hooks()
3301 outgoinghooks = util.hooks()
3292
3302
3293 # a list of (ui, repo) functions called by commands.summary
3303 # a list of (ui, repo) functions called by commands.summary
3294 summaryhooks = util.hooks()
3304 summaryhooks = util.hooks()
3295
3305
3296 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3306 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3297 #
3307 #
3298 # functions should return a tuple of the booleans below, if 'changes' is None:
3308 # functions should return a tuple of the booleans below, if 'changes' is None:
3299 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3309 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3300 #
3310 #
3301 # otherwise, 'changes' is a tuple of tuples below:
3311 # otherwise, 'changes' is a tuple of tuples below:
3302 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3312 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3303 # - (desturl, destbranch, destpeer, outgoing)
3313 # - (desturl, destbranch, destpeer, outgoing)
3304 summaryremotehooks = util.hooks()
3314 summaryremotehooks = util.hooks()
3305
3315
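These hook lists are the extension points that commands.outgoing and commands.summary walk; an extension registers a callback on them at setup time. A hypothetical sketch (the extension name and message are invented):

    # in a hypothetical extension module
    from mercurial import cmdutil

    def summaryhook(ui, repo):
        # called by 'hg summary'; emit one extra status line
        ui.status(b'myext: nothing pending\n')

    def extsetup(ui):
        cmdutil.summaryhooks.add(b'myext', summaryhook)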
3306 # A list of state files kept by multistep operations like graft.
3316 # A list of state files kept by multistep operations like graft.
3307 # Since graft cannot be aborted, it is considered 'clearable' by update.
3317 # Since graft cannot be aborted, it is considered 'clearable' by update.
3308 # note: bisect is intentionally excluded
3318 # note: bisect is intentionally excluded
3309 # (state file, clearable, allowcommit, error, hint)
3319 # (state file, clearable, allowcommit, error, hint)
3310 unfinishedstates = [
3320 unfinishedstates = [
3311 ('graftstate', True, False, _('graft in progress'),
3321 ('graftstate', True, False, _('graft in progress'),
3312 _("use 'hg graft --continue' or 'hg graft --stop' to stop")),
3322 _("use 'hg graft --continue' or 'hg graft --stop' to stop")),
3313 ('updatestate', True, False, _('last update was interrupted'),
3323 ('updatestate', True, False, _('last update was interrupted'),
3314 _("use 'hg update' to get a consistent checkout"))
3324 _("use 'hg update' to get a consistent checkout"))
3315 ]
3325 ]
3316
3326
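Extensions that implement their own multistep commands usually append an entry to unfinishedstates so that checkunfinished() and clearunfinished() below know about their state file. A hypothetical sketch (the state file name and messages are invented, not shipped defaults):

    # in a hypothetical extension's setup code
    from mercurial import cmdutil
    from mercurial.i18n import _

    cmdutil.unfinishedstates.append(
        ('myfixupstate', False, False, _('myfixup in progress'),
         _("use 'hg myfixup --continue' or 'hg myfixup --abort'")))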
3317 def checkunfinished(repo, commit=False):
3327 def checkunfinished(repo, commit=False):
3318 '''Look for an unfinished multistep operation, like graft, and abort
3328 '''Look for an unfinished multistep operation, like graft, and abort
3319 if found. It's probably good to check this right before
3329 if found. It's probably good to check this right before
3320 bailifchanged().
3330 bailifchanged().
3321 '''
3331 '''
3322 # Check for non-clearable states first, so things like rebase will take
3332 # Check for non-clearable states first, so things like rebase will take
3323 # precedence over update.
3333 # precedence over update.
3324 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3334 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3325 if clearable or (commit and allowcommit):
3335 if clearable or (commit and allowcommit):
3326 continue
3336 continue
3327 if repo.vfs.exists(f):
3337 if repo.vfs.exists(f):
3328 raise error.Abort(msg, hint=hint)
3338 raise error.Abort(msg, hint=hint)
3329
3339
3330 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3340 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3331 if not clearable or (commit and allowcommit):
3341 if not clearable or (commit and allowcommit):
3332 continue
3342 continue
3333 if repo.vfs.exists(f):
3343 if repo.vfs.exists(f):
3334 raise error.Abort(msg, hint=hint)
3344 raise error.Abort(msg, hint=hint)
3335
3345
3336 def clearunfinished(repo):
3346 def clearunfinished(repo):
3337 '''Check for unfinished operations (as above), and clear the ones
3347 '''Check for unfinished operations (as above), and clear the ones
3338 that are clearable.
3348 that are clearable.
3339 '''
3349 '''
3340 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3350 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3341 if not clearable and repo.vfs.exists(f):
3351 if not clearable and repo.vfs.exists(f):
3342 raise error.Abort(msg, hint=hint)
3352 raise error.Abort(msg, hint=hint)
3343 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3353 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3344 if clearable and repo.vfs.exists(f):
3354 if clearable and repo.vfs.exists(f):
3345 util.unlink(repo.vfs.join(f))
3355 util.unlink(repo.vfs.join(f))
3346
3356
3347 afterresolvedstates = [
3357 afterresolvedstates = [
3348 ('graftstate',
3358 ('graftstate',
3349 _('hg graft --continue')),
3359 _('hg graft --continue')),
3350 ]
3360 ]
3351
3361
3352 def howtocontinue(repo):
3362 def howtocontinue(repo):
3353 '''Check for an unfinished operation and return the command to finish
3363 '''Check for an unfinished operation and return the command to finish
3354 it.
3364 it.
3355
3365
3356 afterresolvedstates tuples define a .hg/{file} and the corresponding
3366 afterresolvedstates tuples define a .hg/{file} and the corresponding
3357 command needed to finish it.
3367 command needed to finish it.
3358
3368
3359 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3369 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3360 a boolean.
3370 a boolean.
3361 '''
3371 '''
3362 contmsg = _("continue: %s")
3372 contmsg = _("continue: %s")
3363 for f, msg in afterresolvedstates:
3373 for f, msg in afterresolvedstates:
3364 if repo.vfs.exists(f):
3374 if repo.vfs.exists(f):
3365 return contmsg % msg, True
3375 return contmsg % msg, True
3366 if repo[None].dirty(missing=True, merge=False, branch=False):
3376 if repo[None].dirty(missing=True, merge=False, branch=False):
3367 return contmsg % _("hg commit"), False
3377 return contmsg % _("hg commit"), False
3368 return None, None
3378 return None, None
3369
3379
3370 def checkafterresolved(repo):
3380 def checkafterresolved(repo):
3371 '''Inform the user about the next action after completing hg resolve
3381 '''Inform the user about the next action after completing hg resolve
3372
3382
3373 If there's a matching afterresolvedstates, howtocontinue will yield
3383 If there's a matching afterresolvedstates, howtocontinue will yield
3374 repo.ui.warn as the reporter.
3384 repo.ui.warn as the reporter.
3375
3385
3376 Otherwise, it will yield repo.ui.note.
3386 Otherwise, it will yield repo.ui.note.
3377 '''
3387 '''
3378 msg, warning = howtocontinue(repo)
3388 msg, warning = howtocontinue(repo)
3379 if msg is not None:
3389 if msg is not None:
3380 if warning:
3390 if warning:
3381 repo.ui.warn("%s\n" % msg)
3391 repo.ui.warn("%s\n" % msg)
3382 else:
3392 else:
3383 repo.ui.note("%s\n" % msg)
3393 repo.ui.note("%s\n" % msg)
3384
3394
3385 def wrongtooltocontinue(repo, task):
3395 def wrongtooltocontinue(repo, task):
3386 '''Raise an abort suggesting how to properly continue if there is an
3396 '''Raise an abort suggesting how to properly continue if there is an
3387 active task.
3397 active task.
3388
3398
3389 Uses howtocontinue() to find the active task.
3399 Uses howtocontinue() to find the active task.
3390
3400
3391 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3401 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3392 a hint.
3402 a hint.
3393 '''
3403 '''
3394 after = howtocontinue(repo)
3404 after = howtocontinue(repo)
3395 hint = None
3405 hint = None
3396 if after[1]:
3406 if after[1]:
3397 hint = after[0]
3407 hint = after[0]
3398 raise error.Abort(_('no %s in progress') % task, hint=hint)
3408 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,667 +1,714 @@
1 #testcases lfsremote-on lfsremote-off
1 #testcases lfsremote-on lfsremote-off
2 #require serve no-reposimplestore no-chg
2 #require serve no-reposimplestore no-chg
3
3
4 This test splits `hg serve` with and without using the extension into separate
4 This test splits `hg serve` with and without using the extension into separate
5 test cases. The tests are broken down as follows, where "LFS"/"No-LFS"
5 test cases. The tests are broken down as follows, where "LFS"/"No-LFS"
6 indicates whether or not there are commits that use an LFS file, and "D"/"E"
6 indicates whether or not there are commits that use an LFS file, and "D"/"E"
7 indicates whether or not the extension is loaded. The "X" cases are not tested
7 indicates whether or not the extension is loaded. The "X" cases are not tested
8 individually, because the lfs requirement causes the process to bail early if
8 individually, because the lfs requirement causes the process to bail early if
9 the extension is disabled.
9 the extension is disabled.
10
10
11 . Server
11 . Server
12 .
12 .
13 . No-LFS LFS
13 . No-LFS LFS
14 . +----------------------------+
14 . +----------------------------+
15 . | || D | E | D | E |
15 . | || D | E | D | E |
16 . |---++=======================|
16 . |---++=======================|
17 . C | D || N/A | #1 | X | #4 |
17 . C | D || N/A | #1 | X | #4 |
18 . l No +---++-----------------------|
18 . l No +---++-----------------------|
19 . i LFS | E || #2 | #2 | X | #5 |
19 . i LFS | E || #2 | #2 | X | #5 |
20 . e +---++-----------------------|
20 . e +---++-----------------------|
21 . n | D || X | X | X | X |
21 . n | D || X | X | X | X |
22 . t LFS |---++-----------------------|
22 . t LFS |---++-----------------------|
23 . | E || #3 | #3 | X | #6 |
23 . | E || #3 | #3 | X | #6 |
24 . |---++-----------------------+
24 . |---++-----------------------+
25
25
26 make command server magic visible
26 make command server magic visible
27
27
28 #if windows
28 #if windows
29 $ PYTHONPATH="$TESTDIR/../contrib;$PYTHONPATH"
29 $ PYTHONPATH="$TESTDIR/../contrib;$PYTHONPATH"
30 #else
30 #else
31 $ PYTHONPATH="$TESTDIR/../contrib:$PYTHONPATH"
31 $ PYTHONPATH="$TESTDIR/../contrib:$PYTHONPATH"
32 #endif
32 #endif
33 $ export PYTHONPATH
33 $ export PYTHONPATH
34
34
35 $ hg init server
35 $ hg init server
36 $ SERVER_REQUIRES="$TESTTMP/server/.hg/requires"
36 $ SERVER_REQUIRES="$TESTTMP/server/.hg/requires"
37
37
38 $ cat > $TESTTMP/debugprocessors.py <<EOF
39 > from mercurial import (
40 >     cmdutil,
41 >     commands,
42 >     pycompat,
43 >     registrar,
44 > )
45 > cmdtable = {}
46 > command = registrar.command(cmdtable)
47 > @command(b'debugprocessors', [], b'FILE')
48 > def debugprocessors(ui, repo, file_=None, **opts):
49 >     opts = pycompat.byteskwargs(opts)
50 >     opts[b'changelog'] = False
51 >     opts[b'manifest'] = False
52 >     opts[b'dir'] = False
53 >     rl = cmdutil.openrevlog(repo, b'debugprocessors', file_, opts)
54 >     for flag, proc in rl._flagprocessors.items():
55 >         ui.status(b"registered processor '%#x'\n" % (flag))
56 > EOF
57
57
58 Skip the experimental.changegroup3=True config. Failure to agree on this comes
59 first, and causes an "abort: no common changegroup version" if the extension is
60 only loaded on one side. If that setting *is* enabled, the subsequent failure
61 when the extension is only loaded on one side is "abort: missing processor for
62 flag '0x2000'!" (possibly also masked by the Internal Server Error message).
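For illustration, a minimal sketch (hypothetical, not Mercurial's actual exchange
code) of why the disagreement aborts so early: both sides advertise the
changegroup versions they support, and the transfer can only proceed if the
intersection is non-empty.

    def common_changegroup_version(client_versions, server_versions):
        # Each side advertises the changegroup versions it can handle; with
        # no overlap there is nothing to negotiate and the operation aborts.
        common = set(client_versions) & set(server_versions)
        if not common:
            raise Exception("no common changegroup version")
        return max(common)

When the two sides cannot agree on a usable version, the result is the
"no common changegroup version" abort quoted above.
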
63 $ cat >> $HGRCPATH <<EOF
63 $ cat >> $HGRCPATH <<EOF
64 > [extensions]
64 > [extensions]
65 > debugprocessors = $TESTTMP/debugprocessors.py
65 > debugprocessors = $TESTTMP/debugprocessors.py
66 > [experimental]
66 > [experimental]
67 > lfs.disableusercache = True
67 > lfs.disableusercache = True
68 > [lfs]
68 > [lfs]
69 > threshold=10
69 > threshold=10
70 > [web]
70 > [web]
71 > allow_push=*
71 > allow_push=*
72 > push_ssl=False
72 > push_ssl=False
73 > EOF
73 > EOF
74
74
75 $ cp $HGRCPATH $HGRCPATH.orig
75 $ cp $HGRCPATH $HGRCPATH.orig
76
76
77 #if lfsremote-on
77 #if lfsremote-on
78 $ hg --config extensions.lfs= -R server \
78 $ hg --config extensions.lfs= -R server \
79 > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
79 > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
80 #else
80 #else
81 $ hg --config extensions.lfs=! -R server \
81 $ hg --config extensions.lfs=! -R server \
82 > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
82 > serve -p $HGPORT -d --pid-file=hg.pid --errorlog=$TESTTMP/errors.log
83 #endif
83 #endif
84
84
85 $ cat hg.pid >> $DAEMON_PIDS
85 $ cat hg.pid >> $DAEMON_PIDS
86 $ hg clone -q http://localhost:$HGPORT client
86 $ hg clone -q http://localhost:$HGPORT client
87 $ grep 'lfs' client/.hg/requires $SERVER_REQUIRES
87 $ grep 'lfs' client/.hg/requires $SERVER_REQUIRES
88 [1]
88 [1]
89
89
90 This trivial repo will force commandserver to load the extension, but not call
91 reposetup() on the other repo actually being operated on. This gives coverage
92 that wrapper functions do not assume reposetup() was called.
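The defensive pattern being covered can be sketched as follows (a hypothetical
example, not the actual hgext.lfs code): a wrapper must fall back to the
original behaviour when handed a repo that reposetup() never touched.

    def wrappedfunction(orig, repo, *args, **kwargs):
        # reposetup() normally attaches lfs-specific state to the repo.  If
        # this repo never went through reposetup() (e.g. the command server
        # only loaded the extension for a different repo), behave exactly
        # like the unwrapped function.
        if b'lfs' not in getattr(repo, 'requirements', set()):
            return orig(repo, *args, **kwargs)
        # ... lfs-specific handling would go here ...
        return orig(repo, *args, **kwargs)
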
93
93
94 $ hg init $TESTTMP/cmdservelfs
94 $ hg init $TESTTMP/cmdservelfs
95 $ cat >> $TESTTMP/cmdservelfs/.hg/hgrc << EOF
95 $ cat >> $TESTTMP/cmdservelfs/.hg/hgrc << EOF
96 > [extensions]
96 > [extensions]
97 > lfs =
97 > lfs =
98 > EOF
98 > EOF
99
99
100 --------------------------------------------------------------------------------
101 Case #1: client with non-lfs content and the extension disabled; server with
102 non-lfs content, and the extension enabled.
103
103
104 $ cd client
104 $ cd client
105 $ echo 'non-lfs' > nonlfs.txt
105 $ echo 'non-lfs' > nonlfs.txt
106 >>> from __future__ import absolute_import
106 >>> from __future__ import absolute_import
107 >>> from hgclient import check, readchannel, runcommand
107 >>> from hgclient import check, readchannel, runcommand
108 >>> @check
108 >>> @check
109 ... def diff(server):
109 ... def diff(server):
110 ... readchannel(server)
110 ... readchannel(server)
111 ... # run an arbitrary command in the repo with the extension loaded
111 ... # run an arbitrary command in the repo with the extension loaded
112 ... runcommand(server, [b'id', b'-R', b'../cmdservelfs'])
112 ... runcommand(server, [b'id', b'-R', b'../cmdservelfs'])
113 ... # now run a command in a repo without the extension to ensure that
113 ... # now run a command in a repo without the extension to ensure that
114 ... # files are added safely..
114 ... # files are added safely..
115 ... runcommand(server, [b'ci', b'-Aqm', b'non-lfs'])
115 ... runcommand(server, [b'ci', b'-Aqm', b'non-lfs'])
116 ... # .. and that scmutil.prefetchfiles() safely no-ops..
116 ... # .. and that scmutil.prefetchfiles() safely no-ops..
117 ... runcommand(server, [b'diff', b'-r', b'.~1'])
117 ... runcommand(server, [b'diff', b'-r', b'.~1'])
118 ... # .. and that debugupgraderepo safely no-ops.
118 ... # .. and that debugupgraderepo safely no-ops.
119 ... runcommand(server, [b'debugupgraderepo', b'-q', b'--run'])
119 ... runcommand(server, [b'debugupgraderepo', b'-q', b'--run'])
120 *** runcommand id -R ../cmdservelfs
120 *** runcommand id -R ../cmdservelfs
121 000000000000 tip
121 000000000000 tip
122 *** runcommand ci -Aqm non-lfs
122 *** runcommand ci -Aqm non-lfs
123 *** runcommand diff -r .~1
123 *** runcommand diff -r .~1
124 diff -r 000000000000 nonlfs.txt
124 diff -r 000000000000 nonlfs.txt
125 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
125 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
126 +++ b/nonlfs.txt Thu Jan 01 00:00:00 1970 +0000
126 +++ b/nonlfs.txt Thu Jan 01 00:00:00 1970 +0000
127 @@ -0,0 +1,1 @@
127 @@ -0,0 +1,1 @@
128 +non-lfs
128 +non-lfs
129 *** runcommand debugupgraderepo -q --run
129 *** runcommand debugupgraderepo -q --run
130 upgrade will perform the following actions:
130 upgrade will perform the following actions:
131
131
132 requirements
132 requirements
133 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
133 preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
134
134
135 beginning upgrade...
135 beginning upgrade...
136 repository locked and read-only
136 repository locked and read-only
137 creating temporary repository to stage migrated data: * (glob)
137 creating temporary repository to stage migrated data: * (glob)
138 (it is safe to interrupt this process any time before data migration completes)
138 (it is safe to interrupt this process any time before data migration completes)
139 migrating 3 total revisions (1 in filelogs, 1 in manifests, 1 in changelog)
139 migrating 3 total revisions (1 in filelogs, 1 in manifests, 1 in changelog)
140 migrating 324 bytes in store; 129 bytes tracked data
140 migrating 324 bytes in store; 129 bytes tracked data
141 migrating 1 filelogs containing 1 revisions (73 bytes in store; 8 bytes tracked data)
141 migrating 1 filelogs containing 1 revisions (73 bytes in store; 8 bytes tracked data)
142 finished migrating 1 filelog revisions across 1 filelogs; change in size: 0 bytes
142 finished migrating 1 filelog revisions across 1 filelogs; change in size: 0 bytes
143 migrating 1 manifests containing 1 revisions (117 bytes in store; 52 bytes tracked data)
143 migrating 1 manifests containing 1 revisions (117 bytes in store; 52 bytes tracked data)
144 finished migrating 1 manifest revisions across 1 manifests; change in size: 0 bytes
144 finished migrating 1 manifest revisions across 1 manifests; change in size: 0 bytes
145 migrating changelog containing 1 revisions (134 bytes in store; 69 bytes tracked data)
145 migrating changelog containing 1 revisions (134 bytes in store; 69 bytes tracked data)
146 finished migrating 1 changelog revisions; change in size: 0 bytes
146 finished migrating 1 changelog revisions; change in size: 0 bytes
147 finished migrating 3 total revisions; total change in store size: 0 bytes
147 finished migrating 3 total revisions; total change in store size: 0 bytes
148 copying phaseroots
148 copying phaseroots
149 data fully migrated to temporary repository
149 data fully migrated to temporary repository
150 marking source repository as being upgraded; clients will be unable to read from repository
150 marking source repository as being upgraded; clients will be unable to read from repository
151 starting in-place swap of repository data
151 starting in-place swap of repository data
152 replaced files will be backed up at * (glob)
152 replaced files will be backed up at * (glob)
153 replacing store...
153 replacing store...
154 store replacement complete; repository was inconsistent for *s (glob)
154 store replacement complete; repository was inconsistent for *s (glob)
155 finalizing requirements file and making repository readable again
155 finalizing requirements file and making repository readable again
156 removing temporary repository * (glob)
156 removing temporary repository * (glob)
157 copy of old repository backed up at * (glob)
157 copy of old repository backed up at * (glob)
158 the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified
158 the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified
159
159
160 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
160 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
161 [1]
161 [1]
162
162
163 #if lfsremote-on
163 #if lfsremote-on
164
164
165 $ hg push -q
165 $ hg push -q
166 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
166 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
167 [1]
167 [1]
168
168
169 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client1_clone
169 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client1_clone
170 $ grep 'lfs' $TESTTMP/client1_clone/.hg/requires $SERVER_REQUIRES
170 $ grep 'lfs' $TESTTMP/client1_clone/.hg/requires $SERVER_REQUIRES
171 [1]
171 [1]
172
172
173 $ hg init $TESTTMP/client1_pull
173 $ hg init $TESTTMP/client1_pull
174 $ hg -R $TESTTMP/client1_pull pull -q http://localhost:$HGPORT
174 $ hg -R $TESTTMP/client1_pull pull -q http://localhost:$HGPORT
175 $ grep 'lfs' $TESTTMP/client1_pull/.hg/requires $SERVER_REQUIRES
175 $ grep 'lfs' $TESTTMP/client1_pull/.hg/requires $SERVER_REQUIRES
176 [1]
176 [1]
177
177
178 $ hg identify http://localhost:$HGPORT
178 $ hg identify http://localhost:$HGPORT
179 d437e1d24fbd
179 d437e1d24fbd
180
180
181 #endif
181 #endif
182
182
183 --------------------------------------------------------------------------------
184 Case #2: client with non-lfs content and the extension enabled; server with
185 non-lfs content, and the extension state controlled by #testcases.
186
186
187 $ cat >> $HGRCPATH <<EOF
187 $ cat >> $HGRCPATH <<EOF
188 > [extensions]
188 > [extensions]
189 > lfs =
189 > lfs =
190 > EOF
190 > EOF
191 $ echo 'non-lfs' > nonlfs2.txt
191 $ echo 'non-lfs' > nonlfs2.txt
192 $ hg ci -Aqm 'non-lfs file with lfs client'
192 $ hg ci -Aqm 'non-lfs file with lfs client'
193
193
194 Since no lfs content has been added yet, the push is allowed, even when the
195 extension is not enabled remotely.
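A sketch of the check implied here (hypothetical helper names; the real logic
lives in the lfs extension's push path): the push is only refused once the
outgoing changesets actually contain lfs content and the destination does not
advertise lfs support.

    def checklfspush(outgoing_has_lfs_content, remote_supports_lfs):
        # With no lfs content outgoing there is nothing the server could
        # mishandle, so the push proceeds even against a plain server.
        if outgoing_has_lfs_content and not remote_supports_lfs:
            raise Exception(
                "required features are not supported in the destination: lfs")
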
196
196
197 $ hg push -q
197 $ hg push -q
198 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
198 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
199 [1]
199 [1]
200
200
201 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client2_clone
201 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client2_clone
202 $ grep 'lfs' $TESTTMP/client2_clone/.hg/requires $SERVER_REQUIRES
202 $ grep 'lfs' $TESTTMP/client2_clone/.hg/requires $SERVER_REQUIRES
203 [1]
203 [1]
204
204
205 $ hg init $TESTTMP/client2_pull
205 $ hg init $TESTTMP/client2_pull
206 $ hg -R $TESTTMP/client2_pull pull -q http://localhost:$HGPORT
206 $ hg -R $TESTTMP/client2_pull pull -q http://localhost:$HGPORT
207 $ grep 'lfs' $TESTTMP/client2_pull/.hg/requires $SERVER_REQUIRES
207 $ grep 'lfs' $TESTTMP/client2_pull/.hg/requires $SERVER_REQUIRES
208 [1]
208 [1]
209
209
210 $ hg identify http://localhost:$HGPORT
210 $ hg identify http://localhost:$HGPORT
211 1477875038c6
211 1477875038c6
212
212
213 --------------------------------------------------------------------------------
214 Case #3: client with lfs content and the extension enabled; server with
215 non-lfs content, and the extension state controlled by #testcases. The server
216 should have an 'lfs' requirement after it picks up its first commit with a blob.
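The mechanism exercised here is, roughly, the following (a hypothetical sketch;
the real hook, hgext.lfs.checkrequireslfs, shows up later in this test's
output): the first lfs revision that enters the repository causes 'lfs' to be
added to .hg/requires.

    def addlfsrequirement(repo, incoming_filectxs, islfs, writerequirements):
        # 'islfs' and 'writerequirements' are stand-ins for however lfs
        # revisions are detected and the requires file is persisted; they are
        # assumed helpers, not real Mercurial APIs.
        if b'lfs' in repo.requirements:
            return
        if any(islfs(fctx) for fctx in incoming_filectxs):
            repo.requirements.add(b'lfs')
            writerequirements(repo)
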
217
217
218 $ echo 'this is a big lfs file' > lfs.bin
218 $ echo 'this is a big lfs file' > lfs.bin
219 $ hg ci -Aqm 'lfs'
219 $ hg ci -Aqm 'lfs'
220 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
220 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
221 .hg/requires:lfs
221 .hg/requires:lfs
222
222
223 #if lfsremote-off
223 #if lfsremote-off
224 $ hg push -q
224 $ hg push -q
225 abort: required features are not supported in the destination: lfs
225 abort: required features are not supported in the destination: lfs
226 (enable the lfs extension on the server)
226 (enable the lfs extension on the server)
227 [255]
227 [255]
228 #else
228 #else
229 $ hg push -q
229 $ hg push -q
230 #endif
230 #endif
231 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
231 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
232 .hg/requires:lfs
232 .hg/requires:lfs
233 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
233 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
234
234
235 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client3_clone
235 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client3_clone
236 $ grep 'lfs' $TESTTMP/client3_clone/.hg/requires $SERVER_REQUIRES || true
236 $ grep 'lfs' $TESTTMP/client3_clone/.hg/requires $SERVER_REQUIRES || true
237 $TESTTMP/client3_clone/.hg/requires:lfs (lfsremote-on !)
237 $TESTTMP/client3_clone/.hg/requires:lfs (lfsremote-on !)
238 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
238 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
239
239
240 $ hg init $TESTTMP/client3_pull
240 $ hg init $TESTTMP/client3_pull
241 $ hg -R $TESTTMP/client3_pull pull -q http://localhost:$HGPORT
241 $ hg -R $TESTTMP/client3_pull pull -q http://localhost:$HGPORT
242 $ grep 'lfs' $TESTTMP/client3_pull/.hg/requires $SERVER_REQUIRES || true
242 $ grep 'lfs' $TESTTMP/client3_pull/.hg/requires $SERVER_REQUIRES || true
243 $TESTTMP/client3_pull/.hg/requires:lfs (lfsremote-on !)
243 $TESTTMP/client3_pull/.hg/requires:lfs (lfsremote-on !)
244 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
244 $TESTTMP/server/.hg/requires:lfs (lfsremote-on !)
245
245
246 Test that the commit/changegroup requirement check hook can be run multiple
247 times.
248
248
249 $ hg clone -qr 0 http://localhost:$HGPORT $TESTTMP/cmdserve_client3
249 $ hg clone -qr 0 http://localhost:$HGPORT $TESTTMP/cmdserve_client3
250
250
251 $ cd ../cmdserve_client3
251 $ cd ../cmdserve_client3
252
252
253 >>> from __future__ import absolute_import
253 >>> from __future__ import absolute_import
254 >>> from hgclient import check, readchannel, runcommand
254 >>> from hgclient import check, readchannel, runcommand
255 >>> @check
255 >>> @check
256 ... def addrequirement(server):
256 ... def addrequirement(server):
257 ... readchannel(server)
257 ... readchannel(server)
258 ... # change the repo in a way that adds the lfs requirement
258 ... # change the repo in a way that adds the lfs requirement
259 ... runcommand(server, [b'pull', b'-qu'])
259 ... runcommand(server, [b'pull', b'-qu'])
260 ... # Now cause the requirement adding hook to fire again, without going
260 ... # Now cause the requirement adding hook to fire again, without going
261 ... # through reposetup() again.
261 ... # through reposetup() again.
262 ... with open('file.txt', 'wb') as fp:
262 ... with open('file.txt', 'wb') as fp:
263 ... fp.write(b'data')
263 ... fp.write(b'data')
264 ... runcommand(server, [b'ci', b'-Aqm', b'non-lfs'])
264 ... runcommand(server, [b'ci', b'-Aqm', b'non-lfs'])
265 *** runcommand pull -qu
265 *** runcommand pull -qu
266 *** runcommand ci -Aqm non-lfs
266 *** runcommand ci -Aqm non-lfs
267
267
268 $ cd ../client
268 $ cd ../client
269
269
270 The difference here is that the push above failed when the extension isn't
271 enabled on the server.
272 $ hg identify http://localhost:$HGPORT
272 $ hg identify http://localhost:$HGPORT
273 8374dc4052cb (lfsremote-on !)
273 8374dc4052cb (lfsremote-on !)
274 1477875038c6 (lfsremote-off !)
274 1477875038c6 (lfsremote-off !)
275
275
276 Don't bother testing the lfsremote-off cases - the server won't be able
277 to launch if there's lfs content and the extension is disabled.
278
278
279 #if lfsremote-on
279 #if lfsremote-on
280
280
281 --------------------------------------------------------------------------------
282 Case #4: client with non-lfs content and the extension disabled; server with
283 lfs content, and the extension enabled.
284
284
285 $ cat >> $HGRCPATH <<EOF
285 $ cat >> $HGRCPATH <<EOF
286 > [extensions]
286 > [extensions]
287 > lfs = !
287 > lfs = !
288 > EOF
288 > EOF
289
289
290 $ hg init $TESTTMP/client4
290 $ hg init $TESTTMP/client4
291 $ cd $TESTTMP/client4
291 $ cd $TESTTMP/client4
292 $ cat >> .hg/hgrc <<EOF
292 $ cat >> .hg/hgrc <<EOF
293 > [paths]
293 > [paths]
294 > default = http://localhost:$HGPORT
294 > default = http://localhost:$HGPORT
295 > EOF
295 > EOF
296 $ echo 'non-lfs' > nonlfs2.txt
296 $ echo 'non-lfs' > nonlfs2.txt
297 $ hg ci -Aqm 'non-lfs'
297 $ hg ci -Aqm 'non-lfs'
298 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
298 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
299 $TESTTMP/server/.hg/requires:lfs
299 $TESTTMP/server/.hg/requires:lfs
300
300
301 $ hg push -q --force
301 $ hg push -q --force
302 warning: repository is unrelated
302 warning: repository is unrelated
303 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
303 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
304 $TESTTMP/server/.hg/requires:lfs
304 $TESTTMP/server/.hg/requires:lfs
305
305
306 $ hg clone http://localhost:$HGPORT $TESTTMP/client4_clone
306 $ hg clone http://localhost:$HGPORT $TESTTMP/client4_clone
307 (remote is using large file support (lfs), but it is explicitly disabled in the local configuration)
307 (remote is using large file support (lfs), but it is explicitly disabled in the local configuration)
308 abort: repository requires features unknown to this Mercurial: lfs!
308 abort: repository requires features unknown to this Mercurial: lfs!
309 (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
309 (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
310 [255]
310 [255]
311 $ grep 'lfs' $TESTTMP/client4_clone/.hg/requires $SERVER_REQUIRES
311 $ grep 'lfs' $TESTTMP/client4_clone/.hg/requires $SERVER_REQUIRES
312 grep: $TESTTMP/client4_clone/.hg/requires: $ENOENT$
312 grep: $TESTTMP/client4_clone/.hg/requires: $ENOENT$
313 $TESTTMP/server/.hg/requires:lfs
313 $TESTTMP/server/.hg/requires:lfs
314 [2]
314 [2]
315
315
316 TODO: fail more gracefully.
317
317
318 $ hg init $TESTTMP/client4_pull
318 $ hg init $TESTTMP/client4_pull
319 $ hg -R $TESTTMP/client4_pull pull http://localhost:$HGPORT
319 $ hg -R $TESTTMP/client4_pull pull http://localhost:$HGPORT
320 pulling from http://localhost:$HGPORT/
320 pulling from http://localhost:$HGPORT/
321 requesting all changes
321 requesting all changes
322 remote: abort: no common changegroup version
322 remote: abort: no common changegroup version
323 abort: pull failed on remote
323 abort: pull failed on remote
324 [255]
324 [255]
325 $ grep 'lfs' $TESTTMP/client4_pull/.hg/requires $SERVER_REQUIRES
325 $ grep 'lfs' $TESTTMP/client4_pull/.hg/requires $SERVER_REQUIRES
326 $TESTTMP/server/.hg/requires:lfs
326 $TESTTMP/server/.hg/requires:lfs
327
327
328 $ hg identify http://localhost:$HGPORT
328 $ hg identify http://localhost:$HGPORT
329 03b080fa9d93
329 03b080fa9d93
330
330
331 --------------------------------------------------------------------------------
332 Case #5: client with non-lfs content and the extension enabled; server with
333 lfs content, and the extension enabled.
334
334
335 $ cat >> $HGRCPATH <<EOF
335 $ cat >> $HGRCPATH <<EOF
336 > [extensions]
336 > [extensions]
337 > lfs =
337 > lfs =
338 > EOF
338 > EOF
339 $ echo 'non-lfs' > nonlfs3.txt
339 $ echo 'non-lfs' > nonlfs3.txt
340 $ hg ci -Aqm 'non-lfs file with lfs client'
340 $ hg ci -Aqm 'non-lfs file with lfs client'
341
341
342 $ hg push -q
342 $ hg push -q
343 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
343 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
344 $TESTTMP/server/.hg/requires:lfs
344 $TESTTMP/server/.hg/requires:lfs
345
345
346 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client5_clone
346 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client5_clone
347 $ grep 'lfs' $TESTTMP/client5_clone/.hg/requires $SERVER_REQUIRES
347 $ grep 'lfs' $TESTTMP/client5_clone/.hg/requires $SERVER_REQUIRES
348 $TESTTMP/client5_clone/.hg/requires:lfs
348 $TESTTMP/client5_clone/.hg/requires:lfs
349 $TESTTMP/server/.hg/requires:lfs
349 $TESTTMP/server/.hg/requires:lfs
350
350
351 $ hg init $TESTTMP/client5_pull
351 $ hg init $TESTTMP/client5_pull
352 $ hg -R $TESTTMP/client5_pull pull -q http://localhost:$HGPORT
352 $ hg -R $TESTTMP/client5_pull pull -q http://localhost:$HGPORT
353 $ grep 'lfs' $TESTTMP/client5_pull/.hg/requires $SERVER_REQUIRES
353 $ grep 'lfs' $TESTTMP/client5_pull/.hg/requires $SERVER_REQUIRES
354 $TESTTMP/client5_pull/.hg/requires:lfs
354 $TESTTMP/client5_pull/.hg/requires:lfs
355 $TESTTMP/server/.hg/requires:lfs
355 $TESTTMP/server/.hg/requires:lfs
356
356
357 $ hg identify http://localhost:$HGPORT
357 $ hg identify http://localhost:$HGPORT
358 c729025cc5e3
358 c729025cc5e3
359
359
360 $ mv $HGRCPATH $HGRCPATH.tmp
360 $ mv $HGRCPATH $HGRCPATH.tmp
361 $ cp $HGRCPATH.orig $HGRCPATH
361 $ cp $HGRCPATH.orig $HGRCPATH
362
362
363 >>> from __future__ import absolute_import
363 >>> from __future__ import absolute_import
364 >>> from hgclient import bprint, check, readchannel, runcommand, stdout
364 >>> from hgclient import bprint, check, readchannel, runcommand, stdout
365 >>> @check
365 >>> @check
366 ... def checkflags(server):
366 ... def checkflags(server):
367 ... readchannel(server)
367 ... readchannel(server)
368 ... bprint(b'')
368 ... bprint(b'')
369 ... bprint(b'# LFS required- both lfs and non-lfs revlogs have 0x2000 flag')
369 ... bprint(b'# LFS required- both lfs and non-lfs revlogs have 0x2000 flag')
370 ... stdout.flush()
370 ... stdout.flush()
371 ... runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R',
371 ... runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R',
372 ... b'../server'])
372 ... b'../server'])
373 ... runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R',
373 ... runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R',
374 ... b'../server'])
374 ... b'../server'])
375 ... runcommand(server, [b'config', b'extensions', b'--cwd',
375 ... runcommand(server, [b'config', b'extensions', b'--cwd',
376 ... b'../server'])
376 ... b'../server'])
377 ...
377 ...
378 ... bprint(b"\n# LFS not enabled- revlogs don't have 0x2000 flag")
378 ... bprint(b"\n# LFS not enabled- revlogs don't have 0x2000 flag")
379 ... stdout.flush()
379 ... stdout.flush()
380 ... runcommand(server, [b'debugprocessors', b'nonlfs3.txt'])
380 ... runcommand(server, [b'debugprocessors', b'nonlfs3.txt'])
381 ... runcommand(server, [b'config', b'extensions'])
381 ... runcommand(server, [b'config', b'extensions'])
382
382
383 # LFS required- both lfs and non-lfs revlogs have 0x2000 flag
383 # LFS required- both lfs and non-lfs revlogs have 0x2000 flag
384 *** runcommand debugprocessors lfs.bin -R ../server
384 *** runcommand debugprocessors lfs.bin -R ../server
385 registered processor '0x8000'
385 registered processor '0x8000'
386 registered processor '0x2000'
386 registered processor '0x2000'
387 *** runcommand debugprocessors nonlfs2.txt -R ../server
387 *** runcommand debugprocessors nonlfs2.txt -R ../server
388 registered processor '0x8000'
388 registered processor '0x8000'
389 registered processor '0x2000'
389 registered processor '0x2000'
390 *** runcommand config extensions --cwd ../server
390 *** runcommand config extensions --cwd ../server
391 extensions.debugprocessors=$TESTTMP/debugprocessors.py
391 extensions.debugprocessors=$TESTTMP/debugprocessors.py
392 extensions.lfs=
392 extensions.lfs=
393
393
394 # LFS not enabled- revlogs don't have 0x2000 flag
394 # LFS not enabled- revlogs don't have 0x2000 flag
395 *** runcommand debugprocessors nonlfs3.txt
395 *** runcommand debugprocessors nonlfs3.txt
396 registered processor '0x8000'
396 registered processor '0x8000'
397 *** runcommand config extensions
397 *** runcommand config extensions
398 extensions.debugprocessors=$TESTTMP/debugprocessors.py
398 extensions.debugprocessors=$TESTTMP/debugprocessors.py
399
399
400 $ rm $HGRCPATH
400 $ rm $HGRCPATH
401 $ mv $HGRCPATH.tmp $HGRCPATH
401 $ mv $HGRCPATH.tmp $HGRCPATH
402
402
403 $ hg clone $TESTTMP/client $TESTTMP/nonlfs -qr 0 --config extensions.lfs=
403 $ hg clone $TESTTMP/client $TESTTMP/nonlfs -qr 0 --config extensions.lfs=
404 $ cat >> $TESTTMP/nonlfs/.hg/hgrc <<EOF
404 $ cat >> $TESTTMP/nonlfs/.hg/hgrc <<EOF
405 > [extensions]
405 > [extensions]
406 > lfs = !
406 > lfs = !
407 > EOF
407 > EOF
408
408
409 >>> from __future__ import absolute_import, print_function
409 >>> from __future__ import absolute_import, print_function
410 >>> from hgclient import bprint, check, readchannel, runcommand, stdout
410 >>> from hgclient import bprint, check, readchannel, runcommand, stdout
411 >>> @check
411 >>> @check
412 ... def checkflags2(server):
412 ... def checkflags2(server):
413 ... readchannel(server)
413 ... readchannel(server)
414 ... bprint(b'')
414 ... bprint(b'')
415 ... bprint(b'# LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag')
415 ... bprint(b'# LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag')
416 ... stdout.flush()
416 ... stdout.flush()
417 ... runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R',
417 ... runcommand(server, [b'debugprocessors', b'lfs.bin', b'-R',
418 ... b'../server'])
418 ... b'../server'])
419 ... runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R',
419 ... runcommand(server, [b'debugprocessors', b'nonlfs2.txt', b'-R',
420 ... b'../server'])
420 ... b'../server'])
421 ... runcommand(server, [b'config', b'extensions', b'--cwd',
421 ... runcommand(server, [b'config', b'extensions', b'--cwd',
422 ... b'../server'])
422 ... b'../server'])
423 ...
423 ...
424 ... bprint(b'\n# LFS enabled without requirement- revlogs have 0x2000 flag')
424 ... bprint(b'\n# LFS enabled without requirement- revlogs have 0x2000 flag')
425 ... stdout.flush()
425 ... stdout.flush()
426 ... runcommand(server, [b'debugprocessors', b'nonlfs3.txt'])
426 ... runcommand(server, [b'debugprocessors', b'nonlfs3.txt'])
427 ... runcommand(server, [b'config', b'extensions'])
427 ... runcommand(server, [b'config', b'extensions'])
428 ...
428 ...
429 ... bprint(b"\n# LFS disabled locally- revlogs don't have 0x2000 flag")
429 ... bprint(b"\n# LFS disabled locally- revlogs don't have 0x2000 flag")
430 ... stdout.flush()
430 ... stdout.flush()
431 ... runcommand(server, [b'debugprocessors', b'nonlfs.txt', b'-R',
431 ... runcommand(server, [b'debugprocessors', b'nonlfs.txt', b'-R',
432 ... b'../nonlfs'])
432 ... b'../nonlfs'])
433 ... runcommand(server, [b'config', b'extensions', b'--cwd',
433 ... runcommand(server, [b'config', b'extensions', b'--cwd',
434 ... b'../nonlfs'])
434 ... b'../nonlfs'])
435
435
436 # LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag
436 # LFS enabled- both lfs and non-lfs revlogs have 0x2000 flag
437 *** runcommand debugprocessors lfs.bin -R ../server
437 *** runcommand debugprocessors lfs.bin -R ../server
438 registered processor '0x8000'
438 registered processor '0x8000'
439 registered processor '0x2000'
439 registered processor '0x2000'
440 *** runcommand debugprocessors nonlfs2.txt -R ../server
440 *** runcommand debugprocessors nonlfs2.txt -R ../server
441 registered processor '0x8000'
441 registered processor '0x8000'
442 registered processor '0x2000'
442 registered processor '0x2000'
443 *** runcommand config extensions --cwd ../server
443 *** runcommand config extensions --cwd ../server
444 extensions.debugprocessors=$TESTTMP/debugprocessors.py
444 extensions.debugprocessors=$TESTTMP/debugprocessors.py
445 extensions.lfs=
445 extensions.lfs=
446
446
447 # LFS enabled without requirement- revlogs have 0x2000 flag
447 # LFS enabled without requirement- revlogs have 0x2000 flag
448 *** runcommand debugprocessors nonlfs3.txt
448 *** runcommand debugprocessors nonlfs3.txt
449 registered processor '0x8000'
449 registered processor '0x8000'
450 registered processor '0x2000'
450 registered processor '0x2000'
451 *** runcommand config extensions
451 *** runcommand config extensions
452 extensions.debugprocessors=$TESTTMP/debugprocessors.py
452 extensions.debugprocessors=$TESTTMP/debugprocessors.py
453 extensions.lfs=
453 extensions.lfs=
454
454
455 # LFS disabled locally- revlogs don't have 0x2000 flag
455 # LFS disabled locally- revlogs don't have 0x2000 flag
456 *** runcommand debugprocessors nonlfs.txt -R ../nonlfs
456 *** runcommand debugprocessors nonlfs.txt -R ../nonlfs
457 registered processor '0x8000'
457 registered processor '0x8000'
458 *** runcommand config extensions --cwd ../nonlfs
458 *** runcommand config extensions --cwd ../nonlfs
459 extensions.debugprocessors=$TESTTMP/debugprocessors.py
459 extensions.debugprocessors=$TESTTMP/debugprocessors.py
460 extensions.lfs=!
460 extensions.lfs=!
461
461
462 --------------------------------------------------------------------------------
463 Case #6: client with lfs content and the extension enabled; server with
464 lfs content, and the extension enabled.
465
465
466 $ echo 'this is another lfs file' > lfs2.txt
466 $ echo 'this is another lfs file' > lfs2.txt
467 $ hg ci -Aqm 'lfs file with lfs client'
467 $ hg ci -Aqm 'lfs file with lfs client'
468
468
469 $ hg --config paths.default= push -v http://localhost:$HGPORT
469 $ hg --config paths.default= push -v http://localhost:$HGPORT
470 pushing to http://localhost:$HGPORT/
470 pushing to http://localhost:$HGPORT/
471 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
471 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
472 searching for changes
472 searching for changes
473 remote has heads on branch 'default' that are not known locally: 8374dc4052cb
473 remote has heads on branch 'default' that are not known locally: 8374dc4052cb
474 lfs: uploading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
474 lfs: uploading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
475 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
475 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
476 lfs: uploaded 1 files (25 bytes)
476 lfs: uploaded 1 files (25 bytes)
477 1 changesets found
477 1 changesets found
478 uncompressed size of bundle content:
478 uncompressed size of bundle content:
479 206 (changelog)
479 206 (changelog)
480 172 (manifests)
480 172 (manifests)
481 275 lfs2.txt
481 275 lfs2.txt
482 remote: adding changesets
482 remote: adding changesets
483 remote: adding manifests
483 remote: adding manifests
484 remote: adding file changes
484 remote: adding file changes
485 remote: added 1 changesets with 1 changes to 1 files
485 remote: added 1 changesets with 1 changes to 1 files
486 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
486 $ grep 'lfs' .hg/requires $SERVER_REQUIRES
487 .hg/requires:lfs
487 .hg/requires:lfs
488 $TESTTMP/server/.hg/requires:lfs
488 $TESTTMP/server/.hg/requires:lfs
489
489
490 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client6_clone
490 $ hg clone -q http://localhost:$HGPORT $TESTTMP/client6_clone
491 $ grep 'lfs' $TESTTMP/client6_clone/.hg/requires $SERVER_REQUIRES
491 $ grep 'lfs' $TESTTMP/client6_clone/.hg/requires $SERVER_REQUIRES
492 $TESTTMP/client6_clone/.hg/requires:lfs
492 $TESTTMP/client6_clone/.hg/requires:lfs
493 $TESTTMP/server/.hg/requires:lfs
493 $TESTTMP/server/.hg/requires:lfs
494
494
495 $ hg init $TESTTMP/client6_pull
495 $ hg init $TESTTMP/client6_pull
496 $ hg -R $TESTTMP/client6_pull pull -u -v http://localhost:$HGPORT
496 $ hg -R $TESTTMP/client6_pull pull -u -v http://localhost:$HGPORT
497 pulling from http://localhost:$HGPORT/
497 pulling from http://localhost:$HGPORT/
498 requesting all changes
498 requesting all changes
499 adding changesets
499 adding changesets
500 adding manifests
500 adding manifests
501 adding file changes
501 adding file changes
502 added 6 changesets with 5 changes to 5 files (+1 heads)
502 added 6 changesets with 5 changes to 5 files (+1 heads)
503 calling hook pretxnchangegroup.lfs: hgext.lfs.checkrequireslfs
503 calling hook pretxnchangegroup.lfs: hgext.lfs.checkrequireslfs
504 new changesets d437e1d24fbd:d3b84d50eacb
504 new changesets d437e1d24fbd:d3b84d50eacb
505 resolving manifests
505 resolving manifests
506 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
506 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
507 lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
507 lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
508 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
508 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
509 lfs: downloaded 1 files (25 bytes)
509 lfs: downloaded 1 files (25 bytes)
510 getting lfs2.txt
510 getting lfs2.txt
511 lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
511 lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
512 getting nonlfs2.txt
512 getting nonlfs2.txt
513 getting nonlfs3.txt
513 getting nonlfs3.txt
514 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
514 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
515 updated to "d3b84d50eacb: lfs file with lfs client"
515 updated to "d3b84d50eacb: lfs file with lfs client"
516 1 other heads for branch "default"
516 1 other heads for branch "default"
517 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
517 (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
518 $ grep 'lfs' $TESTTMP/client6_pull/.hg/requires $SERVER_REQUIRES
518 $ grep 'lfs' $TESTTMP/client6_pull/.hg/requires $SERVER_REQUIRES
519 $TESTTMP/client6_pull/.hg/requires:lfs
519 $TESTTMP/client6_pull/.hg/requires:lfs
520 $TESTTMP/server/.hg/requires:lfs
520 $TESTTMP/server/.hg/requires:lfs
521
521
522 $ hg identify http://localhost:$HGPORT
522 $ hg identify http://localhost:$HGPORT
523 d3b84d50eacb
523 d3b84d50eacb
524
524
525 --------------------------------------------------------------------------------
526 Misc: process dies early if a requirement exists and the extension is disabled
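A minimal sketch of that early bail-out (hypothetical, not the actual localrepo
startup code): any entry in .hg/requires that the running Mercurial does not
support aborts the process before command logic runs.

    def checkrequirements(repo_requirements, supported):
        missing = set(repo_requirements) - set(supported)
        if missing:
            raise Exception(
                "repository requires features unknown to this Mercurial: %s"
                % ", ".join(sorted(missing)))
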
527
527
528 $ hg --config extensions.lfs=! summary
528 $ hg --config extensions.lfs=! summary
529 abort: repository requires features unknown to this Mercurial: lfs!
529 abort: repository requires features unknown to this Mercurial: lfs!
530 (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
530 (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
531 [255]
531 [255]
532
532
533 $ echo 'this is an lfs file' > $TESTTMP/client6_clone/lfspair1.bin
533 $ echo 'this is an lfs file' > $TESTTMP/client6_clone/lfspair1.bin
534 $ echo 'this is an lfs file too' > $TESTTMP/client6_clone/lfspair2.bin
534 $ echo 'this is an lfs file too' > $TESTTMP/client6_clone/lfspair2.bin
535 $ hg -R $TESTTMP/client6_clone ci -Aqm 'add lfs pair'
535 $ hg -R $TESTTMP/client6_clone ci -Aqm 'add lfs pair'
536 $ hg -R $TESTTMP/client6_clone push -q
536 $ hg -R $TESTTMP/client6_clone push -q
537
537
538 $ hg clone -qU http://localhost:$HGPORT $TESTTMP/bulkfetch
538 $ hg clone -qU http://localhost:$HGPORT $TESTTMP/bulkfetch
539
539
540 Cat doesn't prefetch unless data is needed (e.g. '-T {rawdata}' doesn't need it)
541
542 $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair1.bin -T '{rawdata}\n{path}\n'
543 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
544 version https://git-lfs.github.com/spec/v1
545 oid sha256:cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
546 size 20
547 x-is-binary 0
548
549 lfspair1.bin
550
551 $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair1.bin -T json
552 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
553 [lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
554 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
555 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
556 lfs: downloaded 1 files (20 bytes)
557 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
558
559 {
560 "data": "this is an lfs file\n",
561 "path": "lfspair1.bin",
562 "rawdata": "version https://git-lfs.github.com/spec/v1\noid sha256:cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782\nsize 20\nx-is-binary 0\n"
563 }
564 ]
565
566 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
567
568 $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair1.bin -T '{data}\n'
569 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
570 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
571 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
572 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
573 lfs: downloaded 1 files (20 bytes)
574 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
575 this is an lfs file
576
577 $ hg --cwd $TESTTMP/bulkfetch cat -vr tip lfspair2.bin
578 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
579 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
580 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
581 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
582 lfs: downloaded 1 files (24 bytes)
583 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
584 this is an lfs file too
585
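The behaviour demonstrated above can be sketched like this (hypothetical helper
names, not the actual cmdutil/scmutil code): a prefetch is only requested when
the output needs the decoded file contents ({data} or the default output);
metadata-only templates such as {rawdata} or {path} can be answered from the
pointer stored in the revlog, so no download happens.

    def cat_needs_prefetch(template_keywords):
        # Only {data} forces the real (lfs) file contents to be materialized;
        # {rawdata} and {path} are available without contacting the blob store.
        return 'data' in template_keywords

    def maybe_prefetch(repo, ctx, files, template_keywords, prefetch):
        # 'prefetch' stands in for the prefetch hook (scmutil.prefetchfiles);
        # its exact signature here is an assumption.
        if cat_needs_prefetch(template_keywords):
            prefetch(repo, ctx, files)
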
586 Export will prefetch all needed files across all needed revisions
541
587
588 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
542 $ hg -R $TESTTMP/bulkfetch -v export -r 0:tip -o all.export
589 $ hg -R $TESTTMP/bulkfetch -v export -r 0:tip -o all.export
543 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
590 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
544 exporting patches:
591 exporting patches:
545 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
592 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
546 lfs: need to transfer 4 objects (92 bytes)
593 lfs: need to transfer 4 objects (92 bytes)
547 lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
594 lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
548 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
595 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
549 lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
596 lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
550 lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
597 lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
551 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
598 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
552 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
599 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
553 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
600 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
554 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
601 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
555 lfs: downloaded 4 files (92 bytes)
602 lfs: downloaded 4 files (92 bytes)
556 all.export
603 all.export
557 lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
604 lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
558 lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
605 lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
559 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
606 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
560 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
607 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
561
608
609 Export with selected files is used with `extdiff --patch`
563
610
564 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
611 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
565 $ hg --config extensions.extdiff= \
612 $ hg --config extensions.extdiff= \
566 > -R $TESTTMP/bulkfetch -v extdiff -r 2:tip --patch $TESTTMP/bulkfetch/lfs.bin
613 > -R $TESTTMP/bulkfetch -v extdiff -r 2:tip --patch $TESTTMP/bulkfetch/lfs.bin
567 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
614 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
568 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
615 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
569 lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
616 lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
570 lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
617 lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
571 lfs: downloaded 1 files (23 bytes)
618 lfs: downloaded 1 files (23 bytes)
572 */hg-8374dc4052cb.patch (glob)
619 */hg-8374dc4052cb.patch (glob)
573 lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
620 lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
574 */hg-9640b57e77b1.patch (glob)
621 */hg-9640b57e77b1.patch (glob)
575 --- */hg-8374dc4052cb.patch * (glob)
622 --- */hg-8374dc4052cb.patch * (glob)
576 +++ */hg-9640b57e77b1.patch * (glob)
623 +++ */hg-9640b57e77b1.patch * (glob)
577 @@ -2,12 +2,7 @@
624 @@ -2,12 +2,7 @@
578 # User test
625 # User test
579 # Date 0 0
626 # Date 0 0
580 # Thu Jan 01 00:00:00 1970 +0000
627 # Thu Jan 01 00:00:00 1970 +0000
581 -# Node ID 8374dc4052cbd388e79d9dc4ddb29784097aa354
628 -# Node ID 8374dc4052cbd388e79d9dc4ddb29784097aa354
582 -# Parent 1477875038c60152e391238920a16381c627b487
629 -# Parent 1477875038c60152e391238920a16381c627b487
583 -lfs
630 -lfs
584 +# Node ID 9640b57e77b14c3a0144fb4478b6cc13e13ea0d1
631 +# Node ID 9640b57e77b14c3a0144fb4478b6cc13e13ea0d1
585 +# Parent d3b84d50eacbd56638e11abce6b8616aaba54420
632 +# Parent d3b84d50eacbd56638e11abce6b8616aaba54420
586 +add lfs pair
633 +add lfs pair
587
634
588 -diff -r 1477875038c6 -r 8374dc4052cb lfs.bin
635 -diff -r 1477875038c6 -r 8374dc4052cb lfs.bin
589 ---- /dev/null Thu Jan 01 00:00:00 1970 +0000
636 ---- /dev/null Thu Jan 01 00:00:00 1970 +0000
590 -+++ b/lfs.bin Thu Jan 01 00:00:00 1970 +0000
637 -+++ b/lfs.bin Thu Jan 01 00:00:00 1970 +0000
591 -@@ -0,0 +1,1 @@
638 -@@ -0,0 +1,1 @@
592 -+this is a big lfs file
639 -+this is a big lfs file
593 cleaning up temp directory
640 cleaning up temp directory
594 [1]
641 [1]
595
642
643 Diff will prefetch files
597
644
598 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
645 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
599 $ hg -R $TESTTMP/bulkfetch -v diff -r 2:tip
646 $ hg -R $TESTTMP/bulkfetch -v diff -r 2:tip
600 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
647 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
601 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
648 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
602 lfs: need to transfer 4 objects (92 bytes)
649 lfs: need to transfer 4 objects (92 bytes)
603 lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
650 lfs: downloading a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de (25 bytes)
604 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
651 lfs: processed: a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de
605 lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
652 lfs: downloading bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc (23 bytes)
606 lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
653 lfs: processed: bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc
607 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
654 lfs: downloading cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 (20 bytes)
608 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
655 lfs: processed: cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782
609 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
656 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
610 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
657 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
611 lfs: downloaded 4 files (92 bytes)
658 lfs: downloaded 4 files (92 bytes)
612 lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
659 lfs: found bed80f00180ac404b843628ab56a1c1984d6145c391cd1628a7dd7d2598d71fc in the local lfs store
613 lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
660 lfs: found a82f1c5cea0d40e3bb3a849686bb4e6ae47ca27e614de55c1ed0325698ef68de in the local lfs store
614 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
661 lfs: found cf1b2787b74e66547d931b6ebe28ff63303e803cb2baa14a8f57c4383d875782 in the local lfs store
615 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
662 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
616 diff -r 8374dc4052cb -r 9640b57e77b1 lfs.bin
663 diff -r 8374dc4052cb -r 9640b57e77b1 lfs.bin
617 --- a/lfs.bin Thu Jan 01 00:00:00 1970 +0000
664 --- a/lfs.bin Thu Jan 01 00:00:00 1970 +0000
618 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
665 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
619 @@ -1,1 +0,0 @@
666 @@ -1,1 +0,0 @@
620 -this is a big lfs file
667 -this is a big lfs file
621 diff -r 8374dc4052cb -r 9640b57e77b1 lfs2.txt
668 diff -r 8374dc4052cb -r 9640b57e77b1 lfs2.txt
622 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
669 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
623 +++ b/lfs2.txt Thu Jan 01 00:00:00 1970 +0000
670 +++ b/lfs2.txt Thu Jan 01 00:00:00 1970 +0000
624 @@ -0,0 +1,1 @@
671 @@ -0,0 +1,1 @@
625 +this is another lfs file
672 +this is another lfs file
626 diff -r 8374dc4052cb -r 9640b57e77b1 lfspair1.bin
673 diff -r 8374dc4052cb -r 9640b57e77b1 lfspair1.bin
627 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
674 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
628 +++ b/lfspair1.bin Thu Jan 01 00:00:00 1970 +0000
675 +++ b/lfspair1.bin Thu Jan 01 00:00:00 1970 +0000
629 @@ -0,0 +1,1 @@
676 @@ -0,0 +1,1 @@
630 +this is an lfs file
677 +this is an lfs file
631 diff -r 8374dc4052cb -r 9640b57e77b1 lfspair2.bin
678 diff -r 8374dc4052cb -r 9640b57e77b1 lfspair2.bin
632 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
679 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
633 +++ b/lfspair2.bin Thu Jan 01 00:00:00 1970 +0000
680 +++ b/lfspair2.bin Thu Jan 01 00:00:00 1970 +0000
634 @@ -0,0 +1,1 @@
681 @@ -0,0 +1,1 @@
635 +this is an lfs file too
682 +this is an lfs file too
636 diff -r 8374dc4052cb -r 9640b57e77b1 nonlfs.txt
683 diff -r 8374dc4052cb -r 9640b57e77b1 nonlfs.txt
637 --- a/nonlfs.txt Thu Jan 01 00:00:00 1970 +0000
684 --- a/nonlfs.txt Thu Jan 01 00:00:00 1970 +0000
638 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
685 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
639 @@ -1,1 +0,0 @@
686 @@ -1,1 +0,0 @@
640 -non-lfs
687 -non-lfs
641 diff -r 8374dc4052cb -r 9640b57e77b1 nonlfs3.txt
688 diff -r 8374dc4052cb -r 9640b57e77b1 nonlfs3.txt
642 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
689 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
643 +++ b/nonlfs3.txt Thu Jan 01 00:00:00 1970 +0000
690 +++ b/nonlfs3.txt Thu Jan 01 00:00:00 1970 +0000
644 @@ -0,0 +1,1 @@
691 @@ -0,0 +1,1 @@
645 +non-lfs
692 +non-lfs
646
693
694 Only the files required by diff are prefetched
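A sketch of the idea (hypothetical; assumes a matcher built from the
command-line file patterns): the prefetch set is intersected with the files the
diff will actually read, so unrelated lfs blobs stay on the server.

    def files_to_prefetch(ctx, match):
        # Only queue files that exist in this context and match the
        # user-supplied patterns (here, lfspair2.bin).
        return [f for f in ctx.files() if f in ctx and match(f)]
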
648
695
649 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
696 $ rm -r $TESTTMP/bulkfetch/.hg/store/lfs
650 $ hg -R $TESTTMP/bulkfetch -v diff -r 2:tip $TESTTMP/bulkfetch/lfspair2.bin
697 $ hg -R $TESTTMP/bulkfetch -v diff -r 2:tip $TESTTMP/bulkfetch/lfspair2.bin
651 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
698 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
652 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
699 lfs: assuming remote store: http://localhost:$HGPORT/.git/info/lfs
653 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
700 lfs: downloading d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e (24 bytes)
654 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
701 lfs: processed: d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e
655 lfs: downloaded 1 files (24 bytes)
702 lfs: downloaded 1 files (24 bytes)
656 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
703 lfs: found d96eda2c74b56e95cfb5ffb66b6503e198cc6fc4a09dc877de925feebc65786e in the local lfs store
657 diff -r 8374dc4052cb -r 9640b57e77b1 lfspair2.bin
704 diff -r 8374dc4052cb -r 9640b57e77b1 lfspair2.bin
658 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
705 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
659 +++ b/lfspair2.bin Thu Jan 01 00:00:00 1970 +0000
706 +++ b/lfspair2.bin Thu Jan 01 00:00:00 1970 +0000
660 @@ -0,0 +1,1 @@
707 @@ -0,0 +1,1 @@
661 +this is an lfs file too
708 +this is an lfs file too
662
709
663 #endif
710 #endif
664
711
665 $ "$PYTHON" $TESTDIR/killdaemons.py $DAEMON_PIDS
712 $ "$PYTHON" $TESTDIR/killdaemons.py $DAEMON_PIDS
666
713
667 $ cat $TESTTMP/errors.log
714 $ cat $TESTTMP/errors.log