py3: use pycompat.byteskwargs() to convert kwargs' keys to bytes...
Pulkit Goyal
r33100:05906b8e default
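The change below adds a single line near the top of dorecord() (new line 224): opts = pycompat.byteskwargs(opts). On Python 3 the **opts keyword arguments arrive with str keys, while the rest of cmdutil looks options up with bytes keys (e.g. opts.get('user'), whose literal is loaded as bytes by Mercurial's Python 3 source transformer), so the keys are converted once at the function boundary. A minimal sketch of the idea, using a simplified stand-in for pycompat.byteskwargs() rather than Mercurial's actual helper:

# Illustrative sketch only, not Mercurial's implementation.
def byteskwargs_sketch(dic):
    # Re-encode str keys to bytes; latin-1 round-trips all byte values.
    return {k.encode('latin-1'): v for k, v in dic.items()}

def dorecord_sketch(**opts):
    opts = byteskwargs_sketch(opts)   # same idea as the added line 224
    return opts.get(b'user')          # bytes keys from here on

assert dorecord_sketch(user='alice') == 'alice'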
@@ -1,3600 +1,3601 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import itertools
11 import itertools
12 import os
12 import os
13 import re
13 import re
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23
23
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 changelog,
26 changelog,
27 copies,
27 copies,
28 crecord as crecordmod,
28 crecord as crecordmod,
29 encoding,
29 encoding,
30 error,
30 error,
31 formatter,
31 formatter,
32 graphmod,
32 graphmod,
33 match as matchmod,
33 match as matchmod,
34 obsolete,
34 obsolete,
35 patch,
35 patch,
36 pathutil,
36 pathutil,
37 phases,
37 phases,
38 pycompat,
38 pycompat,
39 registrar,
39 registrar,
40 repair,
40 repair,
41 revlog,
41 revlog,
42 revset,
42 revset,
43 scmutil,
43 scmutil,
44 smartset,
44 smartset,
45 templatekw,
45 templatekw,
46 templater,
46 templater,
47 util,
47 util,
48 vfs as vfsmod,
48 vfs as vfsmod,
49 )
49 )
50 stringio = util.stringio
50 stringio = util.stringio
51
51
52 # templates of common command options
52 # templates of common command options
53
53
54 dryrunopts = [
54 dryrunopts = [
55 ('n', 'dry-run', None,
55 ('n', 'dry-run', None,
56 _('do not perform actions, just print output')),
56 _('do not perform actions, just print output')),
57 ]
57 ]
58
58
59 remoteopts = [
59 remoteopts = [
60 ('e', 'ssh', '',
60 ('e', 'ssh', '',
61 _('specify ssh command to use'), _('CMD')),
61 _('specify ssh command to use'), _('CMD')),
62 ('', 'remotecmd', '',
62 ('', 'remotecmd', '',
63 _('specify hg command to run on the remote side'), _('CMD')),
63 _('specify hg command to run on the remote side'), _('CMD')),
64 ('', 'insecure', None,
64 ('', 'insecure', None,
65 _('do not verify server certificate (ignoring web.cacerts config)')),
65 _('do not verify server certificate (ignoring web.cacerts config)')),
66 ]
66 ]
67
67
68 walkopts = [
68 walkopts = [
69 ('I', 'include', [],
69 ('I', 'include', [],
70 _('include names matching the given patterns'), _('PATTERN')),
70 _('include names matching the given patterns'), _('PATTERN')),
71 ('X', 'exclude', [],
71 ('X', 'exclude', [],
72 _('exclude names matching the given patterns'), _('PATTERN')),
72 _('exclude names matching the given patterns'), _('PATTERN')),
73 ]
73 ]
74
74
75 commitopts = [
75 commitopts = [
76 ('m', 'message', '',
76 ('m', 'message', '',
77 _('use text as commit message'), _('TEXT')),
77 _('use text as commit message'), _('TEXT')),
78 ('l', 'logfile', '',
78 ('l', 'logfile', '',
79 _('read commit message from file'), _('FILE')),
79 _('read commit message from file'), _('FILE')),
80 ]
80 ]
81
81
82 commitopts2 = [
82 commitopts2 = [
83 ('d', 'date', '',
83 ('d', 'date', '',
84 _('record the specified date as commit date'), _('DATE')),
84 _('record the specified date as commit date'), _('DATE')),
85 ('u', 'user', '',
85 ('u', 'user', '',
86 _('record the specified user as committer'), _('USER')),
86 _('record the specified user as committer'), _('USER')),
87 ]
87 ]
88
88
89 # hidden for now
89 # hidden for now
90 formatteropts = [
90 formatteropts = [
91 ('T', 'template', '',
91 ('T', 'template', '',
92 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
92 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
93 ]
93 ]
94
94
95 templateopts = [
95 templateopts = [
96 ('', 'style', '',
96 ('', 'style', '',
97 _('display using template map file (DEPRECATED)'), _('STYLE')),
97 _('display using template map file (DEPRECATED)'), _('STYLE')),
98 ('T', 'template', '',
98 ('T', 'template', '',
99 _('display with template'), _('TEMPLATE')),
99 _('display with template'), _('TEMPLATE')),
100 ]
100 ]
101
101
102 logopts = [
102 logopts = [
103 ('p', 'patch', None, _('show patch')),
103 ('p', 'patch', None, _('show patch')),
104 ('g', 'git', None, _('use git extended diff format')),
104 ('g', 'git', None, _('use git extended diff format')),
105 ('l', 'limit', '',
105 ('l', 'limit', '',
106 _('limit number of changes displayed'), _('NUM')),
106 _('limit number of changes displayed'), _('NUM')),
107 ('M', 'no-merges', None, _('do not show merges')),
107 ('M', 'no-merges', None, _('do not show merges')),
108 ('', 'stat', None, _('output diffstat-style summary of changes')),
108 ('', 'stat', None, _('output diffstat-style summary of changes')),
109 ('G', 'graph', None, _("show the revision DAG")),
109 ('G', 'graph', None, _("show the revision DAG")),
110 ] + templateopts
110 ] + templateopts
111
111
112 diffopts = [
112 diffopts = [
113 ('a', 'text', None, _('treat all files as text')),
113 ('a', 'text', None, _('treat all files as text')),
114 ('g', 'git', None, _('use git extended diff format')),
114 ('g', 'git', None, _('use git extended diff format')),
115 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
115 ('', 'binary', None, _('generate binary diffs in git mode (default)')),
116 ('', 'nodates', None, _('omit dates from diff headers'))
116 ('', 'nodates', None, _('omit dates from diff headers'))
117 ]
117 ]
118
118
119 diffwsopts = [
119 diffwsopts = [
120 ('w', 'ignore-all-space', None,
120 ('w', 'ignore-all-space', None,
121 _('ignore white space when comparing lines')),
121 _('ignore white space when comparing lines')),
122 ('b', 'ignore-space-change', None,
122 ('b', 'ignore-space-change', None,
123 _('ignore changes in the amount of white space')),
123 _('ignore changes in the amount of white space')),
124 ('B', 'ignore-blank-lines', None,
124 ('B', 'ignore-blank-lines', None,
125 _('ignore changes whose lines are all blank')),
125 _('ignore changes whose lines are all blank')),
126 ]
126 ]
127
127
128 diffopts2 = [
128 diffopts2 = [
129 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
129 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
130 ('p', 'show-function', None, _('show which function each change is in')),
130 ('p', 'show-function', None, _('show which function each change is in')),
131 ('', 'reverse', None, _('produce a diff that undoes the changes')),
131 ('', 'reverse', None, _('produce a diff that undoes the changes')),
132 ] + diffwsopts + [
132 ] + diffwsopts + [
133 ('U', 'unified', '',
133 ('U', 'unified', '',
134 _('number of lines of context to show'), _('NUM')),
134 _('number of lines of context to show'), _('NUM')),
135 ('', 'stat', None, _('output diffstat-style summary of changes')),
135 ('', 'stat', None, _('output diffstat-style summary of changes')),
136 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
136 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
137 ]
137 ]
138
138
139 mergetoolopts = [
139 mergetoolopts = [
140 ('t', 'tool', '', _('specify merge tool')),
140 ('t', 'tool', '', _('specify merge tool')),
141 ]
141 ]
142
142
143 similarityopts = [
143 similarityopts = [
144 ('s', 'similarity', '',
144 ('s', 'similarity', '',
145 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
145 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
146 ]
146 ]
147
147
148 subrepoopts = [
148 subrepoopts = [
149 ('S', 'subrepos', None,
149 ('S', 'subrepos', None,
150 _('recurse into subrepositories'))
150 _('recurse into subrepositories'))
151 ]
151 ]
152
152
153 debugrevlogopts = [
153 debugrevlogopts = [
154 ('c', 'changelog', False, _('open changelog')),
154 ('c', 'changelog', False, _('open changelog')),
155 ('m', 'manifest', False, _('open manifest')),
155 ('m', 'manifest', False, _('open manifest')),
156 ('', 'dir', '', _('open directory manifest')),
156 ('', 'dir', '', _('open directory manifest')),
157 ]
157 ]
158
158
159 # special string such that everything below this line will be ingored in the
159 # special string such that everything below this line will be ingored in the
160 # editor text
160 # editor text
161 _linebelow = "^HG: ------------------------ >8 ------------------------$"
161 _linebelow = "^HG: ------------------------ >8 ------------------------$"
162
162
163 def ishunk(x):
163 def ishunk(x):
164 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
164 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
165 return isinstance(x, hunkclasses)
165 return isinstance(x, hunkclasses)
166
166
167 def newandmodified(chunks, originalchunks):
167 def newandmodified(chunks, originalchunks):
168 newlyaddedandmodifiedfiles = set()
168 newlyaddedandmodifiedfiles = set()
169 for chunk in chunks:
169 for chunk in chunks:
170 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
170 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
171 originalchunks:
171 originalchunks:
172 newlyaddedandmodifiedfiles.add(chunk.header.filename())
172 newlyaddedandmodifiedfiles.add(chunk.header.filename())
173 return newlyaddedandmodifiedfiles
173 return newlyaddedandmodifiedfiles
174
174
175 def parsealiases(cmd):
175 def parsealiases(cmd):
176 return cmd.lstrip("^").split("|")
176 return cmd.lstrip("^").split("|")
177
177
178 def setupwrapcolorwrite(ui):
178 def setupwrapcolorwrite(ui):
179 # wrap ui.write so diff output can be labeled/colorized
179 # wrap ui.write so diff output can be labeled/colorized
180 def wrapwrite(orig, *args, **kw):
180 def wrapwrite(orig, *args, **kw):
181 label = kw.pop('label', '')
181 label = kw.pop('label', '')
182 for chunk, l in patch.difflabel(lambda: args):
182 for chunk, l in patch.difflabel(lambda: args):
183 orig(chunk, label=label + l)
183 orig(chunk, label=label + l)
184
184
185 oldwrite = ui.write
185 oldwrite = ui.write
186 def wrap(*args, **kwargs):
186 def wrap(*args, **kwargs):
187 return wrapwrite(oldwrite, *args, **kwargs)
187 return wrapwrite(oldwrite, *args, **kwargs)
188 setattr(ui, 'write', wrap)
188 setattr(ui, 'write', wrap)
189 return oldwrite
189 return oldwrite
190
190
191 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
191 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
192 if usecurses:
192 if usecurses:
193 if testfile:
193 if testfile:
194 recordfn = crecordmod.testdecorator(testfile,
194 recordfn = crecordmod.testdecorator(testfile,
195 crecordmod.testchunkselector)
195 crecordmod.testchunkselector)
196 else:
196 else:
197 recordfn = crecordmod.chunkselector
197 recordfn = crecordmod.chunkselector
198
198
199 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
199 return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
200
200
201 else:
201 else:
202 return patch.filterpatch(ui, originalhunks, operation)
202 return patch.filterpatch(ui, originalhunks, operation)
203
203
204 def recordfilter(ui, originalhunks, operation=None):
204 def recordfilter(ui, originalhunks, operation=None):
205 """ Prompts the user to filter the originalhunks and return a list of
205 """ Prompts the user to filter the originalhunks and return a list of
206 selected hunks.
206 selected hunks.
207 *operation* is used for to build ui messages to indicate the user what
207 *operation* is used for to build ui messages to indicate the user what
208 kind of filtering they are doing: reverting, committing, shelving, etc.
208 kind of filtering they are doing: reverting, committing, shelving, etc.
209 (see patch.filterpatch).
209 (see patch.filterpatch).
210 """
210 """
211 usecurses = crecordmod.checkcurses(ui)
211 usecurses = crecordmod.checkcurses(ui)
212 testfile = ui.config('experimental', 'crecordtest', None)
212 testfile = ui.config('experimental', 'crecordtest', None)
213 oldwrite = setupwrapcolorwrite(ui)
213 oldwrite = setupwrapcolorwrite(ui)
214 try:
214 try:
215 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
215 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
216 testfile, operation)
216 testfile, operation)
217 finally:
217 finally:
218 ui.write = oldwrite
218 ui.write = oldwrite
219 return newchunks, newopts
219 return newchunks, newopts
220
220
221 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
221 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
222 filterfn, *pats, **opts):
222 filterfn, *pats, **opts):
223 from . import merge as mergemod
223 from . import merge as mergemod
224 opts = pycompat.byteskwargs(opts)
224 opts = pycompat.byteskwargs(opts)
225 if not ui.interactive():
225 if not ui.interactive():
226 if cmdsuggest:
226 if cmdsuggest:
227 msg = _('running non-interactively, use %s instead') % cmdsuggest
227 msg = _('running non-interactively, use %s instead') % cmdsuggest
228 else:
228 else:
229 msg = _('running non-interactively')
229 msg = _('running non-interactively')
230 raise error.Abort(msg)
230 raise error.Abort(msg)
231
231
232 # make sure username is set before going interactive
232 # make sure username is set before going interactive
233 if not opts.get('user'):
233 if not opts.get('user'):
234 ui.username() # raise exception, username not provided
234 ui.username() # raise exception, username not provided
235
235
236 def recordfunc(ui, repo, message, match, opts):
236 def recordfunc(ui, repo, message, match, opts):
237 """This is generic record driver.
237 """This is generic record driver.
238
238
239 Its job is to interactively filter local changes, and
239 Its job is to interactively filter local changes, and
240 accordingly prepare working directory into a state in which the
240 accordingly prepare working directory into a state in which the
241 job can be delegated to a non-interactive commit command such as
241 job can be delegated to a non-interactive commit command such as
242 'commit' or 'qrefresh'.
242 'commit' or 'qrefresh'.
243
243
244 After the actual job is done by non-interactive command, the
244 After the actual job is done by non-interactive command, the
245 working directory is restored to its original state.
245 working directory is restored to its original state.
246
246
247 In the end we'll record interesting changes, and everything else
247 In the end we'll record interesting changes, and everything else
248 will be left in place, so the user can continue working.
248 will be left in place, so the user can continue working.
249 """
249 """
250
250
251 checkunfinished(repo, commit=True)
251 checkunfinished(repo, commit=True)
252 wctx = repo[None]
252 wctx = repo[None]
253 merge = len(wctx.parents()) > 1
253 merge = len(wctx.parents()) > 1
254 if merge:
254 if merge:
255 raise error.Abort(_('cannot partially commit a merge '
255 raise error.Abort(_('cannot partially commit a merge '
256 '(use "hg commit" instead)'))
256 '(use "hg commit" instead)'))
257
257
258 def fail(f, msg):
258 def fail(f, msg):
259 raise error.Abort('%s: %s' % (f, msg))
259 raise error.Abort('%s: %s' % (f, msg))
260
260
261 force = opts.get('force')
261 force = opts.get('force')
262 if not force:
262 if not force:
263 vdirs = []
263 vdirs = []
264 match.explicitdir = vdirs.append
264 match.explicitdir = vdirs.append
265 match.bad = fail
265 match.bad = fail
266
266
267 status = repo.status(match=match)
267 status = repo.status(match=match)
268 if not force:
268 if not force:
269 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
269 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
270 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
270 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
271 diffopts.nodates = True
271 diffopts.nodates = True
272 diffopts.git = True
272 diffopts.git = True
273 diffopts.showfunc = True
273 diffopts.showfunc = True
274 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
274 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
275 originalchunks = patch.parsepatch(originaldiff)
275 originalchunks = patch.parsepatch(originaldiff)
276
276
277 # 1. filter patch, since we are intending to apply subset of it
277 # 1. filter patch, since we are intending to apply subset of it
278 try:
278 try:
279 chunks, newopts = filterfn(ui, originalchunks)
279 chunks, newopts = filterfn(ui, originalchunks)
280 except patch.PatchError as err:
280 except patch.PatchError as err:
281 raise error.Abort(_('error parsing patch: %s') % err)
281 raise error.Abort(_('error parsing patch: %s') % err)
282 opts.update(newopts)
282 opts.update(newopts)
283
283
284 # We need to keep a backup of files that have been newly added and
284 # We need to keep a backup of files that have been newly added and
285 # modified during the recording process because there is a previous
285 # modified during the recording process because there is a previous
286 # version without the edit in the workdir
286 # version without the edit in the workdir
287 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
287 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
288 contenders = set()
288 contenders = set()
289 for h in chunks:
289 for h in chunks:
290 try:
290 try:
291 contenders.update(set(h.files()))
291 contenders.update(set(h.files()))
292 except AttributeError:
292 except AttributeError:
293 pass
293 pass
294
294
295 changed = status.modified + status.added + status.removed
295 changed = status.modified + status.added + status.removed
296 newfiles = [f for f in changed if f in contenders]
296 newfiles = [f for f in changed if f in contenders]
297 if not newfiles:
297 if not newfiles:
298 ui.status(_('no changes to record\n'))
298 ui.status(_('no changes to record\n'))
299 return 0
299 return 0
300
300
301 modified = set(status.modified)
301 modified = set(status.modified)
302
302
303 # 2. backup changed files, so we can restore them in the end
303 # 2. backup changed files, so we can restore them in the end
304
304
305 if backupall:
305 if backupall:
306 tobackup = changed
306 tobackup = changed
307 else:
307 else:
308 tobackup = [f for f in newfiles if f in modified or f in \
308 tobackup = [f for f in newfiles if f in modified or f in \
309 newlyaddedandmodifiedfiles]
309 newlyaddedandmodifiedfiles]
310 backups = {}
310 backups = {}
311 if tobackup:
311 if tobackup:
312 backupdir = repo.vfs.join('record-backups')
312 backupdir = repo.vfs.join('record-backups')
313 try:
313 try:
314 os.mkdir(backupdir)
314 os.mkdir(backupdir)
315 except OSError as err:
315 except OSError as err:
316 if err.errno != errno.EEXIST:
316 if err.errno != errno.EEXIST:
317 raise
317 raise
318 try:
318 try:
319 # backup continues
319 # backup continues
320 for f in tobackup:
320 for f in tobackup:
321 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
321 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
322 dir=backupdir)
322 dir=backupdir)
323 os.close(fd)
323 os.close(fd)
324 ui.debug('backup %r as %r\n' % (f, tmpname))
324 ui.debug('backup %r as %r\n' % (f, tmpname))
325 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
325 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
326 backups[f] = tmpname
326 backups[f] = tmpname
327
327
328 fp = stringio()
328 fp = stringio()
329 for c in chunks:
329 for c in chunks:
330 fname = c.filename()
330 fname = c.filename()
331 if fname in backups:
331 if fname in backups:
332 c.write(fp)
332 c.write(fp)
333 dopatch = fp.tell()
333 dopatch = fp.tell()
334 fp.seek(0)
334 fp.seek(0)
335
335
336 # 2.5 optionally review / modify patch in text editor
336 # 2.5 optionally review / modify patch in text editor
337 if opts.get('review', False):
337 if opts.get('review', False):
338 patchtext = (crecordmod.diffhelptext
338 patchtext = (crecordmod.diffhelptext
339 + crecordmod.patchhelptext
339 + crecordmod.patchhelptext
340 + fp.read())
340 + fp.read())
341 reviewedpatch = ui.edit(patchtext, "",
341 reviewedpatch = ui.edit(patchtext, "",
342 extra={"suffix": ".diff"},
342 extra={"suffix": ".diff"},
343 repopath=repo.path)
343 repopath=repo.path)
344 fp.truncate(0)
344 fp.truncate(0)
345 fp.write(reviewedpatch)
345 fp.write(reviewedpatch)
346 fp.seek(0)
346 fp.seek(0)
347
347
348 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
348 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
349 # 3a. apply filtered patch to clean repo (clean)
349 # 3a. apply filtered patch to clean repo (clean)
350 if backups:
350 if backups:
351 # Equivalent to hg.revert
351 # Equivalent to hg.revert
352 m = scmutil.matchfiles(repo, backups.keys())
352 m = scmutil.matchfiles(repo, backups.keys())
353 mergemod.update(repo, repo.dirstate.p1(),
353 mergemod.update(repo, repo.dirstate.p1(),
354 False, True, matcher=m)
354 False, True, matcher=m)
355
355
356 # 3b. (apply)
356 # 3b. (apply)
357 if dopatch:
357 if dopatch:
358 try:
358 try:
359 ui.debug('applying patch\n')
359 ui.debug('applying patch\n')
360 ui.debug(fp.getvalue())
360 ui.debug(fp.getvalue())
361 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
361 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
362 except patch.PatchError as err:
362 except patch.PatchError as err:
363 raise error.Abort(str(err))
363 raise error.Abort(str(err))
364 del fp
364 del fp
365
365
366 # 4. We prepared working directory according to filtered
366 # 4. We prepared working directory according to filtered
367 # patch. Now is the time to delegate the job to
367 # patch. Now is the time to delegate the job to
368 # commit/qrefresh or the like!
368 # commit/qrefresh or the like!
369
369
370 # Make all of the pathnames absolute.
370 # Make all of the pathnames absolute.
371 newfiles = [repo.wjoin(nf) for nf in newfiles]
371 newfiles = [repo.wjoin(nf) for nf in newfiles]
372 return commitfunc(ui, repo, *newfiles, **opts)
372 return commitfunc(ui, repo, *newfiles, **opts)
373 finally:
373 finally:
374 # 5. finally restore backed-up files
374 # 5. finally restore backed-up files
375 try:
375 try:
376 dirstate = repo.dirstate
376 dirstate = repo.dirstate
377 for realname, tmpname in backups.iteritems():
377 for realname, tmpname in backups.iteritems():
378 ui.debug('restoring %r to %r\n' % (tmpname, realname))
378 ui.debug('restoring %r to %r\n' % (tmpname, realname))
379
379
380 if dirstate[realname] == 'n':
380 if dirstate[realname] == 'n':
381 # without normallookup, restoring timestamp
381 # without normallookup, restoring timestamp
382 # may cause partially committed files
382 # may cause partially committed files
383 # to be treated as unmodified
383 # to be treated as unmodified
384 dirstate.normallookup(realname)
384 dirstate.normallookup(realname)
385
385
386 # copystat=True here and above are a hack to trick any
386 # copystat=True here and above are a hack to trick any
387 # editors that have f open that we haven't modified them.
387 # editors that have f open that we haven't modified them.
388 #
388 #
389 # Also note that this racy as an editor could notice the
389 # Also note that this racy as an editor could notice the
390 # file's mtime before we've finished writing it.
390 # file's mtime before we've finished writing it.
391 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
391 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
392 os.unlink(tmpname)
392 os.unlink(tmpname)
393 if tobackup:
393 if tobackup:
394 os.rmdir(backupdir)
394 os.rmdir(backupdir)
395 except OSError:
395 except OSError:
396 pass
396 pass
397
397
398 def recordinwlock(ui, repo, message, match, opts):
398 def recordinwlock(ui, repo, message, match, opts):
399 with repo.wlock():
399 with repo.wlock():
400 return recordfunc(ui, repo, message, match, opts)
400 return recordfunc(ui, repo, message, match, opts)
401
401
402 return commit(ui, repo, recordinwlock, pats, opts)
402 return commit(ui, repo, recordinwlock, pats, opts)
403
403
404 def findpossible(cmd, table, strict=False):
404 def findpossible(cmd, table, strict=False):
405 """
405 """
406 Return cmd -> (aliases, command table entry)
406 Return cmd -> (aliases, command table entry)
407 for each matching command.
407 for each matching command.
408 Return debug commands (or their aliases) only if no normal command matches.
408 Return debug commands (or their aliases) only if no normal command matches.
409 """
409 """
410 choice = {}
410 choice = {}
411 debugchoice = {}
411 debugchoice = {}
412
412
413 if cmd in table:
413 if cmd in table:
414 # short-circuit exact matches, "log" alias beats "^log|history"
414 # short-circuit exact matches, "log" alias beats "^log|history"
415 keys = [cmd]
415 keys = [cmd]
416 else:
416 else:
417 keys = table.keys()
417 keys = table.keys()
418
418
419 allcmds = []
419 allcmds = []
420 for e in keys:
420 for e in keys:
421 aliases = parsealiases(e)
421 aliases = parsealiases(e)
422 allcmds.extend(aliases)
422 allcmds.extend(aliases)
423 found = None
423 found = None
424 if cmd in aliases:
424 if cmd in aliases:
425 found = cmd
425 found = cmd
426 elif not strict:
426 elif not strict:
427 for a in aliases:
427 for a in aliases:
428 if a.startswith(cmd):
428 if a.startswith(cmd):
429 found = a
429 found = a
430 break
430 break
431 if found is not None:
431 if found is not None:
432 if aliases[0].startswith("debug") or found.startswith("debug"):
432 if aliases[0].startswith("debug") or found.startswith("debug"):
433 debugchoice[found] = (aliases, table[e])
433 debugchoice[found] = (aliases, table[e])
434 else:
434 else:
435 choice[found] = (aliases, table[e])
435 choice[found] = (aliases, table[e])
436
436
437 if not choice and debugchoice:
437 if not choice and debugchoice:
438 choice = debugchoice
438 choice = debugchoice
439
439
440 return choice, allcmds
440 return choice, allcmds
441
441
442 def findcmd(cmd, table, strict=True):
442 def findcmd(cmd, table, strict=True):
443 """Return (aliases, command table entry) for command string."""
443 """Return (aliases, command table entry) for command string."""
444 choice, allcmds = findpossible(cmd, table, strict)
444 choice, allcmds = findpossible(cmd, table, strict)
445
445
446 if cmd in choice:
446 if cmd in choice:
447 return choice[cmd]
447 return choice[cmd]
448
448
449 if len(choice) > 1:
449 if len(choice) > 1:
450 clist = sorted(choice)
450 clist = sorted(choice)
451 raise error.AmbiguousCommand(cmd, clist)
451 raise error.AmbiguousCommand(cmd, clist)
452
452
453 if choice:
453 if choice:
454 return list(choice.values())[0]
454 return list(choice.values())[0]
455
455
456 raise error.UnknownCommand(cmd, allcmds)
456 raise error.UnknownCommand(cmd, allcmds)
457
457
458 def findrepo(p):
458 def findrepo(p):
459 while not os.path.isdir(os.path.join(p, ".hg")):
459 while not os.path.isdir(os.path.join(p, ".hg")):
460 oldp, p = p, os.path.dirname(p)
460 oldp, p = p, os.path.dirname(p)
461 if p == oldp:
461 if p == oldp:
462 return None
462 return None
463
463
464 return p
464 return p
465
465
466 def bailifchanged(repo, merge=True, hint=None):
466 def bailifchanged(repo, merge=True, hint=None):
467 """ enforce the precondition that working directory must be clean.
467 """ enforce the precondition that working directory must be clean.
468
468
469 'merge' can be set to false if a pending uncommitted merge should be
469 'merge' can be set to false if a pending uncommitted merge should be
470 ignored (such as when 'update --check' runs).
470 ignored (such as when 'update --check' runs).
471
471
472 'hint' is the usual hint given to Abort exception.
472 'hint' is the usual hint given to Abort exception.
473 """
473 """
474
474
475 if merge and repo.dirstate.p2() != nullid:
475 if merge and repo.dirstate.p2() != nullid:
476 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
476 raise error.Abort(_('outstanding uncommitted merge'), hint=hint)
477 modified, added, removed, deleted = repo.status()[:4]
477 modified, added, removed, deleted = repo.status()[:4]
478 if modified or added or removed or deleted:
478 if modified or added or removed or deleted:
479 raise error.Abort(_('uncommitted changes'), hint=hint)
479 raise error.Abort(_('uncommitted changes'), hint=hint)
480 ctx = repo[None]
480 ctx = repo[None]
481 for s in sorted(ctx.substate):
481 for s in sorted(ctx.substate):
482 ctx.sub(s).bailifchanged(hint=hint)
482 ctx.sub(s).bailifchanged(hint=hint)
483
483
484 def logmessage(ui, opts):
484 def logmessage(ui, opts):
485 """ get the log message according to -m and -l option """
485 """ get the log message according to -m and -l option """
486 message = opts.get('message')
486 message = opts.get('message')
487 logfile = opts.get('logfile')
487 logfile = opts.get('logfile')
488
488
489 if message and logfile:
489 if message and logfile:
490 raise error.Abort(_('options --message and --logfile are mutually '
490 raise error.Abort(_('options --message and --logfile are mutually '
491 'exclusive'))
491 'exclusive'))
492 if not message and logfile:
492 if not message and logfile:
493 try:
493 try:
494 if isstdiofilename(logfile):
494 if isstdiofilename(logfile):
495 message = ui.fin.read()
495 message = ui.fin.read()
496 else:
496 else:
497 message = '\n'.join(util.readfile(logfile).splitlines())
497 message = '\n'.join(util.readfile(logfile).splitlines())
498 except IOError as inst:
498 except IOError as inst:
499 raise error.Abort(_("can't read commit message '%s': %s") %
499 raise error.Abort(_("can't read commit message '%s': %s") %
500 (logfile, inst.strerror))
500 (logfile, inst.strerror))
501 return message
501 return message
502
502
503 def mergeeditform(ctxorbool, baseformname):
503 def mergeeditform(ctxorbool, baseformname):
504 """return appropriate editform name (referencing a committemplate)
504 """return appropriate editform name (referencing a committemplate)
505
505
506 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
506 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
507 merging is committed.
507 merging is committed.
508
508
509 This returns baseformname with '.merge' appended if it is a merge,
509 This returns baseformname with '.merge' appended if it is a merge,
510 otherwise '.normal' is appended.
510 otherwise '.normal' is appended.
511 """
511 """
512 if isinstance(ctxorbool, bool):
512 if isinstance(ctxorbool, bool):
513 if ctxorbool:
513 if ctxorbool:
514 return baseformname + ".merge"
514 return baseformname + ".merge"
515 elif 1 < len(ctxorbool.parents()):
515 elif 1 < len(ctxorbool.parents()):
516 return baseformname + ".merge"
516 return baseformname + ".merge"
517
517
518 return baseformname + ".normal"
518 return baseformname + ".normal"
519
519
520 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
520 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
521 editform='', **opts):
521 editform='', **opts):
522 """get appropriate commit message editor according to '--edit' option
522 """get appropriate commit message editor according to '--edit' option
523
523
524 'finishdesc' is a function to be called with edited commit message
524 'finishdesc' is a function to be called with edited commit message
525 (= 'description' of the new changeset) just after editing, but
525 (= 'description' of the new changeset) just after editing, but
526 before checking empty-ness. It should return actual text to be
526 before checking empty-ness. It should return actual text to be
527 stored into history. This allows to change description before
527 stored into history. This allows to change description before
528 storing.
528 storing.
529
529
530 'extramsg' is a extra message to be shown in the editor instead of
530 'extramsg' is a extra message to be shown in the editor instead of
531 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
531 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
532 is automatically added.
532 is automatically added.
533
533
534 'editform' is a dot-separated list of names, to distinguish
534 'editform' is a dot-separated list of names, to distinguish
535 the purpose of commit text editing.
535 the purpose of commit text editing.
536
536
537 'getcommiteditor' returns 'commitforceeditor' regardless of
537 'getcommiteditor' returns 'commitforceeditor' regardless of
538 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
538 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
539 they are specific for usage in MQ.
539 they are specific for usage in MQ.
540 """
540 """
541 if edit or finishdesc or extramsg:
541 if edit or finishdesc or extramsg:
542 return lambda r, c, s: commitforceeditor(r, c, s,
542 return lambda r, c, s: commitforceeditor(r, c, s,
543 finishdesc=finishdesc,
543 finishdesc=finishdesc,
544 extramsg=extramsg,
544 extramsg=extramsg,
545 editform=editform)
545 editform=editform)
546 elif editform:
546 elif editform:
547 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
547 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
548 else:
548 else:
549 return commiteditor
549 return commiteditor
550
550
551 def loglimit(opts):
551 def loglimit(opts):
552 """get the log limit according to option -l/--limit"""
552 """get the log limit according to option -l/--limit"""
553 limit = opts.get('limit')
553 limit = opts.get('limit')
554 if limit:
554 if limit:
555 try:
555 try:
556 limit = int(limit)
556 limit = int(limit)
557 except ValueError:
557 except ValueError:
558 raise error.Abort(_('limit must be a positive integer'))
558 raise error.Abort(_('limit must be a positive integer'))
559 if limit <= 0:
559 if limit <= 0:
560 raise error.Abort(_('limit must be positive'))
560 raise error.Abort(_('limit must be positive'))
561 else:
561 else:
562 limit = None
562 limit = None
563 return limit
563 return limit
564
564
565 def makefilename(repo, pat, node, desc=None,
565 def makefilename(repo, pat, node, desc=None,
566 total=None, seqno=None, revwidth=None, pathname=None):
566 total=None, seqno=None, revwidth=None, pathname=None):
567 node_expander = {
567 node_expander = {
568 'H': lambda: hex(node),
568 'H': lambda: hex(node),
569 'R': lambda: str(repo.changelog.rev(node)),
569 'R': lambda: str(repo.changelog.rev(node)),
570 'h': lambda: short(node),
570 'h': lambda: short(node),
571 'm': lambda: re.sub('[^\w]', '_', str(desc))
571 'm': lambda: re.sub('[^\w]', '_', str(desc))
572 }
572 }
573 expander = {
573 expander = {
574 '%': lambda: '%',
574 '%': lambda: '%',
575 'b': lambda: os.path.basename(repo.root),
575 'b': lambda: os.path.basename(repo.root),
576 }
576 }
577
577
578 try:
578 try:
579 if node:
579 if node:
580 expander.update(node_expander)
580 expander.update(node_expander)
581 if node:
581 if node:
582 expander['r'] = (lambda:
582 expander['r'] = (lambda:
583 str(repo.changelog.rev(node)).zfill(revwidth or 0))
583 str(repo.changelog.rev(node)).zfill(revwidth or 0))
584 if total is not None:
584 if total is not None:
585 expander['N'] = lambda: str(total)
585 expander['N'] = lambda: str(total)
586 if seqno is not None:
586 if seqno is not None:
587 expander['n'] = lambda: str(seqno)
587 expander['n'] = lambda: str(seqno)
588 if total is not None and seqno is not None:
588 if total is not None and seqno is not None:
589 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
589 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
590 if pathname is not None:
590 if pathname is not None:
591 expander['s'] = lambda: os.path.basename(pathname)
591 expander['s'] = lambda: os.path.basename(pathname)
592 expander['d'] = lambda: os.path.dirname(pathname) or '.'
592 expander['d'] = lambda: os.path.dirname(pathname) or '.'
593 expander['p'] = lambda: pathname
593 expander['p'] = lambda: pathname
594
594
595 newname = []
595 newname = []
596 patlen = len(pat)
596 patlen = len(pat)
597 i = 0
597 i = 0
598 while i < patlen:
598 while i < patlen:
599 c = pat[i:i + 1]
599 c = pat[i:i + 1]
600 if c == '%':
600 if c == '%':
601 i += 1
601 i += 1
602 c = pat[i:i + 1]
602 c = pat[i:i + 1]
603 c = expander[c]()
603 c = expander[c]()
604 newname.append(c)
604 newname.append(c)
605 i += 1
605 i += 1
606 return ''.join(newname)
606 return ''.join(newname)
607 except KeyError as inst:
607 except KeyError as inst:
608 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
608 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
609 inst.args[0])
609 inst.args[0])
610
610
611 def isstdiofilename(pat):
611 def isstdiofilename(pat):
612 """True if the given pat looks like a filename denoting stdin/stdout"""
612 """True if the given pat looks like a filename denoting stdin/stdout"""
613 return not pat or pat == '-'
613 return not pat or pat == '-'
614
614
615 class _unclosablefile(object):
615 class _unclosablefile(object):
616 def __init__(self, fp):
616 def __init__(self, fp):
617 self._fp = fp
617 self._fp = fp
618
618
619 def close(self):
619 def close(self):
620 pass
620 pass
621
621
622 def __iter__(self):
622 def __iter__(self):
623 return iter(self._fp)
623 return iter(self._fp)
624
624
625 def __getattr__(self, attr):
625 def __getattr__(self, attr):
626 return getattr(self._fp, attr)
626 return getattr(self._fp, attr)
627
627
628 def __enter__(self):
628 def __enter__(self):
629 return self
629 return self
630
630
631 def __exit__(self, exc_type, exc_value, exc_tb):
631 def __exit__(self, exc_type, exc_value, exc_tb):
632 pass
632 pass
633
633
634 def makefileobj(repo, pat, node=None, desc=None, total=None,
634 def makefileobj(repo, pat, node=None, desc=None, total=None,
635 seqno=None, revwidth=None, mode='wb', modemap=None,
635 seqno=None, revwidth=None, mode='wb', modemap=None,
636 pathname=None):
636 pathname=None):
637
637
638 writable = mode not in ('r', 'rb')
638 writable = mode not in ('r', 'rb')
639
639
640 if isstdiofilename(pat):
640 if isstdiofilename(pat):
641 if writable:
641 if writable:
642 fp = repo.ui.fout
642 fp = repo.ui.fout
643 else:
643 else:
644 fp = repo.ui.fin
644 fp = repo.ui.fin
645 return _unclosablefile(fp)
645 return _unclosablefile(fp)
646 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
646 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
647 if modemap is not None:
647 if modemap is not None:
648 mode = modemap.get(fn, mode)
648 mode = modemap.get(fn, mode)
649 if mode == 'wb':
649 if mode == 'wb':
650 modemap[fn] = 'ab'
650 modemap[fn] = 'ab'
651 return open(fn, mode)
651 return open(fn, mode)
652
652
653 def openrevlog(repo, cmd, file_, opts):
653 def openrevlog(repo, cmd, file_, opts):
654 """opens the changelog, manifest, a filelog or a given revlog"""
654 """opens the changelog, manifest, a filelog or a given revlog"""
655 cl = opts['changelog']
655 cl = opts['changelog']
656 mf = opts['manifest']
656 mf = opts['manifest']
657 dir = opts['dir']
657 dir = opts['dir']
658 msg = None
658 msg = None
659 if cl and mf:
659 if cl and mf:
660 msg = _('cannot specify --changelog and --manifest at the same time')
660 msg = _('cannot specify --changelog and --manifest at the same time')
661 elif cl and dir:
661 elif cl and dir:
662 msg = _('cannot specify --changelog and --dir at the same time')
662 msg = _('cannot specify --changelog and --dir at the same time')
663 elif cl or mf or dir:
663 elif cl or mf or dir:
664 if file_:
664 if file_:
665 msg = _('cannot specify filename with --changelog or --manifest')
665 msg = _('cannot specify filename with --changelog or --manifest')
666 elif not repo:
666 elif not repo:
667 msg = _('cannot specify --changelog or --manifest or --dir '
667 msg = _('cannot specify --changelog or --manifest or --dir '
668 'without a repository')
668 'without a repository')
669 if msg:
669 if msg:
670 raise error.Abort(msg)
670 raise error.Abort(msg)
671
671
672 r = None
672 r = None
673 if repo:
673 if repo:
674 if cl:
674 if cl:
675 r = repo.unfiltered().changelog
675 r = repo.unfiltered().changelog
676 elif dir:
676 elif dir:
677 if 'treemanifest' not in repo.requirements:
677 if 'treemanifest' not in repo.requirements:
678 raise error.Abort(_("--dir can only be used on repos with "
678 raise error.Abort(_("--dir can only be used on repos with "
679 "treemanifest enabled"))
679 "treemanifest enabled"))
680 dirlog = repo.manifestlog._revlog.dirlog(dir)
680 dirlog = repo.manifestlog._revlog.dirlog(dir)
681 if len(dirlog):
681 if len(dirlog):
682 r = dirlog
682 r = dirlog
683 elif mf:
683 elif mf:
684 r = repo.manifestlog._revlog
684 r = repo.manifestlog._revlog
685 elif file_:
685 elif file_:
686 filelog = repo.file(file_)
686 filelog = repo.file(file_)
687 if len(filelog):
687 if len(filelog):
688 r = filelog
688 r = filelog
689 if not r:
689 if not r:
690 if not file_:
690 if not file_:
691 raise error.CommandError(cmd, _('invalid arguments'))
691 raise error.CommandError(cmd, _('invalid arguments'))
692 if not os.path.isfile(file_):
692 if not os.path.isfile(file_):
693 raise error.Abort(_("revlog '%s' not found") % file_)
693 raise error.Abort(_("revlog '%s' not found") % file_)
694 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
694 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
695 file_[:-2] + ".i")
695 file_[:-2] + ".i")
696 return r
696 return r
697
697
698 def copy(ui, repo, pats, opts, rename=False):
698 def copy(ui, repo, pats, opts, rename=False):
699 # called with the repo lock held
699 # called with the repo lock held
700 #
700 #
701 # hgsep => pathname that uses "/" to separate directories
701 # hgsep => pathname that uses "/" to separate directories
702 # ossep => pathname that uses os.sep to separate directories
702 # ossep => pathname that uses os.sep to separate directories
703 cwd = repo.getcwd()
703 cwd = repo.getcwd()
704 targets = {}
704 targets = {}
705 after = opts.get("after")
705 after = opts.get("after")
706 dryrun = opts.get("dry_run")
706 dryrun = opts.get("dry_run")
707 wctx = repo[None]
707 wctx = repo[None]
708
708
709 def walkpat(pat):
709 def walkpat(pat):
710 srcs = []
710 srcs = []
711 if after:
711 if after:
712 badstates = '?'
712 badstates = '?'
713 else:
713 else:
714 badstates = '?r'
714 badstates = '?r'
715 m = scmutil.match(wctx, [pat], opts, globbed=True)
715 m = scmutil.match(wctx, [pat], opts, globbed=True)
716 for abs in wctx.walk(m):
716 for abs in wctx.walk(m):
717 state = repo.dirstate[abs]
717 state = repo.dirstate[abs]
718 rel = m.rel(abs)
718 rel = m.rel(abs)
719 exact = m.exact(abs)
719 exact = m.exact(abs)
720 if state in badstates:
720 if state in badstates:
721 if exact and state == '?':
721 if exact and state == '?':
722 ui.warn(_('%s: not copying - file is not managed\n') % rel)
722 ui.warn(_('%s: not copying - file is not managed\n') % rel)
723 if exact and state == 'r':
723 if exact and state == 'r':
724 ui.warn(_('%s: not copying - file has been marked for'
724 ui.warn(_('%s: not copying - file has been marked for'
725 ' remove\n') % rel)
725 ' remove\n') % rel)
726 continue
726 continue
727 # abs: hgsep
727 # abs: hgsep
728 # rel: ossep
728 # rel: ossep
729 srcs.append((abs, rel, exact))
729 srcs.append((abs, rel, exact))
730 return srcs
730 return srcs
731
731
732 # abssrc: hgsep
732 # abssrc: hgsep
733 # relsrc: ossep
733 # relsrc: ossep
734 # otarget: ossep
734 # otarget: ossep
735 def copyfile(abssrc, relsrc, otarget, exact):
735 def copyfile(abssrc, relsrc, otarget, exact):
736 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
736 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
737 if '/' in abstarget:
737 if '/' in abstarget:
738 # We cannot normalize abstarget itself, this would prevent
738 # We cannot normalize abstarget itself, this would prevent
739 # case only renames, like a => A.
739 # case only renames, like a => A.
740 abspath, absname = abstarget.rsplit('/', 1)
740 abspath, absname = abstarget.rsplit('/', 1)
741 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
741 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
742 reltarget = repo.pathto(abstarget, cwd)
742 reltarget = repo.pathto(abstarget, cwd)
743 target = repo.wjoin(abstarget)
743 target = repo.wjoin(abstarget)
744 src = repo.wjoin(abssrc)
744 src = repo.wjoin(abssrc)
745 state = repo.dirstate[abstarget]
745 state = repo.dirstate[abstarget]
746
746
747 scmutil.checkportable(ui, abstarget)
747 scmutil.checkportable(ui, abstarget)
748
748
749 # check for collisions
749 # check for collisions
750 prevsrc = targets.get(abstarget)
750 prevsrc = targets.get(abstarget)
751 if prevsrc is not None:
751 if prevsrc is not None:
752 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
752 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
753 (reltarget, repo.pathto(abssrc, cwd),
753 (reltarget, repo.pathto(abssrc, cwd),
754 repo.pathto(prevsrc, cwd)))
754 repo.pathto(prevsrc, cwd)))
755 return
755 return
756
756
757 # check for overwrites
757 # check for overwrites
758 exists = os.path.lexists(target)
758 exists = os.path.lexists(target)
759 samefile = False
759 samefile = False
760 if exists and abssrc != abstarget:
760 if exists and abssrc != abstarget:
761 if (repo.dirstate.normalize(abssrc) ==
761 if (repo.dirstate.normalize(abssrc) ==
762 repo.dirstate.normalize(abstarget)):
762 repo.dirstate.normalize(abstarget)):
763 if not rename:
763 if not rename:
764 ui.warn(_("%s: can't copy - same file\n") % reltarget)
764 ui.warn(_("%s: can't copy - same file\n") % reltarget)
765 return
765 return
766 exists = False
766 exists = False
767 samefile = True
767 samefile = True
768
768
769 if not after and exists or after and state in 'mn':
769 if not after and exists or after and state in 'mn':
770 if not opts['force']:
770 if not opts['force']:
771 if state in 'mn':
771 if state in 'mn':
772 msg = _('%s: not overwriting - file already committed\n')
772 msg = _('%s: not overwriting - file already committed\n')
773 if after:
773 if after:
774 flags = '--after --force'
774 flags = '--after --force'
775 else:
775 else:
776 flags = '--force'
776 flags = '--force'
777 if rename:
777 if rename:
778 hint = _('(hg rename %s to replace the file by '
778 hint = _('(hg rename %s to replace the file by '
779 'recording a rename)\n') % flags
779 'recording a rename)\n') % flags
780 else:
780 else:
781 hint = _('(hg copy %s to replace the file by '
781 hint = _('(hg copy %s to replace the file by '
782 'recording a copy)\n') % flags
782 'recording a copy)\n') % flags
783 else:
783 else:
784 msg = _('%s: not overwriting - file exists\n')
784 msg = _('%s: not overwriting - file exists\n')
785 if rename:
785 if rename:
786 hint = _('(hg rename --after to record the rename)\n')
786 hint = _('(hg rename --after to record the rename)\n')
787 else:
787 else:
788 hint = _('(hg copy --after to record the copy)\n')
788 hint = _('(hg copy --after to record the copy)\n')
789 ui.warn(msg % reltarget)
789 ui.warn(msg % reltarget)
790 ui.warn(hint)
790 ui.warn(hint)
791 return
791 return
792
792
793 if after:
793 if after:
794 if not exists:
794 if not exists:
795 if rename:
795 if rename:
796 ui.warn(_('%s: not recording move - %s does not exist\n') %
796 ui.warn(_('%s: not recording move - %s does not exist\n') %
797 (relsrc, reltarget))
797 (relsrc, reltarget))
798 else:
798 else:
799 ui.warn(_('%s: not recording copy - %s does not exist\n') %
799 ui.warn(_('%s: not recording copy - %s does not exist\n') %
800 (relsrc, reltarget))
800 (relsrc, reltarget))
801 return
801 return
802 elif not dryrun:
802 elif not dryrun:
803 try:
803 try:
804 if exists:
804 if exists:
805 os.unlink(target)
805 os.unlink(target)
806 targetdir = os.path.dirname(target) or '.'
806 targetdir = os.path.dirname(target) or '.'
807 if not os.path.isdir(targetdir):
807 if not os.path.isdir(targetdir):
808 os.makedirs(targetdir)
808 os.makedirs(targetdir)
809 if samefile:
809 if samefile:
810 tmp = target + "~hgrename"
810 tmp = target + "~hgrename"
811 os.rename(src, tmp)
811 os.rename(src, tmp)
812 os.rename(tmp, target)
812 os.rename(tmp, target)
813 else:
813 else:
814 util.copyfile(src, target)
814 util.copyfile(src, target)
815 srcexists = True
815 srcexists = True
816 except IOError as inst:
816 except IOError as inst:
817 if inst.errno == errno.ENOENT:
817 if inst.errno == errno.ENOENT:
818 ui.warn(_('%s: deleted in working directory\n') % relsrc)
818 ui.warn(_('%s: deleted in working directory\n') % relsrc)
819 srcexists = False
819 srcexists = False
820 else:
820 else:
821 ui.warn(_('%s: cannot copy - %s\n') %
821 ui.warn(_('%s: cannot copy - %s\n') %
822 (relsrc, inst.strerror))
822 (relsrc, inst.strerror))
823 return True # report a failure
823 return True # report a failure
824
824
825 if ui.verbose or not exact:
825 if ui.verbose or not exact:
826 if rename:
826 if rename:
827 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
827 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
828 else:
828 else:
829 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
829 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
830
830
831 targets[abstarget] = abssrc
831 targets[abstarget] = abssrc
832
832
833 # fix up dirstate
833 # fix up dirstate
834 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
834 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
835 dryrun=dryrun, cwd=cwd)
835 dryrun=dryrun, cwd=cwd)
836 if rename and not dryrun:
836 if rename and not dryrun:
837 if not after and srcexists and not samefile:
837 if not after and srcexists and not samefile:
838 repo.wvfs.unlinkpath(abssrc)
838 repo.wvfs.unlinkpath(abssrc)
839 wctx.forget([abssrc])
839 wctx.forget([abssrc])
840
840
841 # pat: ossep
841 # pat: ossep
842 # dest ossep
842 # dest ossep
843 # srcs: list of (hgsep, hgsep, ossep, bool)
843 # srcs: list of (hgsep, hgsep, ossep, bool)
844 # return: function that takes hgsep and returns ossep
844 # return: function that takes hgsep and returns ossep
845 def targetpathfn(pat, dest, srcs):
845 def targetpathfn(pat, dest, srcs):
846 if os.path.isdir(pat):
846 if os.path.isdir(pat):
847 abspfx = pathutil.canonpath(repo.root, cwd, pat)
847 abspfx = pathutil.canonpath(repo.root, cwd, pat)
848 abspfx = util.localpath(abspfx)
848 abspfx = util.localpath(abspfx)
849 if destdirexists:
849 if destdirexists:
850 striplen = len(os.path.split(abspfx)[0])
850 striplen = len(os.path.split(abspfx)[0])
851 else:
851 else:
852 striplen = len(abspfx)
852 striplen = len(abspfx)
853 if striplen:
853 if striplen:
854 striplen += len(pycompat.ossep)
854 striplen += len(pycompat.ossep)
855 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
855 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
856 elif destdirexists:
856 elif destdirexists:
857 res = lambda p: os.path.join(dest,
857 res = lambda p: os.path.join(dest,
858 os.path.basename(util.localpath(p)))
858 os.path.basename(util.localpath(p)))
859 else:
859 else:
860 res = lambda p: dest
860 res = lambda p: dest
861 return res
861 return res
862
862
863 # pat: ossep
863 # pat: ossep
864 # dest ossep
864 # dest ossep
865 # srcs: list of (hgsep, hgsep, ossep, bool)
865 # srcs: list of (hgsep, hgsep, ossep, bool)
866 # return: function that takes hgsep and returns ossep
866 # return: function that takes hgsep and returns ossep
867 def targetpathafterfn(pat, dest, srcs):
867 def targetpathafterfn(pat, dest, srcs):
868 if matchmod.patkind(pat):
868 if matchmod.patkind(pat):
869 # a mercurial pattern
869 # a mercurial pattern
870 res = lambda p: os.path.join(dest,
870 res = lambda p: os.path.join(dest,
871 os.path.basename(util.localpath(p)))
871 os.path.basename(util.localpath(p)))
872 else:
872 else:
873 abspfx = pathutil.canonpath(repo.root, cwd, pat)
873 abspfx = pathutil.canonpath(repo.root, cwd, pat)
874 if len(abspfx) < len(srcs[0][0]):
874 if len(abspfx) < len(srcs[0][0]):
875 # A directory. Either the target path contains the last
875 # A directory. Either the target path contains the last
876 # component of the source path or it does not.
876 # component of the source path or it does not.
877 def evalpath(striplen):
877 def evalpath(striplen):
878 score = 0
878 score = 0
879 for s in srcs:
879 for s in srcs:
880 t = os.path.join(dest, util.localpath(s[0])[striplen:])
880 t = os.path.join(dest, util.localpath(s[0])[striplen:])
881 if os.path.lexists(t):
881 if os.path.lexists(t):
882 score += 1
882 score += 1
883 return score
883 return score
884
884
885 abspfx = util.localpath(abspfx)
885 abspfx = util.localpath(abspfx)
886 striplen = len(abspfx)
886 striplen = len(abspfx)
887 if striplen:
887 if striplen:
888 striplen += len(pycompat.ossep)
888 striplen += len(pycompat.ossep)
889 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
889 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
890 score = evalpath(striplen)
890 score = evalpath(striplen)
891 striplen1 = len(os.path.split(abspfx)[0])
891 striplen1 = len(os.path.split(abspfx)[0])
892 if striplen1:
892 if striplen1:
893 striplen1 += len(pycompat.ossep)
893 striplen1 += len(pycompat.ossep)
894 if evalpath(striplen1) > score:
894 if evalpath(striplen1) > score:
895 striplen = striplen1
895 striplen = striplen1
896 res = lambda p: os.path.join(dest,
896 res = lambda p: os.path.join(dest,
897 util.localpath(p)[striplen:])
897 util.localpath(p)[striplen:])
898 else:
898 else:
899 # a file
899 # a file
900 if destdirexists:
900 if destdirexists:
901 res = lambda p: os.path.join(dest,
901 res = lambda p: os.path.join(dest,
902 os.path.basename(util.localpath(p)))
902 os.path.basename(util.localpath(p)))
903 else:
903 else:
904 res = lambda p: dest
904 res = lambda p: dest
905 return res
905 return res
906
906
907 pats = scmutil.expandpats(pats)
907 pats = scmutil.expandpats(pats)
908 if not pats:
908 if not pats:
909 raise error.Abort(_('no source or destination specified'))
909 raise error.Abort(_('no source or destination specified'))
910 if len(pats) == 1:
910 if len(pats) == 1:
911 raise error.Abort(_('no destination specified'))
911 raise error.Abort(_('no destination specified'))
912 dest = pats.pop()
912 dest = pats.pop()
913 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
913 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
914 if not destdirexists:
914 if not destdirexists:
915 if len(pats) > 1 or matchmod.patkind(pats[0]):
915 if len(pats) > 1 or matchmod.patkind(pats[0]):
916 raise error.Abort(_('with multiple sources, destination must be an '
916 raise error.Abort(_('with multiple sources, destination must be an '
917 'existing directory'))
917 'existing directory'))
918 if util.endswithsep(dest):
918 if util.endswithsep(dest):
919 raise error.Abort(_('destination %s is not a directory') % dest)
919 raise error.Abort(_('destination %s is not a directory') % dest)
920
920
921 tfn = targetpathfn
921 tfn = targetpathfn
922 if after:
922 if after:
923 tfn = targetpathafterfn
923 tfn = targetpathafterfn
924 copylist = []
924 copylist = []
925 for pat in pats:
925 for pat in pats:
926 srcs = walkpat(pat)
926 srcs = walkpat(pat)
927 if not srcs:
927 if not srcs:
928 continue
928 continue
929 copylist.append((tfn(pat, dest, srcs), srcs))
929 copylist.append((tfn(pat, dest, srcs), srcs))
930 if not copylist:
930 if not copylist:
931 raise error.Abort(_('no files to copy'))
931 raise error.Abort(_('no files to copy'))
932
932
933 errors = 0
933 errors = 0
934 for targetpath, srcs in copylist:
934 for targetpath, srcs in copylist:
935 for abssrc, relsrc, exact in srcs:
935 for abssrc, relsrc, exact in srcs:
936 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
936 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
937 errors += 1
937 errors += 1
938
938
939 if errors:
939 if errors:
940 ui.warn(_('(consider using --after)\n'))
940 ui.warn(_('(consider using --after)\n'))
941
941
942 return errors != 0
942 return errors != 0
943
943
944 ## facility to let extension process additional data into an import patch
944 ## facility to let extension process additional data into an import patch
945 # list of identifier to be executed in order
945 # list of identifier to be executed in order
946 extrapreimport = [] # run before commit
946 extrapreimport = [] # run before commit
947 extrapostimport = [] # run after commit
947 extrapostimport = [] # run after commit
948 # mapping from identifier to actual import function
948 # mapping from identifier to actual import function
949 #
949 #
950 # 'preimport' functions are run before the commit is made and are provided the following
950 # 'preimport' functions are run before the commit is made and are provided the following
951 # arguments:
951 # arguments:
952 # - repo: the localrepository instance,
952 # - repo: the localrepository instance,
953 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
953 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
954 # - extra: the future extra dictionary of the changeset, please mutate it,
954 # - extra: the future extra dictionary of the changeset, please mutate it,
955 # - opts: the import options.
955 # - opts: the import options.
956 # XXX ideally, we would just pass a ctx ready to be computed; that would allow
956 # XXX ideally, we would just pass a ctx ready to be computed; that would allow
957 # mutation of the in-memory commit and more. Feel free to rework the code to get
957 # mutation of the in-memory commit and more. Feel free to rework the code to get
958 # there.
958 # there.
959 extrapreimportmap = {}
959 extrapreimportmap = {}
960 # 'postimport' functions are run after the commit is made and are provided the following
960 # 'postimport' functions are run after the commit is made and are provided the following
961 # argument:
961 # argument:
962 # - ctx: the changectx created by import.
962 # - ctx: the changectx created by import.
963 extrapostimportmap = {}
963 extrapostimportmap = {}
964
964
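# A minimal sketch of how an extension might use these hooks (illustrative
# only; the extension name 'myext' and the extra key 'myext_source' are made
# up for this example):
#
#     from mercurial import cmdutil
#
#     def preimport(repo, patchdata, extra, opts):
#         # record the imported patch's node id (if any) in the new changeset
#         extra['myext_source'] = patchdata.get('nodeid', '')
#
#     def postimport(ctx):
#         ctx.repo().ui.status('imported %s\n' % ctx)
#
#     cmdutil.extrapreimport.append('myext')
#     cmdutil.extrapreimportmap['myext'] = preimport
#     cmdutil.extrapostimport.append('myext')
#     cmdutil.extrapostimportmap['myext'] = postimport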
965 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
965 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
966 """Utility function used by commands.import to import a single patch
966 """Utility function used by commands.import to import a single patch
967
967
968 This function is explicitly defined here to help the evolve extension to
968 This function is explicitly defined here to help the evolve extension to
969 wrap this part of the import logic.
969 wrap this part of the import logic.
970
970
971 The API is currently a bit ugly because it is a simple code translation from
971 The API is currently a bit ugly because it is a simple code translation from
972 the import command. Feel free to make it better.
972 the import command. Feel free to make it better.
973
973
974 :hunk: a patch (as a binary string)
974 :hunk: a patch (as a binary string)
975 :parents: nodes that will be the parents of the created commit
975 :parents: nodes that will be the parents of the created commit
976 :opts: the full dict of options passed to the import command
976 :opts: the full dict of options passed to the import command
977 :msgs: list to save commit message to.
977 :msgs: list to save commit message to.
978 (used in case we need to save it when failing)
978 (used in case we need to save it when failing)
979 :updatefunc: a function that updates a repo to a given node
979 :updatefunc: a function that updates a repo to a given node
980 updatefunc(<repo>, <node>)
980 updatefunc(<repo>, <node>)
981 """
981 """
982 # avoid cycle context -> subrepo -> cmdutil
982 # avoid cycle context -> subrepo -> cmdutil
983 from . import context
983 from . import context
984 extractdata = patch.extract(ui, hunk)
984 extractdata = patch.extract(ui, hunk)
985 tmpname = extractdata.get('filename')
985 tmpname = extractdata.get('filename')
986 message = extractdata.get('message')
986 message = extractdata.get('message')
987 user = opts.get('user') or extractdata.get('user')
987 user = opts.get('user') or extractdata.get('user')
988 date = opts.get('date') or extractdata.get('date')
988 date = opts.get('date') or extractdata.get('date')
989 branch = extractdata.get('branch')
989 branch = extractdata.get('branch')
990 nodeid = extractdata.get('nodeid')
990 nodeid = extractdata.get('nodeid')
991 p1 = extractdata.get('p1')
991 p1 = extractdata.get('p1')
992 p2 = extractdata.get('p2')
992 p2 = extractdata.get('p2')
993
993
994 nocommit = opts.get('no_commit')
994 nocommit = opts.get('no_commit')
995 importbranch = opts.get('import_branch')
995 importbranch = opts.get('import_branch')
996 update = not opts.get('bypass')
996 update = not opts.get('bypass')
997 strip = opts["strip"]
997 strip = opts["strip"]
998 prefix = opts["prefix"]
998 prefix = opts["prefix"]
999 sim = float(opts.get('similarity') or 0)
999 sim = float(opts.get('similarity') or 0)
1000 if not tmpname:
1000 if not tmpname:
1001 return (None, None, False)
1001 return (None, None, False)
1002
1002
1003 rejects = False
1003 rejects = False
1004
1004
1005 try:
1005 try:
1006 cmdline_message = logmessage(ui, opts)
1006 cmdline_message = logmessage(ui, opts)
1007 if cmdline_message:
1007 if cmdline_message:
1008 # pick up the cmdline msg
1008 # pick up the cmdline msg
1009 message = cmdline_message
1009 message = cmdline_message
1010 elif message:
1010 elif message:
1011 # pick up the patch msg
1011 # pick up the patch msg
1012 message = message.strip()
1012 message = message.strip()
1013 else:
1013 else:
1014 # launch the editor
1014 # launch the editor
1015 message = None
1015 message = None
1016 ui.debug('message:\n%s\n' % message)
1016 ui.debug('message:\n%s\n' % message)
1017
1017
1018 if len(parents) == 1:
1018 if len(parents) == 1:
1019 parents.append(repo[nullid])
1019 parents.append(repo[nullid])
1020 if opts.get('exact'):
1020 if opts.get('exact'):
1021 if not nodeid or not p1:
1021 if not nodeid or not p1:
1022 raise error.Abort(_('not a Mercurial patch'))
1022 raise error.Abort(_('not a Mercurial patch'))
1023 p1 = repo[p1]
1023 p1 = repo[p1]
1024 p2 = repo[p2 or nullid]
1024 p2 = repo[p2 or nullid]
1025 elif p2:
1025 elif p2:
1026 try:
1026 try:
1027 p1 = repo[p1]
1027 p1 = repo[p1]
1028 p2 = repo[p2]
1028 p2 = repo[p2]
1029 # Without any options, consider p2 only if the
1029 # Without any options, consider p2 only if the
1030 # patch is being applied on top of the recorded
1030 # patch is being applied on top of the recorded
1031 # first parent.
1031 # first parent.
1032 if p1 != parents[0]:
1032 if p1 != parents[0]:
1033 p1 = parents[0]
1033 p1 = parents[0]
1034 p2 = repo[nullid]
1034 p2 = repo[nullid]
1035 except error.RepoError:
1035 except error.RepoError:
1036 p1, p2 = parents
1036 p1, p2 = parents
1037 if p2.node() == nullid:
1037 if p2.node() == nullid:
1038 ui.warn(_("warning: import the patch as a normal revision\n"
1038 ui.warn(_("warning: import the patch as a normal revision\n"
1039 "(use --exact to import the patch as a merge)\n"))
1039 "(use --exact to import the patch as a merge)\n"))
1040 else:
1040 else:
1041 p1, p2 = parents
1041 p1, p2 = parents
1042
1042
1043 n = None
1043 n = None
1044 if update:
1044 if update:
1045 if p1 != parents[0]:
1045 if p1 != parents[0]:
1046 updatefunc(repo, p1.node())
1046 updatefunc(repo, p1.node())
1047 if p2 != parents[1]:
1047 if p2 != parents[1]:
1048 repo.setparents(p1.node(), p2.node())
1048 repo.setparents(p1.node(), p2.node())
1049
1049
1050 if opts.get('exact') or importbranch:
1050 if opts.get('exact') or importbranch:
1051 repo.dirstate.setbranch(branch or 'default')
1051 repo.dirstate.setbranch(branch or 'default')
1052
1052
1053 partial = opts.get('partial', False)
1053 partial = opts.get('partial', False)
1054 files = set()
1054 files = set()
1055 try:
1055 try:
1056 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1056 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1057 files=files, eolmode=None, similarity=sim / 100.0)
1057 files=files, eolmode=None, similarity=sim / 100.0)
1058 except patch.PatchError as e:
1058 except patch.PatchError as e:
1059 if not partial:
1059 if not partial:
1060 raise error.Abort(str(e))
1060 raise error.Abort(str(e))
1061 if partial:
1061 if partial:
1062 rejects = True
1062 rejects = True
1063
1063
1064 files = list(files)
1064 files = list(files)
1065 if nocommit:
1065 if nocommit:
1066 if message:
1066 if message:
1067 msgs.append(message)
1067 msgs.append(message)
1068 else:
1068 else:
1069 if opts.get('exact') or p2:
1069 if opts.get('exact') or p2:
1070 # If you got here, you either used --force and know what
1070 # If you got here, you either used --force and know what
1071 # you are doing or used --exact or a merge patch while
1071 # you are doing or used --exact or a merge patch while
1072 # being updated to its first parent.
1072 # being updated to its first parent.
1073 m = None
1073 m = None
1074 else:
1074 else:
1075 m = scmutil.matchfiles(repo, files or [])
1075 m = scmutil.matchfiles(repo, files or [])
1076 editform = mergeeditform(repo[None], 'import.normal')
1076 editform = mergeeditform(repo[None], 'import.normal')
1077 if opts.get('exact'):
1077 if opts.get('exact'):
1078 editor = None
1078 editor = None
1079 else:
1079 else:
1080 editor = getcommiteditor(editform=editform, **opts)
1080 editor = getcommiteditor(editform=editform, **opts)
1081 extra = {}
1081 extra = {}
1082 for idfunc in extrapreimport:
1082 for idfunc in extrapreimport:
1083 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1083 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1084 overrides = {}
1084 overrides = {}
1085 if partial:
1085 if partial:
1086 overrides[('ui', 'allowemptycommit')] = True
1086 overrides[('ui', 'allowemptycommit')] = True
1087 with repo.ui.configoverride(overrides, 'import'):
1087 with repo.ui.configoverride(overrides, 'import'):
1088 n = repo.commit(message, user,
1088 n = repo.commit(message, user,
1089 date, match=m,
1089 date, match=m,
1090 editor=editor, extra=extra)
1090 editor=editor, extra=extra)
1091 for idfunc in extrapostimport:
1091 for idfunc in extrapostimport:
1092 extrapostimportmap[idfunc](repo[n])
1092 extrapostimportmap[idfunc](repo[n])
1093 else:
1093 else:
1094 if opts.get('exact') or importbranch:
1094 if opts.get('exact') or importbranch:
1095 branch = branch or 'default'
1095 branch = branch or 'default'
1096 else:
1096 else:
1097 branch = p1.branch()
1097 branch = p1.branch()
1098 store = patch.filestore()
1098 store = patch.filestore()
1099 try:
1099 try:
1100 files = set()
1100 files = set()
1101 try:
1101 try:
1102 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1102 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1103 files, eolmode=None)
1103 files, eolmode=None)
1104 except patch.PatchError as e:
1104 except patch.PatchError as e:
1105 raise error.Abort(str(e))
1105 raise error.Abort(str(e))
1106 if opts.get('exact'):
1106 if opts.get('exact'):
1107 editor = None
1107 editor = None
1108 else:
1108 else:
1109 editor = getcommiteditor(editform='import.bypass')
1109 editor = getcommiteditor(editform='import.bypass')
1110 memctx = context.memctx(repo, (p1.node(), p2.node()),
1110 memctx = context.memctx(repo, (p1.node(), p2.node()),
1111 message,
1111 message,
1112 files=files,
1112 files=files,
1113 filectxfn=store,
1113 filectxfn=store,
1114 user=user,
1114 user=user,
1115 date=date,
1115 date=date,
1116 branch=branch,
1116 branch=branch,
1117 editor=editor)
1117 editor=editor)
1118 n = memctx.commit()
1118 n = memctx.commit()
1119 finally:
1119 finally:
1120 store.close()
1120 store.close()
1121 if opts.get('exact') and nocommit:
1121 if opts.get('exact') and nocommit:
1122 # --exact with --no-commit is still useful in that it applies the
1122 # --exact with --no-commit is still useful in that it applies the
1123 # merge and branch bits
1123 # merge and branch bits
1124 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1124 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1125 elif opts.get('exact') and hex(n) != nodeid:
1125 elif opts.get('exact') and hex(n) != nodeid:
1126 raise error.Abort(_('patch is damaged or loses information'))
1126 raise error.Abort(_('patch is damaged or loses information'))
1127 msg = _('applied to working directory')
1127 msg = _('applied to working directory')
1128 if n:
1128 if n:
1129 # i18n: refers to a short changeset id
1129 # i18n: refers to a short changeset id
1130 msg = _('created %s') % short(n)
1130 msg = _('created %s') % short(n)
1131 return (msg, n, rejects)
1131 return (msg, n, rejects)
1132 finally:
1132 finally:
1133 os.unlink(tmpname)
1133 os.unlink(tmpname)
1134
1134
1135 # facility to let extensions include additional data in an exported patch
1135 # facility to let extensions include additional data in an exported patch
1136 # list of identifiers to be executed in order
1136 # list of identifiers to be executed in order
1137 extraexport = []
1137 extraexport = []
1138 # mapping from identifier to actual export function
1138 # mapping from identifier to actual export function
1139 # function has to return a string to be added to the header or None
1139 # function has to return a string to be added to the header or None
1140 # it is given two arguments (sequencenumber, changectx)
1140 # it is given two arguments (sequencenumber, changectx)
1141 extraexportmap = {}
1141 extraexportmap = {}
1142
1142
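# A minimal sketch of how an extension might add its own header line to
# exported patches (illustrative only; 'myext' and the header text are made
# up for this example):
#
#     from mercurial import cmdutil
#
#     def _extraheader(seqno, ctx):
#         # returning None suppresses the header for this changeset
#         return 'Exported-Seqno %d' % seqno
#
#     cmdutil.extraexport.append('myext')
#     cmdutil.extraexportmap['myext'] = _extraheader
#
# _exportsingle() below would then emit it as "# Exported-Seqno N".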
1143 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1143 def _exportsingle(repo, ctx, match, switch_parent, rev, seqno, write, diffopts):
1144 node = scmutil.binnode(ctx)
1144 node = scmutil.binnode(ctx)
1145 parents = [p.node() for p in ctx.parents() if p]
1145 parents = [p.node() for p in ctx.parents() if p]
1146 branch = ctx.branch()
1146 branch = ctx.branch()
1147 if switch_parent:
1147 if switch_parent:
1148 parents.reverse()
1148 parents.reverse()
1149
1149
1150 if parents:
1150 if parents:
1151 prev = parents[0]
1151 prev = parents[0]
1152 else:
1152 else:
1153 prev = nullid
1153 prev = nullid
1154
1154
1155 write("# HG changeset patch\n")
1155 write("# HG changeset patch\n")
1156 write("# User %s\n" % ctx.user())
1156 write("# User %s\n" % ctx.user())
1157 write("# Date %d %d\n" % ctx.date())
1157 write("# Date %d %d\n" % ctx.date())
1158 write("# %s\n" % util.datestr(ctx.date()))
1158 write("# %s\n" % util.datestr(ctx.date()))
1159 if branch and branch != 'default':
1159 if branch and branch != 'default':
1160 write("# Branch %s\n" % branch)
1160 write("# Branch %s\n" % branch)
1161 write("# Node ID %s\n" % hex(node))
1161 write("# Node ID %s\n" % hex(node))
1162 write("# Parent %s\n" % hex(prev))
1162 write("# Parent %s\n" % hex(prev))
1163 if len(parents) > 1:
1163 if len(parents) > 1:
1164 write("# Parent %s\n" % hex(parents[1]))
1164 write("# Parent %s\n" % hex(parents[1]))
1165
1165
1166 for headerid in extraexport:
1166 for headerid in extraexport:
1167 header = extraexportmap[headerid](seqno, ctx)
1167 header = extraexportmap[headerid](seqno, ctx)
1168 if header is not None:
1168 if header is not None:
1169 write('# %s\n' % header)
1169 write('# %s\n' % header)
1170 write(ctx.description().rstrip())
1170 write(ctx.description().rstrip())
1171 write("\n\n")
1171 write("\n\n")
1172
1172
1173 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1173 for chunk, label in patch.diffui(repo, prev, node, match, opts=diffopts):
1174 write(chunk, label=label)
1174 write(chunk, label=label)
1175
1175
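# For reference, the header assembled by _exportsingle() above comes out
# roughly like this (values are illustrative):
#
#     # HG changeset patch
#     # User Alice <alice@example.org>
#     # Date 1500000000 0
#     # Fri Jul 14 02:40:00 2017 +0000
#     # Node ID <40 hex digits>
#     # Parent <40 hex digits of p1>
#
# followed by the description, a blank line, and the diff chunks.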
1176 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1176 def export(repo, revs, fntemplate='hg-%h.patch', fp=None, switch_parent=False,
1177 opts=None, match=None):
1177 opts=None, match=None):
1178 '''export changesets as hg patches
1178 '''export changesets as hg patches
1179
1179
1180 Args:
1180 Args:
1181 repo: The repository from which we're exporting revisions.
1181 repo: The repository from which we're exporting revisions.
1182 revs: A list of revisions to export as revision numbers.
1182 revs: A list of revisions to export as revision numbers.
1183 fntemplate: An optional string to use for generating patch file names.
1183 fntemplate: An optional string to use for generating patch file names.
1184 fp: An optional file-like object to which patches should be written.
1184 fp: An optional file-like object to which patches should be written.
1185 switch_parent: If True, show diffs against second parent when not nullid.
1185 switch_parent: If True, show diffs against second parent when not nullid.
1186 Default is false, which always shows diff against p1.
1186 Default is false, which always shows diff against p1.
1187 opts: diff options to use for generating the patch.
1187 opts: diff options to use for generating the patch.
1188 match: If specified, only export changes to files matching this matcher.
1188 match: If specified, only export changes to files matching this matcher.
1189
1189
1190 Returns:
1190 Returns:
1191 Nothing.
1191 Nothing.
1192
1192
1193 Side Effect:
1193 Side Effect:
1194 "HG Changeset Patch" data is emitted to one of the following
1194 "HG Changeset Patch" data is emitted to one of the following
1195 destinations:
1195 destinations:
1196 fp is specified: All revs are written to the specified
1196 fp is specified: All revs are written to the specified
1197 file-like object.
1197 file-like object.
1198 fntemplate specified: Each rev is written to a unique file named using
1198 fntemplate specified: Each rev is written to a unique file named using
1199 the given template.
1199 the given template.
1200 Neither fp nor template specified: All revs written to repo.ui.write()
1200 Neither fp nor template specified: All revs written to repo.ui.write()
1201 '''
1201 '''
1202
1202
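# Illustrative use (not from this module): writing the tip revision to an
# already-open file object rather than per-revision files could look like
#
#     cmdutil.export(repo, [repo['tip'].rev()], fp=fp)
#
# whereas relying on fntemplate (default 'hg-%h.patch') writes one file per
# exported revision instead.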
1203 total = len(revs)
1203 total = len(revs)
1204 revwidth = max(len(str(rev)) for rev in revs)
1204 revwidth = max(len(str(rev)) for rev in revs)
1205 filemode = {}
1205 filemode = {}
1206
1206
1207 write = None
1207 write = None
1208 dest = '<unnamed>'
1208 dest = '<unnamed>'
1209 if fp:
1209 if fp:
1210 dest = getattr(fp, 'name', dest)
1210 dest = getattr(fp, 'name', dest)
1211 def write(s, **kw):
1211 def write(s, **kw):
1212 fp.write(s)
1212 fp.write(s)
1213 elif not fntemplate:
1213 elif not fntemplate:
1214 write = repo.ui.write
1214 write = repo.ui.write
1215
1215
1216 for seqno, rev in enumerate(revs, 1):
1216 for seqno, rev in enumerate(revs, 1):
1217 ctx = repo[rev]
1217 ctx = repo[rev]
1218 fo = None
1218 fo = None
1219 if not fp and fntemplate:
1219 if not fp and fntemplate:
1220 desc_lines = ctx.description().rstrip().split('\n')
1220 desc_lines = ctx.description().rstrip().split('\n')
1221 desc = desc_lines[0] # Commit always has a first line.
1221 desc = desc_lines[0] # Commit always has a first line.
1222 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1222 fo = makefileobj(repo, fntemplate, ctx.node(), desc=desc,
1223 total=total, seqno=seqno, revwidth=revwidth,
1223 total=total, seqno=seqno, revwidth=revwidth,
1224 mode='wb', modemap=filemode)
1224 mode='wb', modemap=filemode)
1225 dest = fo.name
1225 dest = fo.name
1226 def write(s, **kw):
1226 def write(s, **kw):
1227 fo.write(s)
1227 fo.write(s)
1228 if not dest.startswith('<'):
1228 if not dest.startswith('<'):
1229 repo.ui.note("%s\n" % dest)
1229 repo.ui.note("%s\n" % dest)
1230 _exportsingle(
1230 _exportsingle(
1231 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1231 repo, ctx, match, switch_parent, rev, seqno, write, opts)
1232 if fo is not None:
1232 if fo is not None:
1233 fo.close()
1233 fo.close()
1234
1234
1235 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1235 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1236 changes=None, stat=False, fp=None, prefix='',
1236 changes=None, stat=False, fp=None, prefix='',
1237 root='', listsubrepos=False):
1237 root='', listsubrepos=False):
1238 '''show diff or diffstat.'''
1238 '''show diff or diffstat.'''
1239 if fp is None:
1239 if fp is None:
1240 write = ui.write
1240 write = ui.write
1241 else:
1241 else:
1242 def write(s, **kw):
1242 def write(s, **kw):
1243 fp.write(s)
1243 fp.write(s)
1244
1244
1245 if root:
1245 if root:
1246 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1246 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1247 else:
1247 else:
1248 relroot = ''
1248 relroot = ''
1249 if relroot != '':
1249 if relroot != '':
1250 # XXX relative roots currently don't work if the root is within a
1250 # XXX relative roots currently don't work if the root is within a
1251 # subrepo
1251 # subrepo
1252 uirelroot = match.uipath(relroot)
1252 uirelroot = match.uipath(relroot)
1253 relroot += '/'
1253 relroot += '/'
1254 for matchroot in match.files():
1254 for matchroot in match.files():
1255 if not matchroot.startswith(relroot):
1255 if not matchroot.startswith(relroot):
1256 ui.warn(_('warning: %s not inside relative root %s\n') % (
1256 ui.warn(_('warning: %s not inside relative root %s\n') % (
1257 match.uipath(matchroot), uirelroot))
1257 match.uipath(matchroot), uirelroot))
1258
1258
1259 if stat:
1259 if stat:
1260 diffopts = diffopts.copy(context=0)
1260 diffopts = diffopts.copy(context=0)
1261 width = 80
1261 width = 80
1262 if not ui.plain():
1262 if not ui.plain():
1263 width = ui.termwidth()
1263 width = ui.termwidth()
1264 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1264 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1265 prefix=prefix, relroot=relroot)
1265 prefix=prefix, relroot=relroot)
1266 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1266 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1267 width=width):
1267 width=width):
1268 write(chunk, label=label)
1268 write(chunk, label=label)
1269 else:
1269 else:
1270 for chunk, label in patch.diffui(repo, node1, node2, match,
1270 for chunk, label in patch.diffui(repo, node1, node2, match,
1271 changes, diffopts, prefix=prefix,
1271 changes, diffopts, prefix=prefix,
1272 relroot=relroot):
1272 relroot=relroot):
1273 write(chunk, label=label)
1273 write(chunk, label=label)
1274
1274
1275 if listsubrepos:
1275 if listsubrepos:
1276 ctx1 = repo[node1]
1276 ctx1 = repo[node1]
1277 ctx2 = repo[node2]
1277 ctx2 = repo[node2]
1278 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1278 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1279 tempnode2 = node2
1279 tempnode2 = node2
1280 try:
1280 try:
1281 if node2 is not None:
1281 if node2 is not None:
1282 tempnode2 = ctx2.substate[subpath][1]
1282 tempnode2 = ctx2.substate[subpath][1]
1283 except KeyError:
1283 except KeyError:
1284 # A subrepo that existed in node1 was deleted between node1 and
1284 # A subrepo that existed in node1 was deleted between node1 and
1285 # node2 (inclusive). Thus, ctx2's substate won't contain that
1285 # node2 (inclusive). Thus, ctx2's substate won't contain that
1286 # subpath. The best we can do is to ignore it.
1286 # subpath. The best we can do is to ignore it.
1287 tempnode2 = None
1287 tempnode2 = None
1288 submatch = matchmod.subdirmatcher(subpath, match)
1288 submatch = matchmod.subdirmatcher(subpath, match)
1289 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1289 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1290 stat=stat, fp=fp, prefix=prefix)
1290 stat=stat, fp=fp, prefix=prefix)
1291
1291
1292 def _changesetlabels(ctx):
1292 def _changesetlabels(ctx):
1293 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1293 labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
1294 if ctx.obsolete():
1294 if ctx.obsolete():
1295 labels.append('changeset.obsolete')
1295 labels.append('changeset.obsolete')
1296 if ctx.troubled():
1296 if ctx.troubled():
1297 labels.append('changeset.troubled')
1297 labels.append('changeset.troubled')
1298 for trouble in ctx.troubles():
1298 for trouble in ctx.troubles():
1299 labels.append('trouble.%s' % trouble)
1299 labels.append('trouble.%s' % trouble)
1300 return ' '.join(labels)
1300 return ' '.join(labels)
1301
1301
1302 class changeset_printer(object):
1302 class changeset_printer(object):
1303 '''show changeset information when templating not requested.'''
1303 '''show changeset information when templating not requested.'''
1304
1304
1305 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1305 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1306 self.ui = ui
1306 self.ui = ui
1307 self.repo = repo
1307 self.repo = repo
1308 self.buffered = buffered
1308 self.buffered = buffered
1309 self.matchfn = matchfn
1309 self.matchfn = matchfn
1310 self.diffopts = diffopts
1310 self.diffopts = diffopts
1311 self.header = {}
1311 self.header = {}
1312 self.hunk = {}
1312 self.hunk = {}
1313 self.lastheader = None
1313 self.lastheader = None
1314 self.footer = None
1314 self.footer = None
1315
1315
1316 def flush(self, ctx):
1316 def flush(self, ctx):
1317 rev = ctx.rev()
1317 rev = ctx.rev()
1318 if rev in self.header:
1318 if rev in self.header:
1319 h = self.header[rev]
1319 h = self.header[rev]
1320 if h != self.lastheader:
1320 if h != self.lastheader:
1321 self.lastheader = h
1321 self.lastheader = h
1322 self.ui.write(h)
1322 self.ui.write(h)
1323 del self.header[rev]
1323 del self.header[rev]
1324 if rev in self.hunk:
1324 if rev in self.hunk:
1325 self.ui.write(self.hunk[rev])
1325 self.ui.write(self.hunk[rev])
1326 del self.hunk[rev]
1326 del self.hunk[rev]
1327 return 1
1327 return 1
1328 return 0
1328 return 0
1329
1329
1330 def close(self):
1330 def close(self):
1331 if self.footer:
1331 if self.footer:
1332 self.ui.write(self.footer)
1332 self.ui.write(self.footer)
1333
1333
1334 def show(self, ctx, copies=None, matchfn=None, **props):
1334 def show(self, ctx, copies=None, matchfn=None, **props):
1335 props = pycompat.byteskwargs(props)
1335 if self.buffered:
1336 if self.buffered:
1336 self.ui.pushbuffer(labeled=True)
1337 self.ui.pushbuffer(labeled=True)
1337 self._show(ctx, copies, matchfn, props)
1338 self._show(ctx, copies, matchfn, props)
1338 self.hunk[ctx.rev()] = self.ui.popbuffer()
1339 self.hunk[ctx.rev()] = self.ui.popbuffer()
1339 else:
1340 else:
1340 self._show(ctx, copies, matchfn, props)
1341 self._show(ctx, copies, matchfn, props)
1341
1342
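# Note on the byteskwargs() call above (informal): on Python 3 the **props
# expansion produces unicode str keys, while this module otherwise works with
# bytes keys, so pycompat.byteskwargs() converts e.g. {'somekey': value} back
# to {b'somekey': value}; on Python 2 it is effectively a no-op.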
1342 def _show(self, ctx, copies, matchfn, props):
1343 def _show(self, ctx, copies, matchfn, props):
1343 '''show a single changeset or file revision'''
1344 '''show a single changeset or file revision'''
1344 changenode = ctx.node()
1345 changenode = ctx.node()
1345 rev = ctx.rev()
1346 rev = ctx.rev()
1346 if self.ui.debugflag:
1347 if self.ui.debugflag:
1347 hexfunc = hex
1348 hexfunc = hex
1348 else:
1349 else:
1349 hexfunc = short
1350 hexfunc = short
1350 # as of now, wctx.node() and wctx.rev() return None, but we want to
1351 # as of now, wctx.node() and wctx.rev() return None, but we want to
1351 # show the same values as {node} and {rev} templatekw
1352 # show the same values as {node} and {rev} templatekw
1352 revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
1353 revnode = (scmutil.intrev(ctx), hexfunc(scmutil.binnode(ctx)))
1353
1354
1354 if self.ui.quiet:
1355 if self.ui.quiet:
1355 self.ui.write("%d:%s\n" % revnode, label='log.node')
1356 self.ui.write("%d:%s\n" % revnode, label='log.node')
1356 return
1357 return
1357
1358
1358 date = util.datestr(ctx.date())
1359 date = util.datestr(ctx.date())
1359
1360
1360 # i18n: column positioning for "hg log"
1361 # i18n: column positioning for "hg log"
1361 self.ui.write(_("changeset: %d:%s\n") % revnode,
1362 self.ui.write(_("changeset: %d:%s\n") % revnode,
1362 label=_changesetlabels(ctx))
1363 label=_changesetlabels(ctx))
1363
1364
1364 # branches are shown first before any other names due to backwards
1365 # branches are shown first before any other names due to backwards
1365 # compatibility
1366 # compatibility
1366 branch = ctx.branch()
1367 branch = ctx.branch()
1367 # don't show the default branch name
1368 # don't show the default branch name
1368 if branch != 'default':
1369 if branch != 'default':
1369 # i18n: column positioning for "hg log"
1370 # i18n: column positioning for "hg log"
1370 self.ui.write(_("branch: %s\n") % branch,
1371 self.ui.write(_("branch: %s\n") % branch,
1371 label='log.branch')
1372 label='log.branch')
1372
1373
1373 for nsname, ns in self.repo.names.iteritems():
1374 for nsname, ns in self.repo.names.iteritems():
1374 # branches has special logic already handled above, so here we just
1375 # branches has special logic already handled above, so here we just
1375 # skip it
1376 # skip it
1376 if nsname == 'branches':
1377 if nsname == 'branches':
1377 continue
1378 continue
1378 # we will use the templatename as the color name since those two
1379 # we will use the templatename as the color name since those two
1379 # should be the same
1380 # should be the same
1380 for name in ns.names(self.repo, changenode):
1381 for name in ns.names(self.repo, changenode):
1381 self.ui.write(ns.logfmt % name,
1382 self.ui.write(ns.logfmt % name,
1382 label='log.%s' % ns.colorname)
1383 label='log.%s' % ns.colorname)
1383 if self.ui.debugflag:
1384 if self.ui.debugflag:
1384 # i18n: column positioning for "hg log"
1385 # i18n: column positioning for "hg log"
1385 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1386 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1386 label='log.phase')
1387 label='log.phase')
1387 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1388 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1388 label = 'log.parent changeset.%s' % pctx.phasestr()
1389 label = 'log.parent changeset.%s' % pctx.phasestr()
1389 # i18n: column positioning for "hg log"
1390 # i18n: column positioning for "hg log"
1390 self.ui.write(_("parent: %d:%s\n")
1391 self.ui.write(_("parent: %d:%s\n")
1391 % (pctx.rev(), hexfunc(pctx.node())),
1392 % (pctx.rev(), hexfunc(pctx.node())),
1392 label=label)
1393 label=label)
1393
1394
1394 if self.ui.debugflag and rev is not None:
1395 if self.ui.debugflag and rev is not None:
1395 mnode = ctx.manifestnode()
1396 mnode = ctx.manifestnode()
1396 # i18n: column positioning for "hg log"
1397 # i18n: column positioning for "hg log"
1397 self.ui.write(_("manifest: %d:%s\n") %
1398 self.ui.write(_("manifest: %d:%s\n") %
1398 (self.repo.manifestlog._revlog.rev(mnode),
1399 (self.repo.manifestlog._revlog.rev(mnode),
1399 hex(mnode)),
1400 hex(mnode)),
1400 label='ui.debug log.manifest')
1401 label='ui.debug log.manifest')
1401 # i18n: column positioning for "hg log"
1402 # i18n: column positioning for "hg log"
1402 self.ui.write(_("user: %s\n") % ctx.user(),
1403 self.ui.write(_("user: %s\n") % ctx.user(),
1403 label='log.user')
1404 label='log.user')
1404 # i18n: column positioning for "hg log"
1405 # i18n: column positioning for "hg log"
1405 self.ui.write(_("date: %s\n") % date,
1406 self.ui.write(_("date: %s\n") % date,
1406 label='log.date')
1407 label='log.date')
1407
1408
1408 if ctx.troubled():
1409 if ctx.troubled():
1409 # i18n: column positioning for "hg log"
1410 # i18n: column positioning for "hg log"
1410 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1411 self.ui.write(_("trouble: %s\n") % ', '.join(ctx.troubles()),
1411 label='log.trouble')
1412 label='log.trouble')
1412
1413
1413 if self.ui.debugflag:
1414 if self.ui.debugflag:
1414 files = ctx.p1().status(ctx)[:3]
1415 files = ctx.p1().status(ctx)[:3]
1415 for key, value in zip([# i18n: column positioning for "hg log"
1416 for key, value in zip([# i18n: column positioning for "hg log"
1416 _("files:"),
1417 _("files:"),
1417 # i18n: column positioning for "hg log"
1418 # i18n: column positioning for "hg log"
1418 _("files+:"),
1419 _("files+:"),
1419 # i18n: column positioning for "hg log"
1420 # i18n: column positioning for "hg log"
1420 _("files-:")], files):
1421 _("files-:")], files):
1421 if value:
1422 if value:
1422 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1423 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1423 label='ui.debug log.files')
1424 label='ui.debug log.files')
1424 elif ctx.files() and self.ui.verbose:
1425 elif ctx.files() and self.ui.verbose:
1425 # i18n: column positioning for "hg log"
1426 # i18n: column positioning for "hg log"
1426 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1427 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1427 label='ui.note log.files')
1428 label='ui.note log.files')
1428 if copies and self.ui.verbose:
1429 if copies and self.ui.verbose:
1429 copies = ['%s (%s)' % c for c in copies]
1430 copies = ['%s (%s)' % c for c in copies]
1430 # i18n: column positioning for "hg log"
1431 # i18n: column positioning for "hg log"
1431 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1432 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1432 label='ui.note log.copies')
1433 label='ui.note log.copies')
1433
1434
1434 extra = ctx.extra()
1435 extra = ctx.extra()
1435 if extra and self.ui.debugflag:
1436 if extra and self.ui.debugflag:
1436 for key, value in sorted(extra.items()):
1437 for key, value in sorted(extra.items()):
1437 # i18n: column positioning for "hg log"
1438 # i18n: column positioning for "hg log"
1438 self.ui.write(_("extra: %s=%s\n")
1439 self.ui.write(_("extra: %s=%s\n")
1439 % (key, util.escapestr(value)),
1440 % (key, util.escapestr(value)),
1440 label='ui.debug log.extra')
1441 label='ui.debug log.extra')
1441
1442
1442 description = ctx.description().strip()
1443 description = ctx.description().strip()
1443 if description:
1444 if description:
1444 if self.ui.verbose:
1445 if self.ui.verbose:
1445 self.ui.write(_("description:\n"),
1446 self.ui.write(_("description:\n"),
1446 label='ui.note log.description')
1447 label='ui.note log.description')
1447 self.ui.write(description,
1448 self.ui.write(description,
1448 label='ui.note log.description')
1449 label='ui.note log.description')
1449 self.ui.write("\n\n")
1450 self.ui.write("\n\n")
1450 else:
1451 else:
1451 # i18n: column positioning for "hg log"
1452 # i18n: column positioning for "hg log"
1452 self.ui.write(_("summary: %s\n") %
1453 self.ui.write(_("summary: %s\n") %
1453 description.splitlines()[0],
1454 description.splitlines()[0],
1454 label='log.summary')
1455 label='log.summary')
1455 self.ui.write("\n")
1456 self.ui.write("\n")
1456
1457
1457 self.showpatch(ctx, matchfn)
1458 self.showpatch(ctx, matchfn)
1458
1459
1459 def showpatch(self, ctx, matchfn):
1460 def showpatch(self, ctx, matchfn):
1460 if not matchfn:
1461 if not matchfn:
1461 matchfn = self.matchfn
1462 matchfn = self.matchfn
1462 if matchfn:
1463 if matchfn:
1463 stat = self.diffopts.get('stat')
1464 stat = self.diffopts.get('stat')
1464 diff = self.diffopts.get('patch')
1465 diff = self.diffopts.get('patch')
1465 diffopts = patch.diffallopts(self.ui, self.diffopts)
1466 diffopts = patch.diffallopts(self.ui, self.diffopts)
1466 node = ctx.node()
1467 node = ctx.node()
1467 prev = ctx.p1().node()
1468 prev = ctx.p1().node()
1468 if stat:
1469 if stat:
1469 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1470 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1470 match=matchfn, stat=True)
1471 match=matchfn, stat=True)
1471 if diff:
1472 if diff:
1472 if stat:
1473 if stat:
1473 self.ui.write("\n")
1474 self.ui.write("\n")
1474 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1475 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1475 match=matchfn, stat=False)
1476 match=matchfn, stat=False)
1476 self.ui.write("\n")
1477 self.ui.write("\n")
1477
1478
1478 class jsonchangeset(changeset_printer):
1479 class jsonchangeset(changeset_printer):
1479 '''format changeset information.'''
1480 '''format changeset information.'''
1480
1481
1481 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1482 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1482 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1483 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1483 self.cache = {}
1484 self.cache = {}
1484 self._first = True
1485 self._first = True
1485
1486
1486 def close(self):
1487 def close(self):
1487 if not self._first:
1488 if not self._first:
1488 self.ui.write("\n]\n")
1489 self.ui.write("\n]\n")
1489 else:
1490 else:
1490 self.ui.write("[]\n")
1491 self.ui.write("[]\n")
1491
1492
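# For reference, the stream produced by close() above and _show() below is a
# JSON array; with --quiet a single entry looks roughly like (values are
# illustrative):
#
#     [
#      {
#       "rev": 0,
#       "node": "a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0a0"
#      }
#     ]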
1492 def _show(self, ctx, copies, matchfn, props):
1493 def _show(self, ctx, copies, matchfn, props):
1493 '''show a single changeset or file revision'''
1494 '''show a single changeset or file revision'''
1494 rev = ctx.rev()
1495 rev = ctx.rev()
1495 if rev is None:
1496 if rev is None:
1496 jrev = jnode = 'null'
1497 jrev = jnode = 'null'
1497 else:
1498 else:
1498 jrev = '%d' % rev
1499 jrev = '%d' % rev
1499 jnode = '"%s"' % hex(ctx.node())
1500 jnode = '"%s"' % hex(ctx.node())
1500 j = encoding.jsonescape
1501 j = encoding.jsonescape
1501
1502
1502 if self._first:
1503 if self._first:
1503 self.ui.write("[\n {")
1504 self.ui.write("[\n {")
1504 self._first = False
1505 self._first = False
1505 else:
1506 else:
1506 self.ui.write(",\n {")
1507 self.ui.write(",\n {")
1507
1508
1508 if self.ui.quiet:
1509 if self.ui.quiet:
1509 self.ui.write(('\n "rev": %s') % jrev)
1510 self.ui.write(('\n "rev": %s') % jrev)
1510 self.ui.write((',\n "node": %s') % jnode)
1511 self.ui.write((',\n "node": %s') % jnode)
1511 self.ui.write('\n }')
1512 self.ui.write('\n }')
1512 return
1513 return
1513
1514
1514 self.ui.write(('\n "rev": %s') % jrev)
1515 self.ui.write(('\n "rev": %s') % jrev)
1515 self.ui.write((',\n "node": %s') % jnode)
1516 self.ui.write((',\n "node": %s') % jnode)
1516 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1517 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1517 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1518 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1518 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1519 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1519 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1520 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1520 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1521 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1521
1522
1522 self.ui.write((',\n "bookmarks": [%s]') %
1523 self.ui.write((',\n "bookmarks": [%s]') %
1523 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1524 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1524 self.ui.write((',\n "tags": [%s]') %
1525 self.ui.write((',\n "tags": [%s]') %
1525 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1526 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1526 self.ui.write((',\n "parents": [%s]') %
1527 self.ui.write((',\n "parents": [%s]') %
1527 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1528 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1528
1529
1529 if self.ui.debugflag:
1530 if self.ui.debugflag:
1530 if rev is None:
1531 if rev is None:
1531 jmanifestnode = 'null'
1532 jmanifestnode = 'null'
1532 else:
1533 else:
1533 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1534 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1534 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1535 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1535
1536
1536 self.ui.write((',\n "extra": {%s}') %
1537 self.ui.write((',\n "extra": {%s}') %
1537 ", ".join('"%s": "%s"' % (j(k), j(v))
1538 ", ".join('"%s": "%s"' % (j(k), j(v))
1538 for k, v in ctx.extra().items()))
1539 for k, v in ctx.extra().items()))
1539
1540
1540 files = ctx.p1().status(ctx)
1541 files = ctx.p1().status(ctx)
1541 self.ui.write((',\n "modified": [%s]') %
1542 self.ui.write((',\n "modified": [%s]') %
1542 ", ".join('"%s"' % j(f) for f in files[0]))
1543 ", ".join('"%s"' % j(f) for f in files[0]))
1543 self.ui.write((',\n "added": [%s]') %
1544 self.ui.write((',\n "added": [%s]') %
1544 ", ".join('"%s"' % j(f) for f in files[1]))
1545 ", ".join('"%s"' % j(f) for f in files[1]))
1545 self.ui.write((',\n "removed": [%s]') %
1546 self.ui.write((',\n "removed": [%s]') %
1546 ", ".join('"%s"' % j(f) for f in files[2]))
1547 ", ".join('"%s"' % j(f) for f in files[2]))
1547
1548
1548 elif self.ui.verbose:
1549 elif self.ui.verbose:
1549 self.ui.write((',\n "files": [%s]') %
1550 self.ui.write((',\n "files": [%s]') %
1550 ", ".join('"%s"' % j(f) for f in ctx.files()))
1551 ", ".join('"%s"' % j(f) for f in ctx.files()))
1551
1552
1552 if copies:
1553 if copies:
1553 self.ui.write((',\n "copies": {%s}') %
1554 self.ui.write((',\n "copies": {%s}') %
1554 ", ".join('"%s": "%s"' % (j(k), j(v))
1555 ", ".join('"%s": "%s"' % (j(k), j(v))
1555 for k, v in copies))
1556 for k, v in copies))
1556
1557
1557 matchfn = self.matchfn
1558 matchfn = self.matchfn
1558 if matchfn:
1559 if matchfn:
1559 stat = self.diffopts.get('stat')
1560 stat = self.diffopts.get('stat')
1560 diff = self.diffopts.get('patch')
1561 diff = self.diffopts.get('patch')
1561 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1562 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1562 node, prev = ctx.node(), ctx.p1().node()
1563 node, prev = ctx.node(), ctx.p1().node()
1563 if stat:
1564 if stat:
1564 self.ui.pushbuffer()
1565 self.ui.pushbuffer()
1565 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1566 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1566 match=matchfn, stat=True)
1567 match=matchfn, stat=True)
1567 self.ui.write((',\n "diffstat": "%s"')
1568 self.ui.write((',\n "diffstat": "%s"')
1568 % j(self.ui.popbuffer()))
1569 % j(self.ui.popbuffer()))
1569 if diff:
1570 if diff:
1570 self.ui.pushbuffer()
1571 self.ui.pushbuffer()
1571 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1572 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1572 match=matchfn, stat=False)
1573 match=matchfn, stat=False)
1573 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1574 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1574
1575
1575 self.ui.write("\n }")
1576 self.ui.write("\n }")
1576
1577
1577 class changeset_templater(changeset_printer):
1578 class changeset_templater(changeset_printer):
1578 '''format changeset information.'''
1579 '''format changeset information.'''
1579
1580
1580 # Arguments before "buffered" used to be positional. Consider not
1581 # Arguments before "buffered" used to be positional. Consider not
1581 # adding/removing arguments before "buffered" to not break callers.
1582 # adding/removing arguments before "buffered" to not break callers.
1582 def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
1583 def __init__(self, ui, repo, tmplspec, matchfn=None, diffopts=None,
1583 buffered=False):
1584 buffered=False):
1584 diffopts = diffopts or {}
1585 diffopts = diffopts or {}
1585
1586
1586 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1587 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1587 self.t = formatter.loadtemplater(ui, tmplspec,
1588 self.t = formatter.loadtemplater(ui, tmplspec,
1588 cache=templatekw.defaulttempl)
1589 cache=templatekw.defaulttempl)
1589 self._counter = itertools.count()
1590 self._counter = itertools.count()
1590 self.cache = {}
1591 self.cache = {}
1591
1592
1592 self._tref = tmplspec.ref
1593 self._tref = tmplspec.ref
1593 self._parts = {'header': '', 'footer': '',
1594 self._parts = {'header': '', 'footer': '',
1594 tmplspec.ref: tmplspec.ref,
1595 tmplspec.ref: tmplspec.ref,
1595 'docheader': '', 'docfooter': '',
1596 'docheader': '', 'docfooter': '',
1596 'separator': ''}
1597 'separator': ''}
1597 if tmplspec.mapfile:
1598 if tmplspec.mapfile:
1598 # find correct templates for current mode, for backward
1599 # find correct templates for current mode, for backward
1599 # compatibility with 'log -v/-q/--debug' using a mapfile
1600 # compatibility with 'log -v/-q/--debug' using a mapfile
1600 tmplmodes = [
1601 tmplmodes = [
1601 (True, ''),
1602 (True, ''),
1602 (self.ui.verbose, '_verbose'),
1603 (self.ui.verbose, '_verbose'),
1603 (self.ui.quiet, '_quiet'),
1604 (self.ui.quiet, '_quiet'),
1604 (self.ui.debugflag, '_debug'),
1605 (self.ui.debugflag, '_debug'),
1605 ]
1606 ]
1606 for mode, postfix in tmplmodes:
1607 for mode, postfix in tmplmodes:
1607 for t in self._parts:
1608 for t in self._parts:
1608 cur = t + postfix
1609 cur = t + postfix
1609 if mode and cur in self.t:
1610 if mode and cur in self.t:
1610 self._parts[t] = cur
1611 self._parts[t] = cur
1611 else:
1612 else:
1612 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
1613 partnames = [p for p in self._parts.keys() if p != tmplspec.ref]
1613 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
1614 m = formatter.templatepartsmap(tmplspec, self.t, partnames)
1614 self._parts.update(m)
1615 self._parts.update(m)
1615
1616
1616 if self._parts['docheader']:
1617 if self._parts['docheader']:
1617 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1618 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1618
1619
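# For example (illustrative), a style mapfile may define both
#
#     changeset = '{rev}\n'
#     changeset_debug = '{rev}:{node}\n'
#
# and with --debug the mode loop in __init__ above picks 'changeset_debug'
# over the plain 'changeset' part.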
1619 def close(self):
1620 def close(self):
1620 if self._parts['docfooter']:
1621 if self._parts['docfooter']:
1621 if not self.footer:
1622 if not self.footer:
1622 self.footer = ""
1623 self.footer = ""
1623 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1624 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1624 return super(changeset_templater, self).close()
1625 return super(changeset_templater, self).close()
1625
1626
1626 def _show(self, ctx, copies, matchfn, props):
1627 def _show(self, ctx, copies, matchfn, props):
1627 '''show a single changeset or file revision'''
1628 '''show a single changeset or file revision'''
1628 props = props.copy()
1629 props = props.copy()
1629 props.update(templatekw.keywords)
1630 props.update(templatekw.keywords)
1630 props['templ'] = self.t
1631 props['templ'] = self.t
1631 props['ctx'] = ctx
1632 props['ctx'] = ctx
1632 props['repo'] = self.repo
1633 props['repo'] = self.repo
1633 props['ui'] = self.repo.ui
1634 props['ui'] = self.repo.ui
1634 props['index'] = index = next(self._counter)
1635 props['index'] = index = next(self._counter)
1635 props['revcache'] = {'copies': copies}
1636 props['revcache'] = {'copies': copies}
1636 props['cache'] = self.cache
1637 props['cache'] = self.cache
1637 props = pycompat.strkwargs(props)
1638 props = pycompat.strkwargs(props)
1638
1639
1639 # write separator, which wouldn't work well with the header part below
1640 # write separator, which wouldn't work well with the header part below
1640 # since there's inherently a conflict between header (across items) and
1641 # since there's inherently a conflict between header (across items) and
1641 # separator (per item)
1642 # separator (per item)
1642 if self._parts['separator'] and index > 0:
1643 if self._parts['separator'] and index > 0:
1643 self.ui.write(templater.stringify(self.t(self._parts['separator'])))
1644 self.ui.write(templater.stringify(self.t(self._parts['separator'])))
1644
1645
1645 # write header
1646 # write header
1646 if self._parts['header']:
1647 if self._parts['header']:
1647 h = templater.stringify(self.t(self._parts['header'], **props))
1648 h = templater.stringify(self.t(self._parts['header'], **props))
1648 if self.buffered:
1649 if self.buffered:
1649 self.header[ctx.rev()] = h
1650 self.header[ctx.rev()] = h
1650 else:
1651 else:
1651 if self.lastheader != h:
1652 if self.lastheader != h:
1652 self.lastheader = h
1653 self.lastheader = h
1653 self.ui.write(h)
1654 self.ui.write(h)
1654
1655
1655 # write changeset metadata, then patch if requested
1656 # write changeset metadata, then patch if requested
1656 key = self._parts[self._tref]
1657 key = self._parts[self._tref]
1657 self.ui.write(templater.stringify(self.t(key, **props)))
1658 self.ui.write(templater.stringify(self.t(key, **props)))
1658 self.showpatch(ctx, matchfn)
1659 self.showpatch(ctx, matchfn)
1659
1660
1660 if self._parts['footer']:
1661 if self._parts['footer']:
1661 if not self.footer:
1662 if not self.footer:
1662 self.footer = templater.stringify(
1663 self.footer = templater.stringify(
1663 self.t(self._parts['footer'], **props))
1664 self.t(self._parts['footer'], **props))
1664
1665
1665 def logtemplatespec(tmpl, mapfile):
1666 def logtemplatespec(tmpl, mapfile):
1666 if mapfile:
1667 if mapfile:
1667 return formatter.templatespec('changeset', tmpl, mapfile)
1668 return formatter.templatespec('changeset', tmpl, mapfile)
1668 else:
1669 else:
1669 return formatter.templatespec('', tmpl, None)
1670 return formatter.templatespec('', tmpl, None)
1670
1671
1671 def _lookuplogtemplate(ui, tmpl, style):
1672 def _lookuplogtemplate(ui, tmpl, style):
1672 """Find the template matching the given template spec or style
1673 """Find the template matching the given template spec or style
1673
1674
1674 See formatter.lookuptemplate() for details.
1675 See formatter.lookuptemplate() for details.
1675 """
1676 """
1676
1677
1677 # ui settings
1678 # ui settings
1678 if not tmpl and not style: # templates are stronger than style
1679 if not tmpl and not style: # templates are stronger than style
1679 tmpl = ui.config('ui', 'logtemplate')
1680 tmpl = ui.config('ui', 'logtemplate')
1680 if tmpl:
1681 if tmpl:
1681 return logtemplatespec(templater.unquotestring(tmpl), None)
1682 return logtemplatespec(templater.unquotestring(tmpl), None)
1682 else:
1683 else:
1683 style = util.expandpath(ui.config('ui', 'style', ''))
1684 style = util.expandpath(ui.config('ui', 'style', ''))
1684
1685
1685 if not tmpl and style:
1686 if not tmpl and style:
1686 mapfile = style
1687 mapfile = style
1687 if not os.path.split(mapfile)[0]:
1688 if not os.path.split(mapfile)[0]:
1688 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1689 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1689 or templater.templatepath(mapfile))
1690 or templater.templatepath(mapfile))
1690 if mapname:
1691 if mapname:
1691 mapfile = mapname
1692 mapfile = mapname
1692 return logtemplatespec(None, mapfile)
1693 return logtemplatespec(None, mapfile)
1693
1694
1694 if not tmpl:
1695 if not tmpl:
1695 return logtemplatespec(None, None)
1696 return logtemplatespec(None, None)
1696
1697
1697 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1698 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1698
1699
1699 def makelogtemplater(ui, repo, tmpl, buffered=False):
1700 def makelogtemplater(ui, repo, tmpl, buffered=False):
1700 """Create a changeset_templater from a literal template 'tmpl'"""
1701 """Create a changeset_templater from a literal template 'tmpl'"""
1701 spec = logtemplatespec(tmpl, None)
1702 spec = logtemplatespec(tmpl, None)
1702 return changeset_templater(ui, repo, spec, buffered=buffered)
1703 return changeset_templater(ui, repo, spec, buffered=buffered)
1703
1704
1704 def show_changeset(ui, repo, opts, buffered=False):
1705 def show_changeset(ui, repo, opts, buffered=False):
1705 """show one changeset using template or regular display.
1706 """show one changeset using template or regular display.
1706
1707
1707 Display format will be the first non-empty hit of:
1708 Display format will be the first non-empty hit of:
1708 1. option 'template'
1709 1. option 'template'
1709 2. option 'style'
1710 2. option 'style'
1710 3. [ui] setting 'logtemplate'
1711 3. [ui] setting 'logtemplate'
1711 4. [ui] setting 'style'
1712 4. [ui] setting 'style'
1712 If all of these values are either unset or the empty string,
1713 If all of these values are either unset or the empty string,
1713 regular display via changeset_printer() is done.
1714 regular display via changeset_printer() is done.
1714 """
1715 """
1715 # options
1716 # options
1716 matchfn = None
1717 matchfn = None
1717 if opts.get('patch') or opts.get('stat'):
1718 if opts.get('patch') or opts.get('stat'):
1718 matchfn = scmutil.matchall(repo)
1719 matchfn = scmutil.matchall(repo)
1719
1720
1720 if opts.get('template') == 'json':
1721 if opts.get('template') == 'json':
1721 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1722 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1722
1723
1723 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
1724 spec = _lookuplogtemplate(ui, opts.get('template'), opts.get('style'))
1724
1725
1725 if not spec.ref and not spec.tmpl and not spec.mapfile:
1726 if not spec.ref and not spec.tmpl and not spec.mapfile:
1726 return changeset_printer(ui, repo, matchfn, opts, buffered)
1727 return changeset_printer(ui, repo, matchfn, opts, buffered)
1727
1728
1728 return changeset_templater(ui, repo, spec, matchfn, opts, buffered)
1729 return changeset_templater(ui, repo, spec, matchfn, opts, buffered)
1729
1730
1730 def showmarker(fm, marker, index=None):
1731 def showmarker(fm, marker, index=None):
1731 """utility function to display obsolescence marker in a readable way
1732 """utility function to display obsolescence marker in a readable way
1732
1733
1733 To be used by debug function."""
1734 To be used by debug function."""
1734 if index is not None:
1735 if index is not None:
1735 fm.write('index', '%i ', index)
1736 fm.write('index', '%i ', index)
1736 fm.write('precnode', '%s ', hex(marker.precnode()))
1737 fm.write('precnode', '%s ', hex(marker.precnode()))
1737 succs = marker.succnodes()
1738 succs = marker.succnodes()
1738 fm.condwrite(succs, 'succnodes', '%s ',
1739 fm.condwrite(succs, 'succnodes', '%s ',
1739 fm.formatlist(map(hex, succs), name='node'))
1740 fm.formatlist(map(hex, succs), name='node'))
1740 fm.write('flag', '%X ', marker.flags())
1741 fm.write('flag', '%X ', marker.flags())
1741 parents = marker.parentnodes()
1742 parents = marker.parentnodes()
1742 if parents is not None:
1743 if parents is not None:
1743 fm.write('parentnodes', '{%s} ',
1744 fm.write('parentnodes', '{%s} ',
1744 fm.formatlist(map(hex, parents), name='node', sep=', '))
1745 fm.formatlist(map(hex, parents), name='node', sep=', '))
1745 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1746 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1746 meta = marker.metadata().copy()
1747 meta = marker.metadata().copy()
1747 meta.pop('date', None)
1748 meta.pop('date', None)
1748 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1749 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1749 fm.plain('\n')
1750 fm.plain('\n')
1750
1751
1751 def finddate(ui, repo, date):
1752 def finddate(ui, repo, date):
1752 """Find the tipmost changeset that matches the given date spec"""
1753 """Find the tipmost changeset that matches the given date spec"""
1753
1754
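# Illustrative only: finddate(ui, repo, '2017-06-30') would walk the repo and
# return the matching revision number as a string such as '42', or raise
# Abort when nothing matches the date spec.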
1754 df = util.matchdate(date)
1755 df = util.matchdate(date)
1755 m = scmutil.matchall(repo)
1756 m = scmutil.matchall(repo)
1756 results = {}
1757 results = {}
1757
1758
1758 def prep(ctx, fns):
1759 def prep(ctx, fns):
1759 d = ctx.date()
1760 d = ctx.date()
1760 if df(d[0]):
1761 if df(d[0]):
1761 results[ctx.rev()] = d
1762 results[ctx.rev()] = d
1762
1763
1763 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1764 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1764 rev = ctx.rev()
1765 rev = ctx.rev()
1765 if rev in results:
1766 if rev in results:
1766 ui.status(_("found revision %s from %s\n") %
1767 ui.status(_("found revision %s from %s\n") %
1767 (rev, util.datestr(results[rev])))
1768 (rev, util.datestr(results[rev])))
1768 return '%d' % rev
1769 return '%d' % rev
1769
1770
1770 raise error.Abort(_("revision matching date not found"))
1771 raise error.Abort(_("revision matching date not found"))
1771
1772
1772 def increasingwindows(windowsize=8, sizelimit=512):
1773 def increasingwindows(windowsize=8, sizelimit=512):
1773 while True:
1774 while True:
1774 yield windowsize
1775 yield windowsize
1775 if windowsize < sizelimit:
1776 if windowsize < sizelimit:
1776 windowsize *= 2
1777 windowsize *= 2
1777
1778
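# (So increasingwindows() above yields window sizes 8, 16, 32, ..., up to 512
# and then keeps yielding 512 once the size limit is reached.)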
1778 class FileWalkError(Exception):
1779 class FileWalkError(Exception):
1779 pass
1780 pass
1780
1781
1781 def walkfilerevs(repo, match, follow, revs, fncache):
1782 def walkfilerevs(repo, match, follow, revs, fncache):
1782 '''Walks the file history for the matched files.
1783 '''Walks the file history for the matched files.
1783
1784
1784 Returns the changeset revs that are involved in the file history.
1785 Returns the changeset revs that are involved in the file history.
1785
1786
1786 Throws FileWalkError if the file history can't be walked using
1787 Throws FileWalkError if the file history can't be walked using
1787 filelogs alone.
1788 filelogs alone.
1788 '''
1789 '''
1789 wanted = set()
1790 wanted = set()
1790 copies = []
1791 copies = []
1791 minrev, maxrev = min(revs), max(revs)
1792 minrev, maxrev = min(revs), max(revs)
1792 def filerevgen(filelog, last):
1793 def filerevgen(filelog, last):
1793 """
1794 """
1794 Only files, no patterns. Check the history of each file.
1795 Only files, no patterns. Check the history of each file.
1795
1796
1796 Examines filelog entries within the minrev, maxrev linkrev range.
1797 Examines filelog entries within the minrev, maxrev linkrev range.
1797 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1798 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1798 tuples in backwards order.
1799 tuples in backwards order.
1799 """
1800 """
1800 cl_count = len(repo)
1801 cl_count = len(repo)
1801 revs = []
1802 revs = []
1802 for j in xrange(0, last + 1):
1803 for j in xrange(0, last + 1):
1803 linkrev = filelog.linkrev(j)
1804 linkrev = filelog.linkrev(j)
1804 if linkrev < minrev:
1805 if linkrev < minrev:
1805 continue
1806 continue
1806 # only yield revs for which we have the changelog; this can
1807 # only yield revs for which we have the changelog; this can
1807 # happen while doing "hg log" during a pull or commit
1808 # happen while doing "hg log" during a pull or commit
1808 if linkrev >= cl_count:
1809 if linkrev >= cl_count:
1809 break
1810 break
1810
1811
1811 parentlinkrevs = []
1812 parentlinkrevs = []
1812 for p in filelog.parentrevs(j):
1813 for p in filelog.parentrevs(j):
1813 if p != nullrev:
1814 if p != nullrev:
1814 parentlinkrevs.append(filelog.linkrev(p))
1815 parentlinkrevs.append(filelog.linkrev(p))
1815 n = filelog.node(j)
1816 n = filelog.node(j)
1816 revs.append((linkrev, parentlinkrevs,
1817 revs.append((linkrev, parentlinkrevs,
1817 follow and filelog.renamed(n)))
1818 follow and filelog.renamed(n)))
1818
1819
1819 return reversed(revs)
1820 return reversed(revs)
1820 def iterfiles():
1821 def iterfiles():
1821 pctx = repo['.']
1822 pctx = repo['.']
1822 for filename in match.files():
1823 for filename in match.files():
1823 if follow:
1824 if follow:
1824 if filename not in pctx:
1825 if filename not in pctx:
1825 raise error.Abort(_('cannot follow file not in parent '
1826 raise error.Abort(_('cannot follow file not in parent '
1826 'revision: "%s"') % filename)
1827 'revision: "%s"') % filename)
1827 yield filename, pctx[filename].filenode()
1828 yield filename, pctx[filename].filenode()
1828 else:
1829 else:
1829 yield filename, None
1830 yield filename, None
1830 for filename_node in copies:
1831 for filename_node in copies:
1831 yield filename_node
1832 yield filename_node
1832
1833
1833 for file_, node in iterfiles():
1834 for file_, node in iterfiles():
1834 filelog = repo.file(file_)
1835 filelog = repo.file(file_)
1835 if not len(filelog):
1836 if not len(filelog):
1836 if node is None:
1837 if node is None:
1837 # A zero count may be a directory or deleted file, so
1838 # A zero count may be a directory or deleted file, so
1838 # try to find matching entries on the slow path.
1839 # try to find matching entries on the slow path.
1839 if follow:
1840 if follow:
1840 raise error.Abort(
1841 raise error.Abort(
1841 _('cannot follow nonexistent file: "%s"') % file_)
1842 _('cannot follow nonexistent file: "%s"') % file_)
1842 raise FileWalkError("Cannot walk via filelog")
1843 raise FileWalkError("Cannot walk via filelog")
1843 else:
1844 else:
1844 continue
1845 continue
1845
1846
1846 if node is None:
1847 if node is None:
1847 last = len(filelog) - 1
1848 last = len(filelog) - 1
1848 else:
1849 else:
1849 last = filelog.rev(node)
1850 last = filelog.rev(node)
1850
1851
1851 # keep track of all ancestors of the file
1852 # keep track of all ancestors of the file
1852 ancestors = {filelog.linkrev(last)}
1853 ancestors = {filelog.linkrev(last)}
1853
1854
1854 # iterate from latest to oldest revision
1855 # iterate from latest to oldest revision
1855 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1856 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1856 if not follow:
1857 if not follow:
1857 if rev > maxrev:
1858 if rev > maxrev:
1858 continue
1859 continue
1859 else:
1860 else:
1860 # Note that last might not be the first interesting
1861 # Note that last might not be the first interesting
1861 # rev to us:
1862 # rev to us:
1862 # if the file has been changed after maxrev, we'll
1863 # if the file has been changed after maxrev, we'll
1863 # have linkrev(last) > maxrev, and we still need
1864 # have linkrev(last) > maxrev, and we still need
1864 # to explore the file graph
1865 # to explore the file graph
1865 if rev not in ancestors:
1866 if rev not in ancestors:
1866 continue
1867 continue
1867 # XXX insert 1327 fix here
1868 # XXX insert 1327 fix here
1868 if flparentlinkrevs:
1869 if flparentlinkrevs:
1869 ancestors.update(flparentlinkrevs)
1870 ancestors.update(flparentlinkrevs)
1870
1871
1871 fncache.setdefault(rev, []).append(file_)
1872 fncache.setdefault(rev, []).append(file_)
1872 wanted.add(rev)
1873 wanted.add(rev)
1873 if copied:
1874 if copied:
1874 copies.append(copied)
1875 copies.append(copied)
1875
1876
1876 return wanted
1877 return wanted
1877
1878
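# Illustrative note (editor's addition, not part of cmdutil.py): the fast path
# above never reads changelog entries; for each matched file it walks the
# filelog and maps every file revision back to a changelog revision through
# filelog.linkrev(). Roughly, with a hypothetical tracked path:
#
#     fl = repo.file('path/to/file')
#     linkrevs = [fl.linkrev(i) for i in xrange(len(fl))]
#     # each entry is the changelog rev that introduced that file revision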
1878 class _followfilter(object):
1879 class _followfilter(object):
1879 def __init__(self, repo, onlyfirst=False):
1880 def __init__(self, repo, onlyfirst=False):
1880 self.repo = repo
1881 self.repo = repo
1881 self.startrev = nullrev
1882 self.startrev = nullrev
1882 self.roots = set()
1883 self.roots = set()
1883 self.onlyfirst = onlyfirst
1884 self.onlyfirst = onlyfirst
1884
1885
1885 def match(self, rev):
1886 def match(self, rev):
1886 def realparents(rev):
1887 def realparents(rev):
1887 if self.onlyfirst:
1888 if self.onlyfirst:
1888 return self.repo.changelog.parentrevs(rev)[0:1]
1889 return self.repo.changelog.parentrevs(rev)[0:1]
1889 else:
1890 else:
1890 return filter(lambda x: x != nullrev,
1891 return filter(lambda x: x != nullrev,
1891 self.repo.changelog.parentrevs(rev))
1892 self.repo.changelog.parentrevs(rev))
1892
1893
1893 if self.startrev == nullrev:
1894 if self.startrev == nullrev:
1894 self.startrev = rev
1895 self.startrev = rev
1895 return True
1896 return True
1896
1897
1897 if rev > self.startrev:
1898 if rev > self.startrev:
1898 # forward: all descendants
1899 # forward: all descendants
1899 if not self.roots:
1900 if not self.roots:
1900 self.roots.add(self.startrev)
1901 self.roots.add(self.startrev)
1901 for parent in realparents(rev):
1902 for parent in realparents(rev):
1902 if parent in self.roots:
1903 if parent in self.roots:
1903 self.roots.add(rev)
1904 self.roots.add(rev)
1904 return True
1905 return True
1905 else:
1906 else:
1906 # backwards: all parents
1907 # backwards: all parents
1907 if not self.roots:
1908 if not self.roots:
1908 self.roots.update(realparents(self.startrev))
1909 self.roots.update(realparents(self.startrev))
1909 if rev in self.roots:
1910 if rev in self.roots:
1910 self.roots.remove(rev)
1911 self.roots.remove(rev)
1911 self.roots.update(realparents(rev))
1912 self.roots.update(realparents(rev))
1912 return True
1913 return True
1913
1914
1914 return False
1915 return False
1915
1916
1916 def walkchangerevs(repo, match, opts, prepare):
1917 def walkchangerevs(repo, match, opts, prepare):
1917 '''Iterate over files and the revs in which they changed.
1918 '''Iterate over files and the revs in which they changed.
1918
1919
1919 Callers most commonly need to iterate backwards over the history
1920 Callers most commonly need to iterate backwards over the history
1920 in which they are interested. Doing so has awful (quadratic-looking)
1921 in which they are interested. Doing so has awful (quadratic-looking)
1921 performance, so we use iterators in a "windowed" way.
1922 performance, so we use iterators in a "windowed" way.
1922
1923
1923 We walk a window of revisions in the desired order. Within the
1924 We walk a window of revisions in the desired order. Within the
1924 window, we first walk forwards to gather data, then in the desired
1925 window, we first walk forwards to gather data, then in the desired
1925 order (usually backwards) to display it.
1926 order (usually backwards) to display it.
1926
1927
1927 This function returns an iterator yielding contexts. Before
1928 This function returns an iterator yielding contexts. Before
1928 yielding each context, the iterator will first call the prepare
1929 yielding each context, the iterator will first call the prepare
1929 function on each context in the window in forward order.'''
1930 function on each context in the window in forward order.'''
1930
1931
1931 follow = opts.get('follow') or opts.get('follow_first')
1932 follow = opts.get('follow') or opts.get('follow_first')
1932 revs = _logrevs(repo, opts)
1933 revs = _logrevs(repo, opts)
1933 if not revs:
1934 if not revs:
1934 return []
1935 return []
1935 wanted = set()
1936 wanted = set()
1936 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1937 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1937 opts.get('removed'))
1938 opts.get('removed'))
1938 fncache = {}
1939 fncache = {}
1939 change = repo.changectx
1940 change = repo.changectx
1940
1941
1941 # First step is to fill wanted, the set of revisions that we want to yield.
1942 # First step is to fill wanted, the set of revisions that we want to yield.
1942 # When it does not induce extra cost, we also fill fncache for revisions in
1943 # When it does not induce extra cost, we also fill fncache for revisions in
1943 # wanted: a cache of filenames that were changed (ctx.files()) and that
1944 # wanted: a cache of filenames that were changed (ctx.files()) and that
1944 # match the file filtering conditions.
1945 # match the file filtering conditions.
1945
1946
1946 if match.always():
1947 if match.always():
1947 # No files, no patterns. Display all revs.
1948 # No files, no patterns. Display all revs.
1948 wanted = revs
1949 wanted = revs
1949 elif not slowpath:
1950 elif not slowpath:
1950 # We only have to read through the filelog to find wanted revisions
1951 # We only have to read through the filelog to find wanted revisions
1951
1952
1952 try:
1953 try:
1953 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1954 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1954 except FileWalkError:
1955 except FileWalkError:
1955 slowpath = True
1956 slowpath = True
1956
1957
1957 # We decided to fall back to the slowpath because at least one
1958 # We decided to fall back to the slowpath because at least one
1958 # of the paths was not a file. Check to see if at least one of them
1959 # of the paths was not a file. Check to see if at least one of them
1959 # existed in history; otherwise, simply return
1960 # existed in history; otherwise, simply return
1960 for path in match.files():
1961 for path in match.files():
1961 if path == '.' or path in repo.store:
1962 if path == '.' or path in repo.store:
1962 break
1963 break
1963 else:
1964 else:
1964 return []
1965 return []
1965
1966
1966 if slowpath:
1967 if slowpath:
1967 # We have to read the changelog to match filenames against
1968 # We have to read the changelog to match filenames against
1968 # changed files
1969 # changed files
1969
1970
1970 if follow:
1971 if follow:
1971 raise error.Abort(_('can only follow copies/renames for explicit '
1972 raise error.Abort(_('can only follow copies/renames for explicit '
1972 'filenames'))
1973 'filenames'))
1973
1974
1974 # The slow path checks files modified in every changeset.
1975 # The slow path checks files modified in every changeset.
1975 # This is really slow on large repos, so compute the set lazily.
1976 # This is really slow on large repos, so compute the set lazily.
1976 class lazywantedset(object):
1977 class lazywantedset(object):
1977 def __init__(self):
1978 def __init__(self):
1978 self.set = set()
1979 self.set = set()
1979 self.revs = set(revs)
1980 self.revs = set(revs)
1980
1981
1981 # No need to worry about locality here because it will be accessed
1982 # No need to worry about locality here because it will be accessed
1982 # in the same order as the increasing window below.
1983 # in the same order as the increasing window below.
1983 def __contains__(self, value):
1984 def __contains__(self, value):
1984 if value in self.set:
1985 if value in self.set:
1985 return True
1986 return True
1986 elif value not in self.revs:
1987 elif value not in self.revs:
1987 return False
1988 return False
1988 else:
1989 else:
1989 self.revs.discard(value)
1990 self.revs.discard(value)
1990 ctx = change(value)
1991 ctx = change(value)
1991 matches = filter(match, ctx.files())
1992 matches = filter(match, ctx.files())
1992 if matches:
1993 if matches:
1993 fncache[value] = matches
1994 fncache[value] = matches
1994 self.set.add(value)
1995 self.set.add(value)
1995 return True
1996 return True
1996 return False
1997 return False
1997
1998
1998 def discard(self, value):
1999 def discard(self, value):
1999 self.revs.discard(value)
2000 self.revs.discard(value)
2000 self.set.discard(value)
2001 self.set.discard(value)
2001
2002
2002 wanted = lazywantedset()
2003 wanted = lazywantedset()
2003
2004
2004 # it might be worthwhile to do this in the iterator if the rev range
2005 # it might be worthwhile to do this in the iterator if the rev range
2005 # is descending and the prune args are all within that range
2006 # is descending and the prune args are all within that range
2006 for rev in opts.get('prune', ()):
2007 for rev in opts.get('prune', ()):
2007 rev = repo[rev].rev()
2008 rev = repo[rev].rev()
2008 ff = _followfilter(repo)
2009 ff = _followfilter(repo)
2009 stop = min(revs[0], revs[-1])
2010 stop = min(revs[0], revs[-1])
2010 for x in xrange(rev, stop - 1, -1):
2011 for x in xrange(rev, stop - 1, -1):
2011 if ff.match(x):
2012 if ff.match(x):
2012 wanted = wanted - [x]
2013 wanted = wanted - [x]
2013
2014
2014 # Now that wanted is correctly initialized, we can iterate over the
2015 # Now that wanted is correctly initialized, we can iterate over the
2015 # revision range, yielding only revisions in wanted.
2016 # revision range, yielding only revisions in wanted.
2016 def iterate():
2017 def iterate():
2017 if follow and match.always():
2018 if follow and match.always():
2018 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
2019 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
2019 def want(rev):
2020 def want(rev):
2020 return ff.match(rev) and rev in wanted
2021 return ff.match(rev) and rev in wanted
2021 else:
2022 else:
2022 def want(rev):
2023 def want(rev):
2023 return rev in wanted
2024 return rev in wanted
2024
2025
2025 it = iter(revs)
2026 it = iter(revs)
2026 stopiteration = False
2027 stopiteration = False
2027 for windowsize in increasingwindows():
2028 for windowsize in increasingwindows():
2028 nrevs = []
2029 nrevs = []
2029 for i in xrange(windowsize):
2030 for i in xrange(windowsize):
2030 rev = next(it, None)
2031 rev = next(it, None)
2031 if rev is None:
2032 if rev is None:
2032 stopiteration = True
2033 stopiteration = True
2033 break
2034 break
2034 elif want(rev):
2035 elif want(rev):
2035 nrevs.append(rev)
2036 nrevs.append(rev)
2036 for rev in sorted(nrevs):
2037 for rev in sorted(nrevs):
2037 fns = fncache.get(rev)
2038 fns = fncache.get(rev)
2038 ctx = change(rev)
2039 ctx = change(rev)
2039 if not fns:
2040 if not fns:
2040 def fns_generator():
2041 def fns_generator():
2041 for f in ctx.files():
2042 for f in ctx.files():
2042 if match(f):
2043 if match(f):
2043 yield f
2044 yield f
2044 fns = fns_generator()
2045 fns = fns_generator()
2045 prepare(ctx, fns)
2046 prepare(ctx, fns)
2046 for rev in nrevs:
2047 for rev in nrevs:
2047 yield change(rev)
2048 yield change(rev)
2048
2049
2049 if stopiteration:
2050 if stopiteration:
2050 break
2051 break
2051
2052
2052 return iterate()
2053 return iterate()
2053
2054
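# Illustrative sketch (editor's addition, not part of cmdutil.py): the
# prepare-callback pattern expected by walkchangerevs(), mirroring how
# finddate() above uses it. The helper name and the "username" argument are
# hypothetical; "ui" and "repo" are assumed to be the usual objects.
#
#     def revscommittedby(ui, repo, username):
#         m = scmutil.matchall(repo)
#         hits = []
#         def prep(ctx, fns):
#             # called for every context in a window, in forward order,
#             # before that context is yielded by the iterator below
#             if username in ctx.user():
#                 hits.append(ctx.rev())
#         for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
#             pass  # contexts arrive in windowed (usually newest-first) order
#         return hits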
2054 def _makefollowlogfilematcher(repo, files, followfirst):
2055 def _makefollowlogfilematcher(repo, files, followfirst):
2055 # When displaying a revision with --patch --follow FILE, we have
2056 # When displaying a revision with --patch --follow FILE, we have
2056 # to know which file of the revision must be diffed. With
2057 # to know which file of the revision must be diffed. With
2057 # --follow, we want the names of the ancestors of FILE in the
2058 # --follow, we want the names of the ancestors of FILE in the
2058 # revision, stored in "fcache". "fcache" is populated by
2059 # revision, stored in "fcache". "fcache" is populated by
2059 # reproducing the graph traversal already done by --follow revset
2060 # reproducing the graph traversal already done by --follow revset
2060 # and relating revs to file names (which is not "correct" but
2061 # and relating revs to file names (which is not "correct" but
2061 # good enough).
2062 # good enough).
2062 fcache = {}
2063 fcache = {}
2063 fcacheready = [False]
2064 fcacheready = [False]
2064 pctx = repo['.']
2065 pctx = repo['.']
2065
2066
2066 def populate():
2067 def populate():
2067 for fn in files:
2068 for fn in files:
2068 fctx = pctx[fn]
2069 fctx = pctx[fn]
2069 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2070 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
2070 for c in fctx.ancestors(followfirst=followfirst):
2071 for c in fctx.ancestors(followfirst=followfirst):
2071 fcache.setdefault(c.rev(), set()).add(c.path())
2072 fcache.setdefault(c.rev(), set()).add(c.path())
2072
2073
2073 def filematcher(rev):
2074 def filematcher(rev):
2074 if not fcacheready[0]:
2075 if not fcacheready[0]:
2075 # Lazy initialization
2076 # Lazy initialization
2076 fcacheready[0] = True
2077 fcacheready[0] = True
2077 populate()
2078 populate()
2078 return scmutil.matchfiles(repo, fcache.get(rev, []))
2079 return scmutil.matchfiles(repo, fcache.get(rev, []))
2079
2080
2080 return filematcher
2081 return filematcher
2081
2082
2082 def _makenofollowlogfilematcher(repo, pats, opts):
2083 def _makenofollowlogfilematcher(repo, pats, opts):
2083 '''hook for extensions to override the filematcher for non-follow cases'''
2084 '''hook for extensions to override the filematcher for non-follow cases'''
2084 return None
2085 return None
2085
2086
2086 def _makelogrevset(repo, pats, opts, revs):
2087 def _makelogrevset(repo, pats, opts, revs):
2087 """Return (expr, filematcher) where expr is a revset string built
2088 """Return (expr, filematcher) where expr is a revset string built
2088 from log options and file patterns or None. If --stat or --patch
2089 from log options and file patterns or None. If --stat or --patch
2089 are not passed, filematcher is None. Otherwise it is a callable
2090 are not passed, filematcher is None. Otherwise it is a callable
2090 taking a revision number and returning a match object filtering
2091 taking a revision number and returning a match object filtering
2091 the files to be detailed when displaying the revision.
2092 the files to be detailed when displaying the revision.
2092 """
2093 """
2093 opt2revset = {
2094 opt2revset = {
2094 'no_merges': ('not merge()', None),
2095 'no_merges': ('not merge()', None),
2095 'only_merges': ('merge()', None),
2096 'only_merges': ('merge()', None),
2096 '_ancestors': ('ancestors(%(val)s)', None),
2097 '_ancestors': ('ancestors(%(val)s)', None),
2097 '_fancestors': ('_firstancestors(%(val)s)', None),
2098 '_fancestors': ('_firstancestors(%(val)s)', None),
2098 '_descendants': ('descendants(%(val)s)', None),
2099 '_descendants': ('descendants(%(val)s)', None),
2099 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2100 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2100 '_matchfiles': ('_matchfiles(%(val)s)', None),
2101 '_matchfiles': ('_matchfiles(%(val)s)', None),
2101 'date': ('date(%(val)r)', None),
2102 'date': ('date(%(val)r)', None),
2102 'branch': ('branch(%(val)r)', ' or '),
2103 'branch': ('branch(%(val)r)', ' or '),
2103 '_patslog': ('filelog(%(val)r)', ' or '),
2104 '_patslog': ('filelog(%(val)r)', ' or '),
2104 '_patsfollow': ('follow(%(val)r)', ' or '),
2105 '_patsfollow': ('follow(%(val)r)', ' or '),
2105 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2106 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2106 'keyword': ('keyword(%(val)r)', ' or '),
2107 'keyword': ('keyword(%(val)r)', ' or '),
2107 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2108 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2108 'user': ('user(%(val)r)', ' or '),
2109 'user': ('user(%(val)r)', ' or '),
2109 }
2110 }
2110
2111
2111 opts = dict(opts)
2112 opts = dict(opts)
2112 # follow or not follow?
2113 # follow or not follow?
2113 follow = opts.get('follow') or opts.get('follow_first')
2114 follow = opts.get('follow') or opts.get('follow_first')
2114 if opts.get('follow_first'):
2115 if opts.get('follow_first'):
2115 followfirst = 1
2116 followfirst = 1
2116 else:
2117 else:
2117 followfirst = 0
2118 followfirst = 0
2118 # --follow with FILE behavior depends on revs...
2119 # --follow with FILE behavior depends on revs...
2119 it = iter(revs)
2120 it = iter(revs)
2120 startrev = next(it)
2121 startrev = next(it)
2121 followdescendants = startrev < next(it, startrev)
2122 followdescendants = startrev < next(it, startrev)
2122
2123
2123 # branch and only_branch are really aliases and must be handled at
2124 # branch and only_branch are really aliases and must be handled at
2124 # the same time
2125 # the same time
2125 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2126 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2126 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2127 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2127 # pats/include/exclude are passed to match.match() directly in
2128 # pats/include/exclude are passed to match.match() directly in
2128 # _matchfiles() revset but walkchangerevs() builds its matcher with
2129 # _matchfiles() revset but walkchangerevs() builds its matcher with
2129 # scmutil.match(). The difference is input pats are globbed on
2130 # scmutil.match(). The difference is input pats are globbed on
2130 # platforms without shell expansion (windows).
2131 # platforms without shell expansion (windows).
2131 wctx = repo[None]
2132 wctx = repo[None]
2132 match, pats = scmutil.matchandpats(wctx, pats, opts)
2133 match, pats = scmutil.matchandpats(wctx, pats, opts)
2133 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2134 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2134 opts.get('removed'))
2135 opts.get('removed'))
2135 if not slowpath:
2136 if not slowpath:
2136 for f in match.files():
2137 for f in match.files():
2137 if follow and f not in wctx:
2138 if follow and f not in wctx:
2138 # If the file exists, it may be a directory, so let it
2139 # If the file exists, it may be a directory, so let it
2139 # take the slow path.
2140 # take the slow path.
2140 if os.path.exists(repo.wjoin(f)):
2141 if os.path.exists(repo.wjoin(f)):
2141 slowpath = True
2142 slowpath = True
2142 continue
2143 continue
2143 else:
2144 else:
2144 raise error.Abort(_('cannot follow file not in parent '
2145 raise error.Abort(_('cannot follow file not in parent '
2145 'revision: "%s"') % f)
2146 'revision: "%s"') % f)
2146 filelog = repo.file(f)
2147 filelog = repo.file(f)
2147 if not filelog:
2148 if not filelog:
2148 # A zero count may be a directory or deleted file, so
2149 # A zero count may be a directory or deleted file, so
2149 # try to find matching entries on the slow path.
2150 # try to find matching entries on the slow path.
2150 if follow:
2151 if follow:
2151 raise error.Abort(
2152 raise error.Abort(
2152 _('cannot follow nonexistent file: "%s"') % f)
2153 _('cannot follow nonexistent file: "%s"') % f)
2153 slowpath = True
2154 slowpath = True
2154
2155
2155 # We decided to fall back to the slowpath because at least one
2156 # We decided to fall back to the slowpath because at least one
2156 # of the paths was not a file. Check to see if at least one of them
2157 # of the paths was not a file. Check to see if at least one of them
2157 # existed in history - in that case, we'll continue down the
2158 # existed in history - in that case, we'll continue down the
2158 # slowpath; otherwise, we can turn off the slowpath
2159 # slowpath; otherwise, we can turn off the slowpath
2159 if slowpath:
2160 if slowpath:
2160 for path in match.files():
2161 for path in match.files():
2161 if path == '.' or path in repo.store:
2162 if path == '.' or path in repo.store:
2162 break
2163 break
2163 else:
2164 else:
2164 slowpath = False
2165 slowpath = False
2165
2166
2166 fpats = ('_patsfollow', '_patsfollowfirst')
2167 fpats = ('_patsfollow', '_patsfollowfirst')
2167 fnopats = (('_ancestors', '_fancestors'),
2168 fnopats = (('_ancestors', '_fancestors'),
2168 ('_descendants', '_fdescendants'))
2169 ('_descendants', '_fdescendants'))
2169 if slowpath:
2170 if slowpath:
2170 # See walkchangerevs() slow path.
2171 # See walkchangerevs() slow path.
2171 #
2172 #
2172 # pats/include/exclude cannot be represented as separate
2173 # pats/include/exclude cannot be represented as separate
2173 # revset expressions as their filtering logic applies at file
2174 # revset expressions as their filtering logic applies at file
2174 # level. For instance "-I a -X b" matches a revision touching
2175 # level. For instance "-I a -X b" matches a revision touching
2175 # "a" and "b" while "file(a) and not file(b)" does
2176 # "a" and "b" while "file(a) and not file(b)" does
2176 # not. Besides, filesets are evaluated against the working
2177 # not. Besides, filesets are evaluated against the working
2177 # directory.
2178 # directory.
2178 matchargs = ['r:', 'd:relpath']
2179 matchargs = ['r:', 'd:relpath']
2179 for p in pats:
2180 for p in pats:
2180 matchargs.append('p:' + p)
2181 matchargs.append('p:' + p)
2181 for p in opts.get('include', []):
2182 for p in opts.get('include', []):
2182 matchargs.append('i:' + p)
2183 matchargs.append('i:' + p)
2183 for p in opts.get('exclude', []):
2184 for p in opts.get('exclude', []):
2184 matchargs.append('x:' + p)
2185 matchargs.append('x:' + p)
2185 matchargs = ','.join(('%r' % p) for p in matchargs)
2186 matchargs = ','.join(('%r' % p) for p in matchargs)
2186 opts['_matchfiles'] = matchargs
2187 opts['_matchfiles'] = matchargs
2187 if follow:
2188 if follow:
2188 opts[fnopats[0][followfirst]] = '.'
2189 opts[fnopats[0][followfirst]] = '.'
2189 else:
2190 else:
2190 if follow:
2191 if follow:
2191 if pats:
2192 if pats:
2192 # follow() revset interprets its file argument as a
2193 # follow() revset interprets its file argument as a
2193 # manifest entry, so use match.files(), not pats.
2194 # manifest entry, so use match.files(), not pats.
2194 opts[fpats[followfirst]] = list(match.files())
2195 opts[fpats[followfirst]] = list(match.files())
2195 else:
2196 else:
2196 op = fnopats[followdescendants][followfirst]
2197 op = fnopats[followdescendants][followfirst]
2197 opts[op] = 'rev(%d)' % startrev
2198 opts[op] = 'rev(%d)' % startrev
2198 else:
2199 else:
2199 opts['_patslog'] = list(pats)
2200 opts['_patslog'] = list(pats)
2200
2201
2201 filematcher = None
2202 filematcher = None
2202 if opts.get('patch') or opts.get('stat'):
2203 if opts.get('patch') or opts.get('stat'):
2203 # When following files, track renames via a special matcher.
2204 # When following files, track renames via a special matcher.
2204 # If we're forced to take the slowpath it means we're following
2205 # If we're forced to take the slowpath it means we're following
2205 # at least one pattern/directory, so don't bother with rename tracking.
2206 # at least one pattern/directory, so don't bother with rename tracking.
2206 if follow and not match.always() and not slowpath:
2207 if follow and not match.always() and not slowpath:
2207 # _makefollowlogfilematcher expects its files argument to be
2208 # _makefollowlogfilematcher expects its files argument to be
2208 # relative to the repo root, so use match.files(), not pats.
2209 # relative to the repo root, so use match.files(), not pats.
2209 filematcher = _makefollowlogfilematcher(repo, match.files(),
2210 filematcher = _makefollowlogfilematcher(repo, match.files(),
2210 followfirst)
2211 followfirst)
2211 else:
2212 else:
2212 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2213 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2213 if filematcher is None:
2214 if filematcher is None:
2214 filematcher = lambda rev: match
2215 filematcher = lambda rev: match
2215
2216
2216 expr = []
2217 expr = []
2217 for op, val in sorted(opts.iteritems()):
2218 for op, val in sorted(opts.iteritems()):
2218 if not val:
2219 if not val:
2219 continue
2220 continue
2220 if op not in opt2revset:
2221 if op not in opt2revset:
2221 continue
2222 continue
2222 revop, andor = opt2revset[op]
2223 revop, andor = opt2revset[op]
2223 if '%(val)' not in revop:
2224 if '%(val)' not in revop:
2224 expr.append(revop)
2225 expr.append(revop)
2225 else:
2226 else:
2226 if not isinstance(val, list):
2227 if not isinstance(val, list):
2227 e = revop % {'val': val}
2228 e = revop % {'val': val}
2228 else:
2229 else:
2229 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2230 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2230 expr.append(e)
2231 expr.append(e)
2231
2232
2232 if expr:
2233 if expr:
2233 expr = '(' + ' and '.join(expr) + ')'
2234 expr = '(' + ' and '.join(expr) + ')'
2234 else:
2235 else:
2235 expr = None
2236 expr = None
2236 return expr, filematcher
2237 return expr, filematcher
2237
2238
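# Illustrative sketch (editor's addition, not part of cmdutil.py): how log
# options become a revset expression. With hypothetical options such as
#
#     opts = {'user': ['alice'], 'keyword': ['bug'], 'no_merges': True}
#
# the loop above looks each key up in opt2revset and joins the fragments with
# "and", producing an expression roughly like
#
#     ((keyword('bug')) and not merge() and (user('alice')))
#
# which getlogrevs()/getgraphlogrevs() below then hand to revset.match() to
# filter the candidate revisions.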
2238 def _logrevs(repo, opts):
2239 def _logrevs(repo, opts):
2239 # Default --rev value depends on --follow but --follow behavior
2240 # Default --rev value depends on --follow but --follow behavior
2240 # depends on revisions resolved from --rev...
2241 # depends on revisions resolved from --rev...
2241 follow = opts.get('follow') or opts.get('follow_first')
2242 follow = opts.get('follow') or opts.get('follow_first')
2242 if opts.get('rev'):
2243 if opts.get('rev'):
2243 revs = scmutil.revrange(repo, opts['rev'])
2244 revs = scmutil.revrange(repo, opts['rev'])
2244 elif follow and repo.dirstate.p1() == nullid:
2245 elif follow and repo.dirstate.p1() == nullid:
2245 revs = smartset.baseset()
2246 revs = smartset.baseset()
2246 elif follow:
2247 elif follow:
2247 revs = repo.revs('reverse(:.)')
2248 revs = repo.revs('reverse(:.)')
2248 else:
2249 else:
2249 revs = smartset.spanset(repo)
2250 revs = smartset.spanset(repo)
2250 revs.reverse()
2251 revs.reverse()
2251 return revs
2252 return revs
2252
2253
2253 def getgraphlogrevs(repo, pats, opts):
2254 def getgraphlogrevs(repo, pats, opts):
2254 """Return (revs, expr, filematcher) where revs is an iterable of
2255 """Return (revs, expr, filematcher) where revs is an iterable of
2255 revision numbers, expr is a revset string built from log options
2256 revision numbers, expr is a revset string built from log options
2256 and file patterns or None, and used to filter 'revs'. If --stat or
2257 and file patterns or None, and used to filter 'revs'. If --stat or
2257 --patch are not passed, filematcher is None. Otherwise it is a
2258 --patch are not passed, filematcher is None. Otherwise it is a
2258 callable taking a revision number and returning a match object
2259 callable taking a revision number and returning a match object
2259 filtering the files to be detailed when displaying the revision.
2260 filtering the files to be detailed when displaying the revision.
2260 """
2261 """
2261 limit = loglimit(opts)
2262 limit = loglimit(opts)
2262 revs = _logrevs(repo, opts)
2263 revs = _logrevs(repo, opts)
2263 if not revs:
2264 if not revs:
2264 return smartset.baseset(), None, None
2265 return smartset.baseset(), None, None
2265 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2266 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2266 if opts.get('rev'):
2267 if opts.get('rev'):
2267 # User-specified revs might be unsorted, but don't sort before
2268 # User-specified revs might be unsorted, but don't sort before
2268 # _makelogrevset because it might depend on the order of revs
2269 # _makelogrevset because it might depend on the order of revs
2269 if not (revs.isdescending() or revs.istopo()):
2270 if not (revs.isdescending() or revs.istopo()):
2270 revs.sort(reverse=True)
2271 revs.sort(reverse=True)
2271 if expr:
2272 if expr:
2272 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2273 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2273 revs = matcher(repo, revs)
2274 revs = matcher(repo, revs)
2274 if limit is not None:
2275 if limit is not None:
2275 limitedrevs = []
2276 limitedrevs = []
2276 for idx, rev in enumerate(revs):
2277 for idx, rev in enumerate(revs):
2277 if idx >= limit:
2278 if idx >= limit:
2278 break
2279 break
2279 limitedrevs.append(rev)
2280 limitedrevs.append(rev)
2280 revs = smartset.baseset(limitedrevs)
2281 revs = smartset.baseset(limitedrevs)
2281
2282
2282 return revs, expr, filematcher
2283 return revs, expr, filematcher
2283
2284
2284 def getlogrevs(repo, pats, opts):
2285 def getlogrevs(repo, pats, opts):
2285 """Return (revs, expr, filematcher) where revs is an iterable of
2286 """Return (revs, expr, filematcher) where revs is an iterable of
2286 revision numbers, expr is a revset string built from log options
2287 revision numbers, expr is a revset string built from log options
2287 and file patterns or None, and used to filter 'revs'. If --stat or
2288 and file patterns or None, and used to filter 'revs'. If --stat or
2288 --patch are not passed, filematcher is None. Otherwise it is a
2289 --patch are not passed, filematcher is None. Otherwise it is a
2289 callable taking a revision number and returning a match object
2290 callable taking a revision number and returning a match object
2290 filtering the files to be detailed when displaying the revision.
2291 filtering the files to be detailed when displaying the revision.
2291 """
2292 """
2292 limit = loglimit(opts)
2293 limit = loglimit(opts)
2293 revs = _logrevs(repo, opts)
2294 revs = _logrevs(repo, opts)
2294 if not revs:
2295 if not revs:
2295 return smartset.baseset([]), None, None
2296 return smartset.baseset([]), None, None
2296 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2297 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2297 if expr:
2298 if expr:
2298 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2299 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2299 revs = matcher(repo, revs)
2300 revs = matcher(repo, revs)
2300 if limit is not None:
2301 if limit is not None:
2301 limitedrevs = []
2302 limitedrevs = []
2302 for idx, r in enumerate(revs):
2303 for idx, r in enumerate(revs):
2303 if limit <= idx:
2304 if limit <= idx:
2304 break
2305 break
2305 limitedrevs.append(r)
2306 limitedrevs.append(r)
2306 revs = smartset.baseset(limitedrevs)
2307 revs = smartset.baseset(limitedrevs)
2307
2308
2308 return revs, expr, filematcher
2309 return revs, expr, filematcher
2309
2310
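# Illustrative sketch (editor's addition, not part of cmdutil.py): how a
# log-style command consumes the (revs, expr, filematcher) triple returned by
# getlogrevs()/getgraphlogrevs(). "ui", "repo", "pats" and "opts" are assumed
# to come from the command layer:
#
#     revs, expr, filematcher = getlogrevs(repo, pats, opts)
#     displayer = show_changeset(ui, repo, opts, buffered=False)
#     for rev in revs:
#         ctx = repo[rev]
#         revmatchfn = filematcher(rev) if filematcher else None
#         displayer.show(ctx, matchfn=revmatchfn)
#     displayer.close()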
2310 def _graphnodeformatter(ui, displayer):
2311 def _graphnodeformatter(ui, displayer):
2311 spec = ui.config('ui', 'graphnodetemplate')
2312 spec = ui.config('ui', 'graphnodetemplate')
2312 if not spec:
2313 if not spec:
2313 return templatekw.showgraphnode # fast path for "{graphnode}"
2314 return templatekw.showgraphnode # fast path for "{graphnode}"
2314
2315
2315 spec = templater.unquotestring(spec)
2316 spec = templater.unquotestring(spec)
2316 templ = formatter.maketemplater(ui, spec)
2317 templ = formatter.maketemplater(ui, spec)
2317 cache = {}
2318 cache = {}
2318 if isinstance(displayer, changeset_templater):
2319 if isinstance(displayer, changeset_templater):
2319 cache = displayer.cache # reuse cache of slow templates
2320 cache = displayer.cache # reuse cache of slow templates
2320 props = templatekw.keywords.copy()
2321 props = templatekw.keywords.copy()
2321 props['templ'] = templ
2322 props['templ'] = templ
2322 props['cache'] = cache
2323 props['cache'] = cache
2323 def formatnode(repo, ctx):
2324 def formatnode(repo, ctx):
2324 props['ctx'] = ctx
2325 props['ctx'] = ctx
2325 props['repo'] = repo
2326 props['repo'] = repo
2326 props['ui'] = repo.ui
2327 props['ui'] = repo.ui
2327 props['revcache'] = {}
2328 props['revcache'] = {}
2328 return templ.render(props)
2329 return templ.render(props)
2329 return formatnode
2330 return formatnode
2330
2331
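# Illustrative example (editor's addition, an assumption rather than taken
# from this file): the template read above comes from the user's hgrc, e.g.
#
#     [ui]
#     graphnodetemplate = {if(bookmarks, "@", graphnode)}
#
# which would render bookmarked revisions as "@" in "hg log --graph" while
# other nodes keep the default {graphnode} character.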
2331 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2332 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2332 filematcher=None):
2333 filematcher=None):
2333 formatnode = _graphnodeformatter(ui, displayer)
2334 formatnode = _graphnodeformatter(ui, displayer)
2334 state = graphmod.asciistate()
2335 state = graphmod.asciistate()
2335 styles = state['styles']
2336 styles = state['styles']
2336
2337
2337 # only set graph styling if HGPLAIN is not set.
2338 # only set graph styling if HGPLAIN is not set.
2338 if ui.plain('graph'):
2339 if ui.plain('graph'):
2339 # set all edge styles to |, the default pre-3.8 behaviour
2340 # set all edge styles to |, the default pre-3.8 behaviour
2340 styles.update(dict.fromkeys(styles, '|'))
2341 styles.update(dict.fromkeys(styles, '|'))
2341 else:
2342 else:
2342 edgetypes = {
2343 edgetypes = {
2343 'parent': graphmod.PARENT,
2344 'parent': graphmod.PARENT,
2344 'grandparent': graphmod.GRANDPARENT,
2345 'grandparent': graphmod.GRANDPARENT,
2345 'missing': graphmod.MISSINGPARENT
2346 'missing': graphmod.MISSINGPARENT
2346 }
2347 }
2347 for name, key in edgetypes.items():
2348 for name, key in edgetypes.items():
2348 # experimental config: experimental.graphstyle.*
2349 # experimental config: experimental.graphstyle.*
2349 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2350 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2350 styles[key])
2351 styles[key])
2351 if not styles[key]:
2352 if not styles[key]:
2352 styles[key] = None
2353 styles[key] = None
2353
2354
2354 # experimental config: experimental.graphshorten
2355 # experimental config: experimental.graphshorten
2355 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2356 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2356
2357
2357 for rev, type, ctx, parents in dag:
2358 for rev, type, ctx, parents in dag:
2358 char = formatnode(repo, ctx)
2359 char = formatnode(repo, ctx)
2359 copies = None
2360 copies = None
2360 if getrenamed and ctx.rev():
2361 if getrenamed and ctx.rev():
2361 copies = []
2362 copies = []
2362 for fn in ctx.files():
2363 for fn in ctx.files():
2363 rename = getrenamed(fn, ctx.rev())
2364 rename = getrenamed(fn, ctx.rev())
2364 if rename:
2365 if rename:
2365 copies.append((fn, rename[0]))
2366 copies.append((fn, rename[0]))
2366 revmatchfn = None
2367 revmatchfn = None
2367 if filematcher is not None:
2368 if filematcher is not None:
2368 revmatchfn = filematcher(ctx.rev())
2369 revmatchfn = filematcher(ctx.rev())
2369 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2370 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2370 lines = displayer.hunk.pop(rev).split('\n')
2371 lines = displayer.hunk.pop(rev).split('\n')
2371 if not lines[-1]:
2372 if not lines[-1]:
2372 del lines[-1]
2373 del lines[-1]
2373 displayer.flush(ctx)
2374 displayer.flush(ctx)
2374 edges = edgefn(type, char, lines, state, rev, parents)
2375 edges = edgefn(type, char, lines, state, rev, parents)
2375 for type, char, lines, coldata in edges:
2376 for type, char, lines, coldata in edges:
2376 graphmod.ascii(ui, state, type, char, lines, coldata)
2377 graphmod.ascii(ui, state, type, char, lines, coldata)
2377 displayer.close()
2378 displayer.close()
2378
2379
2379 def graphlog(ui, repo, pats, opts):
2380 def graphlog(ui, repo, pats, opts):
2380 # Parameters are identical to log command ones
2381 # Parameters are identical to log command ones
2381 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2382 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2382 revdag = graphmod.dagwalker(repo, revs)
2383 revdag = graphmod.dagwalker(repo, revs)
2383
2384
2384 getrenamed = None
2385 getrenamed = None
2385 if opts.get('copies'):
2386 if opts.get('copies'):
2386 endrev = None
2387 endrev = None
2387 if opts.get('rev'):
2388 if opts.get('rev'):
2388 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2389 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2389 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2390 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2390
2391
2391 ui.pager('log')
2392 ui.pager('log')
2392 displayer = show_changeset(ui, repo, opts, buffered=True)
2393 displayer = show_changeset(ui, repo, opts, buffered=True)
2393 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2394 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2394 filematcher)
2395 filematcher)
2395
2396
2396 def checkunsupportedgraphflags(pats, opts):
2397 def checkunsupportedgraphflags(pats, opts):
2397 for op in ["newest_first"]:
2398 for op in ["newest_first"]:
2398 if op in opts and opts[op]:
2399 if op in opts and opts[op]:
2399 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2400 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2400 % op.replace("_", "-"))
2401 % op.replace("_", "-"))
2401
2402
2402 def graphrevs(repo, nodes, opts):
2403 def graphrevs(repo, nodes, opts):
2403 limit = loglimit(opts)
2404 limit = loglimit(opts)
2404 nodes.reverse()
2405 nodes.reverse()
2405 if limit is not None:
2406 if limit is not None:
2406 nodes = nodes[:limit]
2407 nodes = nodes[:limit]
2407 return graphmod.nodes(repo, nodes)
2408 return graphmod.nodes(repo, nodes)
2408
2409
2409 def add(ui, repo, match, prefix, explicitonly, **opts):
2410 def add(ui, repo, match, prefix, explicitonly, **opts):
2410 join = lambda f: os.path.join(prefix, f)
2411 join = lambda f: os.path.join(prefix, f)
2411 bad = []
2412 bad = []
2412
2413
2413 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2414 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2414 names = []
2415 names = []
2415 wctx = repo[None]
2416 wctx = repo[None]
2416 cca = None
2417 cca = None
2417 abort, warn = scmutil.checkportabilityalert(ui)
2418 abort, warn = scmutil.checkportabilityalert(ui)
2418 if abort or warn:
2419 if abort or warn:
2419 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2420 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2420
2421
2421 badmatch = matchmod.badmatch(match, badfn)
2422 badmatch = matchmod.badmatch(match, badfn)
2422 dirstate = repo.dirstate
2423 dirstate = repo.dirstate
2423 # We don't want to just call wctx.walk here, since it would return a lot of
2424 # We don't want to just call wctx.walk here, since it would return a lot of
2424 # clean files, which we aren't interested in, and doing so takes time.
2425 # clean files, which we aren't interested in, and doing so takes time.
2425 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2426 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2426 True, False, full=False)):
2427 True, False, full=False)):
2427 exact = match.exact(f)
2428 exact = match.exact(f)
2428 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2429 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2429 if cca:
2430 if cca:
2430 cca(f)
2431 cca(f)
2431 names.append(f)
2432 names.append(f)
2432 if ui.verbose or not exact:
2433 if ui.verbose or not exact:
2433 ui.status(_('adding %s\n') % match.rel(f))
2434 ui.status(_('adding %s\n') % match.rel(f))
2434
2435
2435 for subpath in sorted(wctx.substate):
2436 for subpath in sorted(wctx.substate):
2436 sub = wctx.sub(subpath)
2437 sub = wctx.sub(subpath)
2437 try:
2438 try:
2438 submatch = matchmod.subdirmatcher(subpath, match)
2439 submatch = matchmod.subdirmatcher(subpath, match)
2439 if opts.get(r'subrepos'):
2440 if opts.get(r'subrepos'):
2440 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2441 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2441 else:
2442 else:
2442 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2443 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2443 except error.LookupError:
2444 except error.LookupError:
2444 ui.status(_("skipping missing subrepository: %s\n")
2445 ui.status(_("skipping missing subrepository: %s\n")
2445 % join(subpath))
2446 % join(subpath))
2446
2447
2447 if not opts.get(r'dry_run'):
2448 if not opts.get(r'dry_run'):
2448 rejected = wctx.add(names, prefix)
2449 rejected = wctx.add(names, prefix)
2449 bad.extend(f for f in rejected if f in match.files())
2450 bad.extend(f for f in rejected if f in match.files())
2450 return bad
2451 return bad
2451
2452
2452 def addwebdirpath(repo, serverpath, webconf):
2453 def addwebdirpath(repo, serverpath, webconf):
2453 webconf[serverpath] = repo.root
2454 webconf[serverpath] = repo.root
2454 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2455 repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
2455
2456
2456 for r in repo.revs('filelog("path:.hgsub")'):
2457 for r in repo.revs('filelog("path:.hgsub")'):
2457 ctx = repo[r]
2458 ctx = repo[r]
2458 for subpath in ctx.substate:
2459 for subpath in ctx.substate:
2459 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2460 ctx.sub(subpath).addwebdirpath(serverpath, webconf)
2460
2461
2461 def forget(ui, repo, match, prefix, explicitonly):
2462 def forget(ui, repo, match, prefix, explicitonly):
2462 join = lambda f: os.path.join(prefix, f)
2463 join = lambda f: os.path.join(prefix, f)
2463 bad = []
2464 bad = []
2464 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2465 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2465 wctx = repo[None]
2466 wctx = repo[None]
2466 forgot = []
2467 forgot = []
2467
2468
2468 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2469 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2469 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2470 forget = sorted(s.modified + s.added + s.deleted + s.clean)
2470 if explicitonly:
2471 if explicitonly:
2471 forget = [f for f in forget if match.exact(f)]
2472 forget = [f for f in forget if match.exact(f)]
2472
2473
2473 for subpath in sorted(wctx.substate):
2474 for subpath in sorted(wctx.substate):
2474 sub = wctx.sub(subpath)
2475 sub = wctx.sub(subpath)
2475 try:
2476 try:
2476 submatch = matchmod.subdirmatcher(subpath, match)
2477 submatch = matchmod.subdirmatcher(subpath, match)
2477 subbad, subforgot = sub.forget(submatch, prefix)
2478 subbad, subforgot = sub.forget(submatch, prefix)
2478 bad.extend([subpath + '/' + f for f in subbad])
2479 bad.extend([subpath + '/' + f for f in subbad])
2479 forgot.extend([subpath + '/' + f for f in subforgot])
2480 forgot.extend([subpath + '/' + f for f in subforgot])
2480 except error.LookupError:
2481 except error.LookupError:
2481 ui.status(_("skipping missing subrepository: %s\n")
2482 ui.status(_("skipping missing subrepository: %s\n")
2482 % join(subpath))
2483 % join(subpath))
2483
2484
2484 if not explicitonly:
2485 if not explicitonly:
2485 for f in match.files():
2486 for f in match.files():
2486 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2487 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2487 if f not in forgot:
2488 if f not in forgot:
2488 if repo.wvfs.exists(f):
2489 if repo.wvfs.exists(f):
2489 # Don't complain if the exact case match wasn't given.
2490 # Don't complain if the exact case match wasn't given.
2490 # But don't do this until after checking 'forgot', so
2491 # But don't do this until after checking 'forgot', so
2491 # that subrepo files aren't normalized, and this op is
2492 # that subrepo files aren't normalized, and this op is
2492 # purely from data cached by the status walk above.
2493 # purely from data cached by the status walk above.
2493 if repo.dirstate.normalize(f) in repo.dirstate:
2494 if repo.dirstate.normalize(f) in repo.dirstate:
2494 continue
2495 continue
2495 ui.warn(_('not removing %s: '
2496 ui.warn(_('not removing %s: '
2496 'file is already untracked\n')
2497 'file is already untracked\n')
2497 % match.rel(f))
2498 % match.rel(f))
2498 bad.append(f)
2499 bad.append(f)
2499
2500
2500 for f in forget:
2501 for f in forget:
2501 if ui.verbose or not match.exact(f):
2502 if ui.verbose or not match.exact(f):
2502 ui.status(_('removing %s\n') % match.rel(f))
2503 ui.status(_('removing %s\n') % match.rel(f))
2503
2504
2504 rejected = wctx.forget(forget, prefix)
2505 rejected = wctx.forget(forget, prefix)
2505 bad.extend(f for f in rejected if f in match.files())
2506 bad.extend(f for f in rejected if f in match.files())
2506 forgot.extend(f for f in forget if f not in rejected)
2507 forgot.extend(f for f in forget if f not in rejected)
2507 return bad, forgot
2508 return bad, forgot
2508
2509
2509 def files(ui, ctx, m, fm, fmt, subrepos):
2510 def files(ui, ctx, m, fm, fmt, subrepos):
2510 rev = ctx.rev()
2511 rev = ctx.rev()
2511 ret = 1
2512 ret = 1
2512 ds = ctx.repo().dirstate
2513 ds = ctx.repo().dirstate
2513
2514
2514 for f in ctx.matches(m):
2515 for f in ctx.matches(m):
2515 if rev is None and ds[f] == 'r':
2516 if rev is None and ds[f] == 'r':
2516 continue
2517 continue
2517 fm.startitem()
2518 fm.startitem()
2518 if ui.verbose:
2519 if ui.verbose:
2519 fc = ctx[f]
2520 fc = ctx[f]
2520 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2521 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2521 fm.data(abspath=f)
2522 fm.data(abspath=f)
2522 fm.write('path', fmt, m.rel(f))
2523 fm.write('path', fmt, m.rel(f))
2523 ret = 0
2524 ret = 0
2524
2525
2525 for subpath in sorted(ctx.substate):
2526 for subpath in sorted(ctx.substate):
2526 submatch = matchmod.subdirmatcher(subpath, m)
2527 submatch = matchmod.subdirmatcher(subpath, m)
2527 if (subrepos or m.exact(subpath) or any(submatch.files())):
2528 if (subrepos or m.exact(subpath) or any(submatch.files())):
2528 sub = ctx.sub(subpath)
2529 sub = ctx.sub(subpath)
2529 try:
2530 try:
2530 recurse = m.exact(subpath) or subrepos
2531 recurse = m.exact(subpath) or subrepos
2531 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2532 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2532 ret = 0
2533 ret = 0
2533 except error.LookupError:
2534 except error.LookupError:
2534 ui.status(_("skipping missing subrepository: %s\n")
2535 ui.status(_("skipping missing subrepository: %s\n")
2535 % m.abs(subpath))
2536 % m.abs(subpath))
2536
2537
2537 return ret
2538 return ret
2538
2539
2539 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2540 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2540 join = lambda f: os.path.join(prefix, f)
2541 join = lambda f: os.path.join(prefix, f)
2541 ret = 0
2542 ret = 0
2542 s = repo.status(match=m, clean=True)
2543 s = repo.status(match=m, clean=True)
2543 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2544 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2544
2545
2545 wctx = repo[None]
2546 wctx = repo[None]
2546
2547
2547 if warnings is None:
2548 if warnings is None:
2548 warnings = []
2549 warnings = []
2549 warn = True
2550 warn = True
2550 else:
2551 else:
2551 warn = False
2552 warn = False
2552
2553
2553 subs = sorted(wctx.substate)
2554 subs = sorted(wctx.substate)
2554 total = len(subs)
2555 total = len(subs)
2555 count = 0
2556 count = 0
2556 for subpath in subs:
2557 for subpath in subs:
2557 count += 1
2558 count += 1
2558 submatch = matchmod.subdirmatcher(subpath, m)
2559 submatch = matchmod.subdirmatcher(subpath, m)
2559 if subrepos or m.exact(subpath) or any(submatch.files()):
2560 if subrepos or m.exact(subpath) or any(submatch.files()):
2560 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2561 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2561 sub = wctx.sub(subpath)
2562 sub = wctx.sub(subpath)
2562 try:
2563 try:
2563 if sub.removefiles(submatch, prefix, after, force, subrepos,
2564 if sub.removefiles(submatch, prefix, after, force, subrepos,
2564 warnings):
2565 warnings):
2565 ret = 1
2566 ret = 1
2566 except error.LookupError:
2567 except error.LookupError:
2567 warnings.append(_("skipping missing subrepository: %s\n")
2568 warnings.append(_("skipping missing subrepository: %s\n")
2568 % join(subpath))
2569 % join(subpath))
2569 ui.progress(_('searching'), None)
2570 ui.progress(_('searching'), None)
2570
2571
2571 # warn about failure to delete explicit files/dirs
2572 # warn about failure to delete explicit files/dirs
2572 deleteddirs = util.dirs(deleted)
2573 deleteddirs = util.dirs(deleted)
2573 files = m.files()
2574 files = m.files()
2574 total = len(files)
2575 total = len(files)
2575 count = 0
2576 count = 0
2576 for f in files:
2577 for f in files:
2577 def insubrepo():
2578 def insubrepo():
2578 for subpath in wctx.substate:
2579 for subpath in wctx.substate:
2579 if f.startswith(subpath + '/'):
2580 if f.startswith(subpath + '/'):
2580 return True
2581 return True
2581 return False
2582 return False
2582
2583
2583 count += 1
2584 count += 1
2584 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2585 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2585 isdir = f in deleteddirs or wctx.hasdir(f)
2586 isdir = f in deleteddirs or wctx.hasdir(f)
2586 if (f in repo.dirstate or isdir or f == '.'
2587 if (f in repo.dirstate or isdir or f == '.'
2587 or insubrepo() or f in subs):
2588 or insubrepo() or f in subs):
2588 continue
2589 continue
2589
2590
2590 if repo.wvfs.exists(f):
2591 if repo.wvfs.exists(f):
2591 if repo.wvfs.isdir(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    ui.progress(_('deleting'), None)

    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                repo.wvfs.unlinkpath(f, ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret

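# Implementation behind 'hg cat': write the data of every file matched in ctx
# through the formatter, descending into subrepositories as needed.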
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
    err = 1

    def write(path):
        filename = None
        if fntemplate:
            filename = makefilename(repo, fntemplate, ctx.node(),
                                    pathname=os.path.join(prefix, path))
        with formatter.maybereopen(basefm, filename, opts) as fm:
            data = ctx[path].data()
            if opts.get('decode'):
                data = repo.wwritedata(path, data)
            fm.startitem()
            fm.write('data', '%s', data)
            fm.data(abspath=path, path=matcher.rel(path))

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                write(file)
                return 0
        except KeyError:
            pass

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, basefm, fntemplate,
                           os.path.join(prefix, sub._path), **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err

def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        if scmutil.addremove(repo, matcher, "", opts) != 0:
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)

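# Return True if f is unchanged between ctx1 and ctx2: either it has the same
# data and flags in both manifests, or it is absent from both.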
def samefile(f, ctx1, ctx2):
    if f in ctx1.manifest():
        a = ctx1.filectx(f)
        if f in ctx2.manifest():
            b = ctx2.filectx(f)
            return (not a.cmp(b)
                    and a.flags() == b.flags())
        else:
            return False
    else:
        return f not in ctx2.manifest()

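# Rewrite changeset 'old', folding in any pending working directory changes;
# the replaced changesets are either obsoleted or stripped depending on
# whether obsolescence marker creation is enabled.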
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    newid = None
    with repo.wlock(), repo.lock():
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                if old.p2:
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())
                files = [f for f in files if not samefile(f, ctx, base)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

            user = opts.get('user') or old.user()
            date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This is not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
        # commit the whole amend process
        if createmarkers:
            # mark the new changeset as successor of the rewritten one
            new = repo[newid]
            obs = [(old, (new,))]
            if node:
                obs.append((ctx, ()))

            obsolete.createmarkers(repo, obs, operation='amend')
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    return newid

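# Return ctx's description if it already has one, otherwise prompt for a
# message with the commit editor.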
def commiteditor(repo, ctx, subs, editform=''):
    if ctx.description():
        return ctx.description()
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)

def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        ref = '.'.join(forms)
        if repo.ui.config('committemplate', ref):
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, ref)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending,
                              repopath=repo.path)
    text = editortext

    # strip away anything below this special string (used for editors that want
    # to display the diff)
    stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
    if stripbelow:
        text = text[:stripbelow.start()]

    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text

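# Render the [committemplate] template named by 'ref' for ctx; used to seed
# the text shown in the commit editor.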
def buildcommittemplate(repo, ctx, subs, extramsg, ref):
    ui = repo.ui
    spec = formatter.templatespec(ref, None, None)
    t = changeset_templater(ui, repo, spec, None, {}, False)
    t.t.cache.update((k, templater.unquotestring(v))
                     for k, v in repo.ui.configitems('committemplate'))

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()

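# Prefix every non-empty line of msg with "HG: " so it is stripped from the
# final commit message.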
def hgprefix(msg):
    return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])

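# Build the default editor contents: the current description plus "HG:"
# comment lines describing the branch, bookmark and files involved.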
def buildcommittext(repo, ctx, subs, extramsg):
    edittext = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        edittext.append(ctx.description())
    edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(hgprefix(_("Enter commit message."
                               " Lines beginning with 'HG:' are removed.")))
    edittext.append(hgprefix(extramsg))
    edittext.append("HG: --")
    edittext.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        edittext.append(hgprefix(_("branch merge")))
    if ctx.branch():
        edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
    edittext.extend([hgprefix(_("added %s") % f) for f in added])
    edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
    edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
    if not added and not modified and not removed:
        edittext.append(hgprefix(_("no files changed")))
    edittext.append("")

    return "\n".join(edittext)

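# After a commit, tell the user about new heads, reopened branch heads, and
# (in verbose or debug mode) the id of the committed changeset.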
def commitstatus(repo, node, branch, bheads=None, opts=None):
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))

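# Return the status of the working directory, restricted to the files matched
# by pats and opts.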
def postcommitstatus(repo, pats, opts):
    return repo.status(match=scmutil.match(repo[None], pats, opts))

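# Compute which files need reverting so that the files matched by pats look
# like they do in ctx, then carry out the resulting actions (see
# _performrevert below).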
def revert(ui, repo, ctx, parents, *pats, **opts):
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            matcher = matchmod.badmatch(m, lambda x, y: False)
            for abs in wctx.walk(matcher):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all files in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something other than the parent. This
        # will slightly alter the behavior of revert (doing a backup or not,
        # delete or just forget, etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinguish between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell the newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may need backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = set(dsmodified)
            for path in dsmodified:
                if path in mf:
                    mergeadd.remove(path)
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # determine the exact nature of the deleted changesets
        deladded = set(_deleted)
        for path in _deleted:
            if path in mf:
                deladded.remove(path)
        deleted = _deleted - deladded

        # distinguish between files to forget and the others
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present
        # at the same path. If such a file exists it may need to be backed up.
        # Making the distinction at this stage helps keep the backup logic
        # simpler.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of files>, <message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                   }

        # "constants" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set to avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backupinteractive = 3 # do backup if interactively modified
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backupinteractive = backup = check = discard
        if interactive:
            dsmodifiedbackup = backupinteractive
        else:
            dsmodifiedbackup = backup
        tobackup = set()

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets whose entries will result in changes to files on disk
            # Modified compared to target, no local change
            (modified, actions['revert'], discard),
            # Modified compared to target, but local file is deleted
            (deleted, actions['revert'], discard),
            # Modified compared to target, local change
            (dsmodified, actions['revert'], dsmodifiedbackup),
            # Added since target
            (added, actions['remove'], discard),
            # Added in working directory
            (dsadded, actions['forget'], discard),
            # Added since target, have local modification
            (modadded, backupanddel, backup),
            # Added since target but file is missing in working directory
            (deladded, actions['drop'], discard),
            # Removed since target, before working copy parent
            (removed, actions['add'], discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk, actions['add'], check),
            # Removed since target, marked as such in working copy parent
            (dsremoved, actions['undelete'], discard),
            # Same as `dsremoved` but an unknown file exists at the same path
            (dsremovunk, actions['undelete'], check),
            ## the following sets do not result in any file changes
            # File with no modification
            (clean, actions['noop'], discard),
            # Existing file, not tracked anywhere
            (unknown, actions['unknown'], discard),
            )

        for abs, (rel, exact) in sorted(names.items()):
            # target file to be touched on disk (relative to cwd)
            target = repo.wjoin(abs)
            # search the entry in the dispatch table.
            # if the file is in any of these sets, it was touched in the working
            # directory parent and we are sure it needs to be reverted.
            for table, (xlist, msg), dobackup in disptable:
                if abs not in table:
                    continue
                if xlist is not None:
                    xlist.append(abs)
                    if dobackup:
                        # If in interactive mode, don't automatically create
                        # .orig files (issue4793)
                        if dobackup == backupinteractive:
                            tobackup.add(abs)
                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
                            bakname = scmutil.origpath(ui, repo, rel)
                            ui.note(_('saving current version of %s as %s\n') %
                                    (rel, bakname))
                            if not opts.get('dry_run'):
                                if interactive:
                                    util.copyfile(target, bakname)
                                else:
                                    util.rename(target, bakname)
                    if ui.verbose or not exact:
                        if not isinstance(msg, basestring):
                            msg = msg(abs)
                        ui.status(msg % rel)
                elif exact:
                    ui.warn(msg % rel)
                break

        if not opts.get('dry_run'):
            needdata = ('revert', 'add', 'undelete')
            _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
            _performrevert(repo, parents, ctx, actions, interactive, tobackup)

        if targetsubs:
            # Revert the subrepos on the revert list
            for sub in targetsubs:
                try:
                    wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
                except KeyError:
                    raise error.Abort("subrepository '%s' does not exist in %s!"
                                      % (sub, short(ctx.node())))

def _revertprefetch(repo, ctx, *files):
    """Let extensions changing the storage layer prefetch content"""
    pass

def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """function that actually performs all the actions computed for revert

    This is an independent function to let extensions plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.
    """
    parent, p2 = parents
    node = ctx.node()
    excluded_files = []
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        try:
            repo.wvfs.unlinkpath(f)
        except OSError:
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        operation = 'discard'
        reversehunks = True
        if node != parent:
            operation = 'revert'
            reversehunks = repo.ui.configbool('experimental',
                                              'revertalternateinteractivemode',
                                              True)
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't check out modified files; they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)

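# registrar.command subclass that flags functions registered through it so
# extensions.py can emit a deprecation warning; kept for third-party
# extensions, which should use registrar.command directly instead.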
3490 class command(registrar.command):
3491 class command(registrar.command):
3491 def _doregister(self, func, name, *args, **kwargs):
3492 def _doregister(self, func, name, *args, **kwargs):
3492 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3493 func._deprecatedregistrar = True # flag for deprecwarn in extensions.py
3493 return super(command, self)._doregister(func, name, *args, **kwargs)
3494 return super(command, self)._doregister(func, name, *args, **kwargs)
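# Illustrative sketch (not part of the diff above): the deprecation flag set
# in _doregister() steers extensions away from cmdutil.command and toward
# registrar.command.  Extension-level registration then looks roughly like
# this; the command name 'hello' is made up.
from mercurial import registrar
from mercurial.i18n import _

cmdtable = {}
command = registrar.command(cmdtable)

@command('hello', [], _('hg hello'))
def hello(ui, repo, **opts):
    """print a greeting (hypothetical example command)"""
    ui.write('hello from %s\n' % repo.root)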
3494
3495
3495 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3496 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3496 # commands.outgoing. "missing" is the "missing" attribute of the result
3497 # commands.outgoing. "missing" is the "missing" attribute of the result
3497 # of "findcommonoutgoing()"
3498 # of "findcommonoutgoing()"
3498 outgoinghooks = util.hooks()
3499 outgoinghooks = util.hooks()
3499
3500
3500 # a list of (ui, repo) functions called by commands.summary
3501 # a list of (ui, repo) functions called by commands.summary
3501 summaryhooks = util.hooks()
3502 summaryhooks = util.hooks()
3502
3503
3503 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3504 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3504 #
3505 #
3505 # functions should return tuple of booleans below, if 'changes' is None:
3506 # functions should return tuple of booleans below, if 'changes' is None:
3506 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3507 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3507 #
3508 #
3508 # otherwise, 'changes' is a tuple of tuples below:
3509 # otherwise, 'changes' is a tuple of tuples below:
3509 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3510 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3510 # - (desturl, destbranch, destpeer, outgoing)
3511 # - (desturl, destbranch, destpeer, outgoing)
3511 summaryremotehooks = util.hooks()
3512 summaryremotehooks = util.hooks()
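# Illustrative sketch (not part of the diff above): registering one of the
# hook points declared above.  util.hooks.add() stores (source, function)
# pairs, and commands.summary later calls cmdutil.summaryhooks(ui, repo)
# with the signature given in its comment.  The extension name 'myext' and
# the message are hypothetical.
from mercurial import cmdutil

def summaryhook(ui, repo):
    # called by 'hg summary' with (ui, repo)
    ui.status('myext: %d heads\n' % len(repo.heads()))

cmdutil.summaryhooks.add('myext', summaryhook)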
3512
3513
3513 # A list of state files kept by multistep operations like graft.
3514 # A list of state files kept by multistep operations like graft.
3514 # Since graft cannot be aborted, it is considered 'clearable' by update.
3515 # Since graft cannot be aborted, it is considered 'clearable' by update.
3515 # note: bisect is intentionally excluded
3516 # note: bisect is intentionally excluded
3516 # (state file, clearable, allowcommit, error, hint)
3517 # (state file, clearable, allowcommit, error, hint)
3517 unfinishedstates = [
3518 unfinishedstates = [
3518 ('graftstate', True, False, _('graft in progress'),
3519 ('graftstate', True, False, _('graft in progress'),
3519 _("use 'hg graft --continue' or 'hg update' to abort")),
3520 _("use 'hg graft --continue' or 'hg update' to abort")),
3520 ('updatestate', True, False, _('last update was interrupted'),
3521 ('updatestate', True, False, _('last update was interrupted'),
3521 _("use 'hg update' to get a consistent checkout"))
3522 _("use 'hg update' to get a consistent checkout"))
3522 ]
3523 ]
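# Illustrative sketch (not part of the diff above): an extension that keeps
# its own multistep state file can plug into checkunfinished() and
# clearunfinished() below by appending a
# (state file, clearable, allowcommit, error, hint) entry.  'myextstate'
# and the messages are hypothetical; the rebase and shelve extensions use
# this same pattern.
from mercurial import cmdutil
from mercurial.i18n import _

cmdutil.unfinishedstates.append(
    ['myextstate', False, False, _('myext operation in progress'),
     _("use 'hg myext --continue' or 'hg myext --abort'")])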
3523
3524
3524 def checkunfinished(repo, commit=False):
3525 def checkunfinished(repo, commit=False):
3525 '''Look for an unfinished multistep operation, like graft, and abort
3526 '''Look for an unfinished multistep operation, like graft, and abort
3526 if found. It's probably good to check this right before
3527 if found. It's probably good to check this right before
3527 bailifchanged().
3528 bailifchanged().
3528 '''
3529 '''
3529 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3530 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3530 if commit and allowcommit:
3531 if commit and allowcommit:
3531 continue
3532 continue
3532 if repo.vfs.exists(f):
3533 if repo.vfs.exists(f):
3533 raise error.Abort(msg, hint=hint)
3534 raise error.Abort(msg, hint=hint)
3534
3535
3535 def clearunfinished(repo):
3536 def clearunfinished(repo):
3536 '''Check for unfinished operations (as above), and clear the ones
3537 '''Check for unfinished operations (as above), and clear the ones
3537 that are clearable.
3538 that are clearable.
3538 '''
3539 '''
3539 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3540 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3540 if not clearable and repo.vfs.exists(f):
3541 if not clearable and repo.vfs.exists(f):
3541 raise error.Abort(msg, hint=hint)
3542 raise error.Abort(msg, hint=hint)
3542 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3543 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3543 if clearable and repo.vfs.exists(f):
3544 if clearable and repo.vfs.exists(f):
3544 util.unlink(repo.vfs.join(f))
3545 util.unlink(repo.vfs.join(f))
3545
3546
3546 afterresolvedstates = [
3547 afterresolvedstates = [
3547 ('graftstate',
3548 ('graftstate',
3548 _('hg graft --continue')),
3549 _('hg graft --continue')),
3549 ]
3550 ]
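# Illustrative sketch (not part of the diff above): afterresolvedstates is
# extended the same way, so that howtocontinue() below can print
# "continue: hg myext --continue" once the state file exists.  'myextstate'
# is hypothetical; rebase registers its 'rebasestate' in a similar way.
from mercurial import cmdutil
from mercurial.i18n import _

cmdutil.afterresolvedstates.append(
    ('myextstate', _('hg myext --continue')))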
3550
3551
3551 def howtocontinue(repo):
3552 def howtocontinue(repo):
3552 '''Check for an unfinished operation and return the command to finish
3553 '''Check for an unfinished operation and return the command to finish
3553 it.
3554 it.
3554
3555
3555 afterresolvedstates tuples define a .hg/{file} and the corresponding
3556 afterresolvedstates tuples define a .hg/{file} and the corresponding
3556 command needed to finish it.
3557 command needed to finish it.
3557
3558
3558 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3559 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3559 a boolean.
3560 a boolean.
3560 '''
3561 '''
3561 contmsg = _("continue: %s")
3562 contmsg = _("continue: %s")
3562 for f, msg in afterresolvedstates:
3563 for f, msg in afterresolvedstates:
3563 if repo.vfs.exists(f):
3564 if repo.vfs.exists(f):
3564 return contmsg % msg, True
3565 return contmsg % msg, True
3565 workingctx = repo[None]
3566 workingctx = repo[None]
3566 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3567 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3567 for s in workingctx.substate)
3568 for s in workingctx.substate)
3568 if dirty:
3569 if dirty:
3569 return contmsg % _("hg commit"), False
3570 return contmsg % _("hg commit"), False
3570 return None, None
3571 return None, None
3571
3572
3572 def checkafterresolved(repo):
3573 def checkafterresolved(repo):
3573 '''Inform the user about the next action after completing hg resolve
3574 '''Inform the user about the next action after completing hg resolve
3574
3575
3575 If there's a matching afterresolvedstates, howtocontinue will yield
3576 If there's a matching afterresolvedstates, howtocontinue will yield
3576 repo.ui.warn as the reporter.
3577 repo.ui.warn as the reporter.
3577
3578
3578 Otherwise, it will yield repo.ui.note.
3579 Otherwise, it will yield repo.ui.note.
3579 '''
3580 '''
3580 msg, warning = howtocontinue(repo)
3581 msg, warning = howtocontinue(repo)
3581 if msg is not None:
3582 if msg is not None:
3582 if warning:
3583 if warning:
3583 repo.ui.warn("%s\n" % msg)
3584 repo.ui.warn("%s\n" % msg)
3584 else:
3585 else:
3585 repo.ui.note("%s\n" % msg)
3586 repo.ui.note("%s\n" % msg)
3586
3587
3587 def wrongtooltocontinue(repo, task):
3588 def wrongtooltocontinue(repo, task):
3588 '''Raise an abort suggesting how to properly continue if there is an
3589 '''Raise an abort suggesting how to properly continue if there is an
3589 active task.
3590 active task.
3590
3591
3591 Uses howtocontinue() to find the active task.
3592 Uses howtocontinue() to find the active task.
3592
3593
3593 If there is no active task, or the only suggestion is the plain
3594 If there is no active task, or the only suggestion is the plain
3594 'hg commit' one (reported via repo.ui.note), no hint is offered.
3595 'hg commit' one (reported via repo.ui.note), no hint is offered.
3595 '''
3596 '''
3596 after = howtocontinue(repo)
3597 after = howtocontinue(repo)
3597 hint = None
3598 hint = None
3598 if after[1]:
3599 if after[1]:
3599 hint = after[0]
3600 hint = after[0]
3600 raise error.Abort(_('no %s in progress') % task, hint=hint)
3601 raise error.Abort(_('no %s in progress') % task, hint=hint)
@@ -1,2226 +1,2244 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import operator
12 import operator
13 import os
13 import os
14 import random
14 import random
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 bundle2,
31 bundle2,
32 changegroup,
32 changegroup,
33 cmdutil,
33 cmdutil,
34 color,
34 color,
35 context,
35 context,
36 dagparser,
36 dagparser,
37 dagutil,
37 dagutil,
38 encoding,
38 encoding,
39 error,
39 error,
40 exchange,
40 exchange,
41 extensions,
41 extensions,
42 filemerge,
42 filemerge,
43 fileset,
43 fileset,
44 formatter,
44 formatter,
45 hg,
45 hg,
46 localrepo,
46 localrepo,
47 lock as lockmod,
47 lock as lockmod,
48 merge as mergemod,
48 merge as mergemod,
49 obsolete,
49 obsolete,
50 phases,
50 phases,
51 policy,
51 policy,
52 pvec,
52 pvec,
53 pycompat,
53 pycompat,
54 registrar,
54 registrar,
55 repair,
55 repair,
56 revlog,
56 revlog,
57 revset,
57 revset,
58 revsetlang,
58 revsetlang,
59 scmutil,
59 scmutil,
60 setdiscovery,
60 setdiscovery,
61 simplemerge,
61 simplemerge,
62 smartset,
62 smartset,
63 sslutil,
63 sslutil,
64 streamclone,
64 streamclone,
65 templater,
65 templater,
66 treediscovery,
66 treediscovery,
67 upgrade,
67 upgrade,
68 util,
68 util,
69 vfs as vfsmod,
69 vfs as vfsmod,
70 )
70 )
71
71
72 release = lockmod.release
72 release = lockmod.release
73
73
74 command = registrar.command()
74 command = registrar.command()
75
75
76 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
76 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
77 def debugancestor(ui, repo, *args):
77 def debugancestor(ui, repo, *args):
78 """find the ancestor revision of two revisions in a given index"""
78 """find the ancestor revision of two revisions in a given index"""
79 if len(args) == 3:
79 if len(args) == 3:
80 index, rev1, rev2 = args
80 index, rev1, rev2 = args
81 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
81 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
82 lookup = r.lookup
82 lookup = r.lookup
83 elif len(args) == 2:
83 elif len(args) == 2:
84 if not repo:
84 if not repo:
85 raise error.Abort(_('there is no Mercurial repository here '
85 raise error.Abort(_('there is no Mercurial repository here '
86 '(.hg not found)'))
86 '(.hg not found)'))
87 rev1, rev2 = args
87 rev1, rev2 = args
88 r = repo.changelog
88 r = repo.changelog
89 lookup = repo.lookup
89 lookup = repo.lookup
90 else:
90 else:
91 raise error.Abort(_('either two or three arguments required'))
91 raise error.Abort(_('either two or three arguments required'))
92 a = r.ancestor(lookup(rev1), lookup(rev2))
92 a = r.ancestor(lookup(rev1), lookup(rev2))
93 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
93 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
94
94
95 @command('debugapplystreamclonebundle', [], 'FILE')
95 @command('debugapplystreamclonebundle', [], 'FILE')
96 def debugapplystreamclonebundle(ui, repo, fname):
96 def debugapplystreamclonebundle(ui, repo, fname):
97 """apply a stream clone bundle file"""
97 """apply a stream clone bundle file"""
98 f = hg.openpath(ui, fname)
98 f = hg.openpath(ui, fname)
99 gen = exchange.readbundle(ui, f, fname)
99 gen = exchange.readbundle(ui, f, fname)
100 gen.apply(repo)
100 gen.apply(repo)
101
101
102 @command('debugbuilddag',
102 @command('debugbuilddag',
103 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
103 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
104 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
104 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
105 ('n', 'new-file', None, _('add new file at each rev'))],
105 ('n', 'new-file', None, _('add new file at each rev'))],
106 _('[OPTION]... [TEXT]'))
106 _('[OPTION]... [TEXT]'))
107 def debugbuilddag(ui, repo, text=None,
107 def debugbuilddag(ui, repo, text=None,
108 mergeable_file=False,
108 mergeable_file=False,
109 overwritten_file=False,
109 overwritten_file=False,
110 new_file=False):
110 new_file=False):
111 """builds a repo with a given DAG from scratch in the current empty repo
111 """builds a repo with a given DAG from scratch in the current empty repo
112
112
113 The description of the DAG is read from stdin if not given on the
113 The description of the DAG is read from stdin if not given on the
114 command line.
114 command line.
115
115
116 Elements:
116 Elements:
117
117
118 - "+n" is a linear run of n nodes based on the current default parent
118 - "+n" is a linear run of n nodes based on the current default parent
119 - "." is a single node based on the current default parent
119 - "." is a single node based on the current default parent
120 - "$" resets the default parent to null (implied at the start);
120 - "$" resets the default parent to null (implied at the start);
121 otherwise the default parent is always the last node created
121 otherwise the default parent is always the last node created
122 - "<p" sets the default parent to the backref p
122 - "<p" sets the default parent to the backref p
123 - "*p" is a fork at parent p, which is a backref
123 - "*p" is a fork at parent p, which is a backref
124 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
124 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
125 - "/p2" is a merge of the preceding node and p2
125 - "/p2" is a merge of the preceding node and p2
126 - ":tag" defines a local tag for the preceding node
126 - ":tag" defines a local tag for the preceding node
127 - "@branch" sets the named branch for subsequent nodes
127 - "@branch" sets the named branch for subsequent nodes
128 - "#...\\n" is a comment up to the end of the line
128 - "#...\\n" is a comment up to the end of the line
129
129
130 Whitespace between the above elements is ignored.
130 Whitespace between the above elements is ignored.
131
131
132 A backref is either
132 A backref is either
133
133
134 - a number n, which references the node curr-n, where curr is the current
134 - a number n, which references the node curr-n, where curr is the current
135 node, or
135 node, or
136 - the name of a local tag you placed earlier using ":tag", or
136 - the name of a local tag you placed earlier using ":tag", or
137 - empty to denote the default parent.
137 - empty to denote the default parent.
138
138
139 All string-valued elements are either strictly alphanumeric, or must
139 All string-valued elements are either strictly alphanumeric, or must
140 be enclosed in double quotes ("..."), with "\\" as escape character.
140 be enclosed in double quotes ("..."), with "\\" as escape character.
141 """
141 """
142
142
143 if text is None:
143 if text is None:
144 ui.status(_("reading DAG from stdin\n"))
144 ui.status(_("reading DAG from stdin\n"))
145 text = ui.fin.read()
145 text = ui.fin.read()
146
146
147 cl = repo.changelog
147 cl = repo.changelog
148 if len(cl) > 0:
148 if len(cl) > 0:
149 raise error.Abort(_('repository is not empty'))
149 raise error.Abort(_('repository is not empty'))
150
150
151 # determine number of revs in DAG
151 # determine number of revs in DAG
152 total = 0
152 total = 0
153 for type, data in dagparser.parsedag(text):
153 for type, data in dagparser.parsedag(text):
154 if type == 'n':
154 if type == 'n':
155 total += 1
155 total += 1
156
156
157 if mergeable_file:
157 if mergeable_file:
158 linesperrev = 2
158 linesperrev = 2
159 # make a file with k lines per rev
159 # make a file with k lines per rev
160 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
160 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
161 initialmergedlines.append("")
161 initialmergedlines.append("")
162
162
163 tags = []
163 tags = []
164
164
165 wlock = lock = tr = None
165 wlock = lock = tr = None
166 try:
166 try:
167 wlock = repo.wlock()
167 wlock = repo.wlock()
168 lock = repo.lock()
168 lock = repo.lock()
169 tr = repo.transaction("builddag")
169 tr = repo.transaction("builddag")
170
170
171 at = -1
171 at = -1
172 atbranch = 'default'
172 atbranch = 'default'
173 nodeids = []
173 nodeids = []
174 id = 0
174 id = 0
175 ui.progress(_('building'), id, unit=_('revisions'), total=total)
175 ui.progress(_('building'), id, unit=_('revisions'), total=total)
176 for type, data in dagparser.parsedag(text):
176 for type, data in dagparser.parsedag(text):
177 if type == 'n':
177 if type == 'n':
178 ui.note(('node %s\n' % str(data)))
178 ui.note(('node %s\n' % str(data)))
179 id, ps = data
179 id, ps = data
180
180
181 files = []
181 files = []
182 fctxs = {}
182 fctxs = {}
183
183
184 p2 = None
184 p2 = None
185 if mergeable_file:
185 if mergeable_file:
186 fn = "mf"
186 fn = "mf"
187 p1 = repo[ps[0]]
187 p1 = repo[ps[0]]
188 if len(ps) > 1:
188 if len(ps) > 1:
189 p2 = repo[ps[1]]
189 p2 = repo[ps[1]]
190 pa = p1.ancestor(p2)
190 pa = p1.ancestor(p2)
191 base, local, other = [x[fn].data() for x in (pa, p1,
191 base, local, other = [x[fn].data() for x in (pa, p1,
192 p2)]
192 p2)]
193 m3 = simplemerge.Merge3Text(base, local, other)
193 m3 = simplemerge.Merge3Text(base, local, other)
194 ml = [l.strip() for l in m3.merge_lines()]
194 ml = [l.strip() for l in m3.merge_lines()]
195 ml.append("")
195 ml.append("")
196 elif at > 0:
196 elif at > 0:
197 ml = p1[fn].data().split("\n")
197 ml = p1[fn].data().split("\n")
198 else:
198 else:
199 ml = initialmergedlines
199 ml = initialmergedlines
200 ml[id * linesperrev] += " r%i" % id
200 ml[id * linesperrev] += " r%i" % id
201 mergedtext = "\n".join(ml)
201 mergedtext = "\n".join(ml)
202 files.append(fn)
202 files.append(fn)
203 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
203 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
204
204
205 if overwritten_file:
205 if overwritten_file:
206 fn = "of"
206 fn = "of"
207 files.append(fn)
207 files.append(fn)
208 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
208 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
209
209
210 if new_file:
210 if new_file:
211 fn = "nf%i" % id
211 fn = "nf%i" % id
212 files.append(fn)
212 files.append(fn)
213 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
213 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
214 if len(ps) > 1:
214 if len(ps) > 1:
215 if not p2:
215 if not p2:
216 p2 = repo[ps[1]]
216 p2 = repo[ps[1]]
217 for fn in p2:
217 for fn in p2:
218 if fn.startswith("nf"):
218 if fn.startswith("nf"):
219 files.append(fn)
219 files.append(fn)
220 fctxs[fn] = p2[fn]
220 fctxs[fn] = p2[fn]
221
221
222 def fctxfn(repo, cx, path):
222 def fctxfn(repo, cx, path):
223 return fctxs.get(path)
223 return fctxs.get(path)
224
224
225 if len(ps) == 0 or ps[0] < 0:
225 if len(ps) == 0 or ps[0] < 0:
226 pars = [None, None]
226 pars = [None, None]
227 elif len(ps) == 1:
227 elif len(ps) == 1:
228 pars = [nodeids[ps[0]], None]
228 pars = [nodeids[ps[0]], None]
229 else:
229 else:
230 pars = [nodeids[p] for p in ps]
230 pars = [nodeids[p] for p in ps]
231 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
231 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
232 date=(id, 0),
232 date=(id, 0),
233 user="debugbuilddag",
233 user="debugbuilddag",
234 extra={'branch': atbranch})
234 extra={'branch': atbranch})
235 nodeid = repo.commitctx(cx)
235 nodeid = repo.commitctx(cx)
236 nodeids.append(nodeid)
236 nodeids.append(nodeid)
237 at = id
237 at = id
238 elif type == 'l':
238 elif type == 'l':
239 id, name = data
239 id, name = data
240 ui.note(('tag %s\n' % name))
240 ui.note(('tag %s\n' % name))
241 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
241 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
242 elif type == 'a':
242 elif type == 'a':
243 ui.note(('branch %s\n' % data))
243 ui.note(('branch %s\n' % data))
244 atbranch = data
244 atbranch = data
245 ui.progress(_('building'), id, unit=_('revisions'), total=total)
245 ui.progress(_('building'), id, unit=_('revisions'), total=total)
246 tr.close()
246 tr.close()
247
247
248 if tags:
248 if tags:
249 repo.vfs.write("localtags", "".join(tags))
249 repo.vfs.write("localtags", "".join(tags))
250 finally:
250 finally:
251 ui.progress(_('building'), None)
251 ui.progress(_('building'), None)
252 release(tr, lock, wlock)
252 release(tr, lock, wlock)
253
253
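# Illustrative sketch (not part of the diff above): the DAG text grammar
# documented in debugbuilddag can be exercised directly with
# dagparser.parsedag(), which yields the same ('n', ...), ('l', ...) and
# ('a', ...) events the loops above consume.  The sample text is made up.
from __future__ import print_function
from mercurial import dagparser

sample = "+2 *1 +1 :feature <2 @stable +1"
for kind, data in dagparser.parsedag(sample):
    if kind == 'n':        # new node: (id, [parent ids])
        print('node', data)
    elif kind == 'l':      # local tag attached to the preceding node
        print('tag', data)
    elif kind == 'a':      # named branch for subsequent nodes
        print('branch', data)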
254 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
254 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
255 indent_string = ' ' * indent
255 indent_string = ' ' * indent
256 if all:
256 if all:
257 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
257 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
258 % indent_string)
258 % indent_string)
259
259
260 def showchunks(named):
260 def showchunks(named):
261 ui.write("\n%s%s\n" % (indent_string, named))
261 ui.write("\n%s%s\n" % (indent_string, named))
262 chain = None
262 chain = None
263 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
263 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
264 node = chunkdata['node']
264 node = chunkdata['node']
265 p1 = chunkdata['p1']
265 p1 = chunkdata['p1']
266 p2 = chunkdata['p2']
266 p2 = chunkdata['p2']
267 cs = chunkdata['cs']
267 cs = chunkdata['cs']
268 deltabase = chunkdata['deltabase']
268 deltabase = chunkdata['deltabase']
269 delta = chunkdata['delta']
269 delta = chunkdata['delta']
270 ui.write("%s%s %s %s %s %s %s\n" %
270 ui.write("%s%s %s %s %s %s %s\n" %
271 (indent_string, hex(node), hex(p1), hex(p2),
271 (indent_string, hex(node), hex(p1), hex(p2),
272 hex(cs), hex(deltabase), len(delta)))
272 hex(cs), hex(deltabase), len(delta)))
273 chain = node
273 chain = node
274
274
275 chunkdata = gen.changelogheader()
275 chunkdata = gen.changelogheader()
276 showchunks("changelog")
276 showchunks("changelog")
277 chunkdata = gen.manifestheader()
277 chunkdata = gen.manifestheader()
278 showchunks("manifest")
278 showchunks("manifest")
279 for chunkdata in iter(gen.filelogheader, {}):
279 for chunkdata in iter(gen.filelogheader, {}):
280 fname = chunkdata['filename']
280 fname = chunkdata['filename']
281 showchunks(fname)
281 showchunks(fname)
282 else:
282 else:
283 if isinstance(gen, bundle2.unbundle20):
283 if isinstance(gen, bundle2.unbundle20):
284 raise error.Abort(_('use debugbundle2 for this file'))
284 raise error.Abort(_('use debugbundle2 for this file'))
285 chunkdata = gen.changelogheader()
285 chunkdata = gen.changelogheader()
286 chain = None
286 chain = None
287 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
287 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
288 node = chunkdata['node']
288 node = chunkdata['node']
289 ui.write("%s%s\n" % (indent_string, hex(node)))
289 ui.write("%s%s\n" % (indent_string, hex(node)))
290 chain = node
290 chain = node
291
291
292 def _debugobsmarkers(ui, part, indent=0, **opts):
292 def _debugobsmarkers(ui, part, indent=0, **opts):
293 """display version and markers contained in 'data'"""
293 """display version and markers contained in 'data'"""
294 opts = pycompat.byteskwargs(opts)
294 data = part.read()
295 data = part.read()
295 indent_string = ' ' * indent
296 indent_string = ' ' * indent
296 try:
297 try:
297 version, markers = obsolete._readmarkers(data)
298 version, markers = obsolete._readmarkers(data)
298 except error.UnknownVersion as exc:
299 except error.UnknownVersion as exc:
299 msg = "%sunsupported version: %s (%d bytes)\n"
300 msg = "%sunsupported version: %s (%d bytes)\n"
300 msg %= indent_string, exc.version, len(data)
301 msg %= indent_string, exc.version, len(data)
301 ui.write(msg)
302 ui.write(msg)
302 else:
303 else:
303 msg = "%sversion: %s (%d bytes)\n"
304 msg = "%sversion: %s (%d bytes)\n"
304 msg %= indent_string, version, len(data)
305 msg %= indent_string, version, len(data)
305 ui.write(msg)
306 ui.write(msg)
306 fm = ui.formatter('debugobsolete', opts)
307 fm = ui.formatter('debugobsolete', opts)
307 for rawmarker in sorted(markers):
308 for rawmarker in sorted(markers):
308 m = obsolete.marker(None, rawmarker)
309 m = obsolete.marker(None, rawmarker)
309 fm.startitem()
310 fm.startitem()
310 fm.plain(indent_string)
311 fm.plain(indent_string)
311 cmdutil.showmarker(fm, m)
312 cmdutil.showmarker(fm, m)
312 fm.end()
313 fm.end()
313
314
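# Illustrative sketch (not part of the diff above) of why this change adds
# 'opts = pycompat.byteskwargs(opts)' at the top of several commands: on
# Python 3 the **opts keys arrive as unicode str, while the code below looks
# them up with bytes keys.  The real helper lives in mercurial/pycompat.py;
# this rough equivalent is a no-op on Python 2.
import sys

def byteskwargs_sketch(opts):
    if sys.version_info[0] >= 3:
        return dict((k.encode('latin-1'), v) for k, v in opts.items())
    return opts

# e.g. opts = byteskwargs_sketch(opts); opts.get(b'rev') then keeps working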
314 def _debugphaseheads(ui, data, indent=0):
315 def _debugphaseheads(ui, data, indent=0):
315 """display version and markers contained in 'data'"""
316 """display version and markers contained in 'data'"""
316 indent_string = ' ' * indent
317 indent_string = ' ' * indent
317 headsbyphase = bundle2._readphaseheads(data)
318 headsbyphase = bundle2._readphaseheads(data)
318 for phase in phases.allphases:
319 for phase in phases.allphases:
319 for head in headsbyphase[phase]:
320 for head in headsbyphase[phase]:
320 ui.write(indent_string)
321 ui.write(indent_string)
321 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
322 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
322
323
323 def _debugbundle2(ui, gen, all=None, **opts):
324 def _debugbundle2(ui, gen, all=None, **opts):
324 """lists the contents of a bundle2"""
325 """lists the contents of a bundle2"""
325 if not isinstance(gen, bundle2.unbundle20):
326 if not isinstance(gen, bundle2.unbundle20):
326 raise error.Abort(_('not a bundle2 file'))
327 raise error.Abort(_('not a bundle2 file'))
327 ui.write(('Stream params: %s\n' % repr(gen.params)))
328 ui.write(('Stream params: %s\n' % repr(gen.params)))
328 parttypes = opts.get('part_type', [])
329 parttypes = opts.get('part_type', [])
329 for part in gen.iterparts():
330 for part in gen.iterparts():
330 if parttypes and part.type not in parttypes:
331 if parttypes and part.type not in parttypes:
331 continue
332 continue
332 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
333 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
333 if part.type == 'changegroup':
334 if part.type == 'changegroup':
334 version = part.params.get('version', '01')
335 version = part.params.get('version', '01')
335 cg = changegroup.getunbundler(version, part, 'UN')
336 cg = changegroup.getunbundler(version, part, 'UN')
336 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
337 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
337 if part.type == 'obsmarkers':
338 if part.type == 'obsmarkers':
338 _debugobsmarkers(ui, part, indent=4, **opts)
339 _debugobsmarkers(ui, part, indent=4, **opts)
339 if part.type == 'phase-heads':
340 if part.type == 'phase-heads':
340 _debugphaseheads(ui, part, indent=4)
341 _debugphaseheads(ui, part, indent=4)
341
342
342 @command('debugbundle',
343 @command('debugbundle',
343 [('a', 'all', None, _('show all details')),
344 [('a', 'all', None, _('show all details')),
344 ('', 'part-type', [], _('show only the named part type')),
345 ('', 'part-type', [], _('show only the named part type')),
345 ('', 'spec', None, _('print the bundlespec of the bundle'))],
346 ('', 'spec', None, _('print the bundlespec of the bundle'))],
346 _('FILE'),
347 _('FILE'),
347 norepo=True)
348 norepo=True)
348 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
349 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
349 """lists the contents of a bundle"""
350 """lists the contents of a bundle"""
350 with hg.openpath(ui, bundlepath) as f:
351 with hg.openpath(ui, bundlepath) as f:
351 if spec:
352 if spec:
352 spec = exchange.getbundlespec(ui, f)
353 spec = exchange.getbundlespec(ui, f)
353 ui.write('%s\n' % spec)
354 ui.write('%s\n' % spec)
354 return
355 return
355
356
356 gen = exchange.readbundle(ui, f, bundlepath)
357 gen = exchange.readbundle(ui, f, bundlepath)
357 if isinstance(gen, bundle2.unbundle20):
358 if isinstance(gen, bundle2.unbundle20):
358 return _debugbundle2(ui, gen, all=all, **opts)
359 return _debugbundle2(ui, gen, all=all, **opts)
359 _debugchangegroup(ui, gen, all=all, **opts)
360 _debugchangegroup(ui, gen, all=all, **opts)
360
361
361 @command('debugcheckstate', [], '')
362 @command('debugcheckstate', [], '')
362 def debugcheckstate(ui, repo):
363 def debugcheckstate(ui, repo):
363 """validate the correctness of the current dirstate"""
364 """validate the correctness of the current dirstate"""
364 parent1, parent2 = repo.dirstate.parents()
365 parent1, parent2 = repo.dirstate.parents()
365 m1 = repo[parent1].manifest()
366 m1 = repo[parent1].manifest()
366 m2 = repo[parent2].manifest()
367 m2 = repo[parent2].manifest()
367 errors = 0
368 errors = 0
368 for f in repo.dirstate:
369 for f in repo.dirstate:
369 state = repo.dirstate[f]
370 state = repo.dirstate[f]
370 if state in "nr" and f not in m1:
371 if state in "nr" and f not in m1:
371 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
372 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
372 errors += 1
373 errors += 1
373 if state in "a" and f in m1:
374 if state in "a" and f in m1:
374 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
375 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
375 errors += 1
376 errors += 1
376 if state in "m" and f not in m1 and f not in m2:
377 if state in "m" and f not in m1 and f not in m2:
377 ui.warn(_("%s in state %s, but not in either manifest\n") %
378 ui.warn(_("%s in state %s, but not in either manifest\n") %
378 (f, state))
379 (f, state))
379 errors += 1
380 errors += 1
380 for f in m1:
381 for f in m1:
381 state = repo.dirstate[f]
382 state = repo.dirstate[f]
382 if state not in "nrm":
383 if state not in "nrm":
383 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
384 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
384 errors += 1
385 errors += 1
385 if errors:
386 if errors:
386 error = _(".hg/dirstate inconsistent with current parent's manifest")
387 error = _(".hg/dirstate inconsistent with current parent's manifest")
387 raise error.Abort(error)
388 raise error.Abort(error)
388
389
389 @command('debugcolor',
390 @command('debugcolor',
390 [('', 'style', None, _('show all configured styles'))],
391 [('', 'style', None, _('show all configured styles'))],
391 'hg debugcolor')
392 'hg debugcolor')
392 def debugcolor(ui, repo, **opts):
393 def debugcolor(ui, repo, **opts):
393 """show available color, effects or style"""
394 """show available color, effects or style"""
394 ui.write(('color mode: %s\n') % ui._colormode)
395 ui.write(('color mode: %s\n') % ui._colormode)
395 if opts.get('style'):
396 if opts.get('style'):
396 return _debugdisplaystyle(ui)
397 return _debugdisplaystyle(ui)
397 else:
398 else:
398 return _debugdisplaycolor(ui)
399 return _debugdisplaycolor(ui)
399
400
400 def _debugdisplaycolor(ui):
401 def _debugdisplaycolor(ui):
401 ui = ui.copy()
402 ui = ui.copy()
402 ui._styles.clear()
403 ui._styles.clear()
403 for effect in color._activeeffects(ui).keys():
404 for effect in color._activeeffects(ui).keys():
404 ui._styles[effect] = effect
405 ui._styles[effect] = effect
405 if ui._terminfoparams:
406 if ui._terminfoparams:
406 for k, v in ui.configitems('color'):
407 for k, v in ui.configitems('color'):
407 if k.startswith('color.'):
408 if k.startswith('color.'):
408 ui._styles[k] = k[6:]
409 ui._styles[k] = k[6:]
409 elif k.startswith('terminfo.'):
410 elif k.startswith('terminfo.'):
410 ui._styles[k] = k[9:]
411 ui._styles[k] = k[9:]
411 ui.write(_('available colors:\n'))
412 ui.write(_('available colors:\n'))
412 # sort labels with '_' after the others to group the '_background' entries.
413 # sort labels with '_' after the others to group the '_background' entries.
413 items = sorted(ui._styles.items(),
414 items = sorted(ui._styles.items(),
414 key=lambda i: ('_' in i[0], i[0], i[1]))
415 key=lambda i: ('_' in i[0], i[0], i[1]))
415 for colorname, label in items:
416 for colorname, label in items:
416 ui.write(('%s\n') % colorname, label=label)
417 ui.write(('%s\n') % colorname, label=label)
417
418
418 def _debugdisplaystyle(ui):
419 def _debugdisplaystyle(ui):
419 ui.write(_('available style:\n'))
420 ui.write(_('available style:\n'))
420 width = max(len(s) for s in ui._styles)
421 width = max(len(s) for s in ui._styles)
421 for label, effects in sorted(ui._styles.items()):
422 for label, effects in sorted(ui._styles.items()):
422 ui.write('%s' % label, label=label)
423 ui.write('%s' % label, label=label)
423 if effects:
424 if effects:
424 # 50
425 # 50
425 ui.write(': ')
426 ui.write(': ')
426 ui.write(' ' * (max(0, width - len(label))))
427 ui.write(' ' * (max(0, width - len(label))))
427 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
428 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
428 ui.write('\n')
429 ui.write('\n')
429
430
430 @command('debugcreatestreamclonebundle', [], 'FILE')
431 @command('debugcreatestreamclonebundle', [], 'FILE')
431 def debugcreatestreamclonebundle(ui, repo, fname):
432 def debugcreatestreamclonebundle(ui, repo, fname):
432 """create a stream clone bundle file
433 """create a stream clone bundle file
433
434
434 Stream bundles are special bundles that are essentially archives of
435 Stream bundles are special bundles that are essentially archives of
435 revlog files. They are commonly used for cloning very quickly.
436 revlog files. They are commonly used for cloning very quickly.
436 """
437 """
437 # TODO we may want to turn this into an abort when this functionality
438 # TODO we may want to turn this into an abort when this functionality
438 # is moved into `hg bundle`.
439 # is moved into `hg bundle`.
439 if phases.hassecret(repo):
440 if phases.hassecret(repo):
440 ui.warn(_('(warning: stream clone bundle will contain secret '
441 ui.warn(_('(warning: stream clone bundle will contain secret '
441 'revisions)\n'))
442 'revisions)\n'))
442
443
443 requirements, gen = streamclone.generatebundlev1(repo)
444 requirements, gen = streamclone.generatebundlev1(repo)
444 changegroup.writechunks(ui, gen, fname)
445 changegroup.writechunks(ui, gen, fname)
445
446
446 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
447 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
447
448
448 @command('debugdag',
449 @command('debugdag',
449 [('t', 'tags', None, _('use tags as labels')),
450 [('t', 'tags', None, _('use tags as labels')),
450 ('b', 'branches', None, _('annotate with branch names')),
451 ('b', 'branches', None, _('annotate with branch names')),
451 ('', 'dots', None, _('use dots for runs')),
452 ('', 'dots', None, _('use dots for runs')),
452 ('s', 'spaces', None, _('separate elements by spaces'))],
453 ('s', 'spaces', None, _('separate elements by spaces'))],
453 _('[OPTION]... [FILE [REV]...]'),
454 _('[OPTION]... [FILE [REV]...]'),
454 optionalrepo=True)
455 optionalrepo=True)
455 def debugdag(ui, repo, file_=None, *revs, **opts):
456 def debugdag(ui, repo, file_=None, *revs, **opts):
456 """format the changelog or an index DAG as a concise textual description
457 """format the changelog or an index DAG as a concise textual description
457
458
458 If you pass a revlog index, the revlog's DAG is emitted. If you list
459 If you pass a revlog index, the revlog's DAG is emitted. If you list
459 revision numbers, they get labeled in the output as rN.
460 revision numbers, they get labeled in the output as rN.
460
461
461 Otherwise, the changelog DAG of the current repo is emitted.
462 Otherwise, the changelog DAG of the current repo is emitted.
462 """
463 """
463 spaces = opts.get('spaces')
464 spaces = opts.get('spaces')
464 dots = opts.get('dots')
465 dots = opts.get('dots')
465 if file_:
466 if file_:
466 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
467 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
467 file_)
468 file_)
468 revs = set((int(r) for r in revs))
469 revs = set((int(r) for r in revs))
469 def events():
470 def events():
470 for r in rlog:
471 for r in rlog:
471 yield 'n', (r, list(p for p in rlog.parentrevs(r)
472 yield 'n', (r, list(p for p in rlog.parentrevs(r)
472 if p != -1))
473 if p != -1))
473 if r in revs:
474 if r in revs:
474 yield 'l', (r, "r%i" % r)
475 yield 'l', (r, "r%i" % r)
475 elif repo:
476 elif repo:
476 cl = repo.changelog
477 cl = repo.changelog
477 tags = opts.get('tags')
478 tags = opts.get('tags')
478 branches = opts.get('branches')
479 branches = opts.get('branches')
479 if tags:
480 if tags:
480 labels = {}
481 labels = {}
481 for l, n in repo.tags().items():
482 for l, n in repo.tags().items():
482 labels.setdefault(cl.rev(n), []).append(l)
483 labels.setdefault(cl.rev(n), []).append(l)
483 def events():
484 def events():
484 b = "default"
485 b = "default"
485 for r in cl:
486 for r in cl:
486 if branches:
487 if branches:
487 newb = cl.read(cl.node(r))[5]['branch']
488 newb = cl.read(cl.node(r))[5]['branch']
488 if newb != b:
489 if newb != b:
489 yield 'a', newb
490 yield 'a', newb
490 b = newb
491 b = newb
491 yield 'n', (r, list(p for p in cl.parentrevs(r)
492 yield 'n', (r, list(p for p in cl.parentrevs(r)
492 if p != -1))
493 if p != -1))
493 if tags:
494 if tags:
494 ls = labels.get(r)
495 ls = labels.get(r)
495 if ls:
496 if ls:
496 for l in ls:
497 for l in ls:
497 yield 'l', (r, l)
498 yield 'l', (r, l)
498 else:
499 else:
499 raise error.Abort(_('need repo for changelog dag'))
500 raise error.Abort(_('need repo for changelog dag'))
500
501
501 for line in dagparser.dagtextlines(events(),
502 for line in dagparser.dagtextlines(events(),
502 addspaces=spaces,
503 addspaces=spaces,
503 wraplabels=True,
504 wraplabels=True,
504 wrapannotations=True,
505 wrapannotations=True,
505 wrapnonlinear=dots,
506 wrapnonlinear=dots,
506 usedots=dots,
507 usedots=dots,
507 maxlinewidth=70):
508 maxlinewidth=70):
508 ui.write(line)
509 ui.write(line)
509 ui.write("\n")
510 ui.write("\n")
510
511
511 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
512 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
512 def debugdata(ui, repo, file_, rev=None, **opts):
513 def debugdata(ui, repo, file_, rev=None, **opts):
513 """dump the contents of a data file revision"""
514 """dump the contents of a data file revision"""
515 opts = pycompat.byteskwargs(opts)
514 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
516 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
515 if rev is not None:
517 if rev is not None:
516 raise error.CommandError('debugdata', _('invalid arguments'))
518 raise error.CommandError('debugdata', _('invalid arguments'))
517 file_, rev = None, file_
519 file_, rev = None, file_
518 elif rev is None:
520 elif rev is None:
519 raise error.CommandError('debugdata', _('invalid arguments'))
521 raise error.CommandError('debugdata', _('invalid arguments'))
520 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
522 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
521 try:
523 try:
522 ui.write(r.revision(r.lookup(rev), raw=True))
524 ui.write(r.revision(r.lookup(rev), raw=True))
523 except KeyError:
525 except KeyError:
524 raise error.Abort(_('invalid revision identifier %s') % rev)
526 raise error.Abort(_('invalid revision identifier %s') % rev)
525
527
526 @command('debugdate',
528 @command('debugdate',
527 [('e', 'extended', None, _('try extended date formats'))],
529 [('e', 'extended', None, _('try extended date formats'))],
528 _('[-e] DATE [RANGE]'),
530 _('[-e] DATE [RANGE]'),
529 norepo=True, optionalrepo=True)
531 norepo=True, optionalrepo=True)
530 def debugdate(ui, date, range=None, **opts):
532 def debugdate(ui, date, range=None, **opts):
531 """parse and display a date"""
533 """parse and display a date"""
532 if opts["extended"]:
534 if opts["extended"]:
533 d = util.parsedate(date, util.extendeddateformats)
535 d = util.parsedate(date, util.extendeddateformats)
534 else:
536 else:
535 d = util.parsedate(date)
537 d = util.parsedate(date)
536 ui.write(("internal: %s %s\n") % d)
538 ui.write(("internal: %s %s\n") % d)
537 ui.write(("standard: %s\n") % util.datestr(d))
539 ui.write(("standard: %s\n") % util.datestr(d))
538 if range:
540 if range:
539 m = util.matchdate(range)
541 m = util.matchdate(range)
540 ui.write(("match: %s\n") % m(d[0]))
542 ui.write(("match: %s\n") % m(d[0]))
541
543
542 @command('debugdeltachain',
544 @command('debugdeltachain',
543 cmdutil.debugrevlogopts + cmdutil.formatteropts,
545 cmdutil.debugrevlogopts + cmdutil.formatteropts,
544 _('-c|-m|FILE'),
546 _('-c|-m|FILE'),
545 optionalrepo=True)
547 optionalrepo=True)
546 def debugdeltachain(ui, repo, file_=None, **opts):
548 def debugdeltachain(ui, repo, file_=None, **opts):
547 """dump information about delta chains in a revlog
549 """dump information about delta chains in a revlog
548
550
549 Output can be templatized. Available template keywords are:
551 Output can be templatized. Available template keywords are:
550
552
551 :``rev``: revision number
553 :``rev``: revision number
552 :``chainid``: delta chain identifier (numbered by unique base)
554 :``chainid``: delta chain identifier (numbered by unique base)
553 :``chainlen``: delta chain length to this revision
555 :``chainlen``: delta chain length to this revision
554 :``prevrev``: previous revision in delta chain
556 :``prevrev``: previous revision in delta chain
555 :``deltatype``: role of delta / how it was computed
557 :``deltatype``: role of delta / how it was computed
556 :``compsize``: compressed size of revision
558 :``compsize``: compressed size of revision
557 :``uncompsize``: uncompressed size of revision
559 :``uncompsize``: uncompressed size of revision
558 :``chainsize``: total size of compressed revisions in chain
560 :``chainsize``: total size of compressed revisions in chain
559 :``chainratio``: total chain size divided by uncompressed revision size
561 :``chainratio``: total chain size divided by uncompressed revision size
560 (new delta chains typically start at ratio 2.00)
562 (new delta chains typically start at ratio 2.00)
561 :``lindist``: linear distance from base revision in delta chain to end
563 :``lindist``: linear distance from base revision in delta chain to end
562 of this revision
564 of this revision
563 :``extradist``: total size of revisions not part of this delta chain from
565 :``extradist``: total size of revisions not part of this delta chain from
564 base of delta chain to end of this revision; a measurement
566 base of delta chain to end of this revision; a measurement
565 of how much extra data we need to read/seek across to read
567 of how much extra data we need to read/seek across to read
566 the delta chain for this revision
568 the delta chain for this revision
567 :``extraratio``: extradist divided by chainsize; another representation of
569 :``extraratio``: extradist divided by chainsize; another representation of
568 how much unrelated data is needed to load this delta chain
570 how much unrelated data is needed to load this delta chain
569 """
571 """
572 opts = pycompat.byteskwargs(opts)
570 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
573 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
571 index = r.index
574 index = r.index
572 generaldelta = r.version & revlog.FLAG_GENERALDELTA
575 generaldelta = r.version & revlog.FLAG_GENERALDELTA
573
576
574 def revinfo(rev):
577 def revinfo(rev):
575 e = index[rev]
578 e = index[rev]
576 compsize = e[1]
579 compsize = e[1]
577 uncompsize = e[2]
580 uncompsize = e[2]
578 chainsize = 0
581 chainsize = 0
579
582
580 if generaldelta:
583 if generaldelta:
581 if e[3] == e[5]:
584 if e[3] == e[5]:
582 deltatype = 'p1'
585 deltatype = 'p1'
583 elif e[3] == e[6]:
586 elif e[3] == e[6]:
584 deltatype = 'p2'
587 deltatype = 'p2'
585 elif e[3] == rev - 1:
588 elif e[3] == rev - 1:
586 deltatype = 'prev'
589 deltatype = 'prev'
587 elif e[3] == rev:
590 elif e[3] == rev:
588 deltatype = 'base'
591 deltatype = 'base'
589 else:
592 else:
590 deltatype = 'other'
593 deltatype = 'other'
591 else:
594 else:
592 if e[3] == rev:
595 if e[3] == rev:
593 deltatype = 'base'
596 deltatype = 'base'
594 else:
597 else:
595 deltatype = 'prev'
598 deltatype = 'prev'
596
599
597 chain = r._deltachain(rev)[0]
600 chain = r._deltachain(rev)[0]
598 for iterrev in chain:
601 for iterrev in chain:
599 e = index[iterrev]
602 e = index[iterrev]
600 chainsize += e[1]
603 chainsize += e[1]
601
604
602 return compsize, uncompsize, deltatype, chain, chainsize
605 return compsize, uncompsize, deltatype, chain, chainsize
603
606
604 fm = ui.formatter('debugdeltachain', opts)
607 fm = ui.formatter('debugdeltachain', opts)
605
608
606 fm.plain(' rev chain# chainlen prev delta '
609 fm.plain(' rev chain# chainlen prev delta '
607 'size rawsize chainsize ratio lindist extradist '
610 'size rawsize chainsize ratio lindist extradist '
608 'extraratio\n')
611 'extraratio\n')
609
612
610 chainbases = {}
613 chainbases = {}
611 for rev in r:
614 for rev in r:
612 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
615 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
613 chainbase = chain[0]
616 chainbase = chain[0]
614 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
617 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
615 basestart = r.start(chainbase)
618 basestart = r.start(chainbase)
616 revstart = r.start(rev)
619 revstart = r.start(rev)
617 lineardist = revstart + comp - basestart
620 lineardist = revstart + comp - basestart
618 extradist = lineardist - chainsize
621 extradist = lineardist - chainsize
619 try:
622 try:
620 prevrev = chain[-2]
623 prevrev = chain[-2]
621 except IndexError:
624 except IndexError:
622 prevrev = -1
625 prevrev = -1
623
626
624 chainratio = float(chainsize) / float(uncomp)
627 chainratio = float(chainsize) / float(uncomp)
625 extraratio = float(extradist) / float(chainsize)
628 extraratio = float(extradist) / float(chainsize)
626
629
627 fm.startitem()
630 fm.startitem()
628 fm.write('rev chainid chainlen prevrev deltatype compsize '
631 fm.write('rev chainid chainlen prevrev deltatype compsize '
629 'uncompsize chainsize chainratio lindist extradist '
632 'uncompsize chainsize chainratio lindist extradist '
630 'extraratio',
633 'extraratio',
631 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
634 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
632 rev, chainid, len(chain), prevrev, deltatype, comp,
635 rev, chainid, len(chain), prevrev, deltatype, comp,
633 uncomp, chainsize, chainratio, lineardist, extradist,
636 uncomp, chainsize, chainratio, lineardist, extradist,
634 extraratio,
637 extraratio,
635 rev=rev, chainid=chainid, chainlen=len(chain),
638 rev=rev, chainid=chainid, chainlen=len(chain),
636 prevrev=prevrev, deltatype=deltatype, compsize=comp,
639 prevrev=prevrev, deltatype=deltatype, compsize=comp,
637 uncompsize=uncomp, chainsize=chainsize,
640 uncompsize=uncomp, chainsize=chainsize,
638 chainratio=chainratio, lindist=lineardist,
641 chainratio=chainratio, lindist=lineardist,
639 extradist=extradist, extraratio=extraratio)
642 extradist=extradist, extraratio=extraratio)
640
643
641 fm.end()
644 fm.end()
642
645
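# Illustrative sketch (not part of the diff above) of the quantities printed
# by debugdeltachain, using made-up sizes for a three-revision chain; the
# formulas mirror the ones computed in the loop above.
comp = [120, 40, 35]            # compressed size of each rev in the chain
uncomp = 900                    # uncompressed size of the last rev
revstart = [0, 300, 500]        # start offsets of those revs in the revlog
chainsize = sum(comp)                                 # 195
lineardist = revstart[-1] + comp[-1] - revstart[0]    # 535
extradist = lineardist - chainsize                    # 340
chainratio = float(chainsize) / float(uncomp)         # ~0.217
extraratio = float(extradist) / float(chainsize)      # ~1.744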
643 @command('debugdirstate|debugstate',
646 @command('debugdirstate|debugstate',
644 [('', 'nodates', None, _('do not display the saved mtime')),
647 [('', 'nodates', None, _('do not display the saved mtime')),
645 ('', 'datesort', None, _('sort by saved mtime'))],
648 ('', 'datesort', None, _('sort by saved mtime'))],
646 _('[OPTION]...'))
649 _('[OPTION]...'))
647 def debugstate(ui, repo, **opts):
650 def debugstate(ui, repo, **opts):
648 """show the contents of the current dirstate"""
651 """show the contents of the current dirstate"""
649
652
650 nodates = opts.get('nodates')
653 nodates = opts.get('nodates')
651 datesort = opts.get('datesort')
654 datesort = opts.get('datesort')
652
655
653 timestr = ""
656 timestr = ""
654 if datesort:
657 if datesort:
655 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
658 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
656 else:
659 else:
657 keyfunc = None # sort by filename
660 keyfunc = None # sort by filename
658 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
661 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
659 if ent[3] == -1:
662 if ent[3] == -1:
660 timestr = 'unset '
663 timestr = 'unset '
661 elif nodates:
664 elif nodates:
662 timestr = 'set '
665 timestr = 'set '
663 else:
666 else:
664 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
667 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
665 time.localtime(ent[3]))
668 time.localtime(ent[3]))
666 if ent[1] & 0o20000:
669 if ent[1] & 0o20000:
667 mode = 'lnk'
670 mode = 'lnk'
668 else:
671 else:
669 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
672 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
670 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
673 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
671 for f in repo.dirstate.copies():
674 for f in repo.dirstate.copies():
672 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
675 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
673
676
674 @command('debugdiscovery',
677 @command('debugdiscovery',
675 [('', 'old', None, _('use old-style discovery')),
678 [('', 'old', None, _('use old-style discovery')),
676 ('', 'nonheads', None,
679 ('', 'nonheads', None,
677 _('use old-style discovery with non-heads included')),
680 _('use old-style discovery with non-heads included')),
678 ] + cmdutil.remoteopts,
681 ] + cmdutil.remoteopts,
679 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
682 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
680 def debugdiscovery(ui, repo, remoteurl="default", **opts):
683 def debugdiscovery(ui, repo, remoteurl="default", **opts):
681 """runs the changeset discovery protocol in isolation"""
684 """runs the changeset discovery protocol in isolation"""
685 opts = pycompat.byteskwargs(opts)
682 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
686 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
683 opts.get('branch'))
687 opts.get('branch'))
684 remote = hg.peer(repo, opts, remoteurl)
688 remote = hg.peer(repo, opts, remoteurl)
685 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
689 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
686
690
687 # make sure tests are repeatable
691 # make sure tests are repeatable
688 random.seed(12323)
692 random.seed(12323)
689
693
690 def doit(localheads, remoteheads, remote=remote):
694 def doit(localheads, remoteheads, remote=remote):
691 if opts.get('old'):
695 if opts.get('old'):
692 if localheads:
696 if localheads:
693 raise error.Abort('cannot use localheads with old style '
697 raise error.Abort('cannot use localheads with old style '
694 'discovery')
698 'discovery')
695 if not util.safehasattr(remote, 'branches'):
699 if not util.safehasattr(remote, 'branches'):
696 # enable in-client legacy support
700 # enable in-client legacy support
697 remote = localrepo.locallegacypeer(remote.local())
701 remote = localrepo.locallegacypeer(remote.local())
698 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
702 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
699 force=True)
703 force=True)
700 common = set(common)
704 common = set(common)
701 if not opts.get('nonheads'):
705 if not opts.get('nonheads'):
702 ui.write(("unpruned common: %s\n") %
706 ui.write(("unpruned common: %s\n") %
703 " ".join(sorted(short(n) for n in common)))
707 " ".join(sorted(short(n) for n in common)))
704 dag = dagutil.revlogdag(repo.changelog)
708 dag = dagutil.revlogdag(repo.changelog)
705 all = dag.ancestorset(dag.internalizeall(common))
709 all = dag.ancestorset(dag.internalizeall(common))
706 common = dag.externalizeall(dag.headsetofconnecteds(all))
710 common = dag.externalizeall(dag.headsetofconnecteds(all))
707 else:
711 else:
708 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
712 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
709 common = set(common)
713 common = set(common)
710 rheads = set(hds)
714 rheads = set(hds)
711 lheads = set(repo.heads())
715 lheads = set(repo.heads())
712 ui.write(("common heads: %s\n") %
716 ui.write(("common heads: %s\n") %
713 " ".join(sorted(short(n) for n in common)))
717 " ".join(sorted(short(n) for n in common)))
714 if lheads <= common:
718 if lheads <= common:
715 ui.write(("local is subset\n"))
719 ui.write(("local is subset\n"))
716 elif rheads <= common:
720 elif rheads <= common:
717 ui.write(("remote is subset\n"))
721 ui.write(("remote is subset\n"))
718
722
719 serverlogs = opts.get('serverlog')
723 serverlogs = opts.get('serverlog')
720 if serverlogs:
724 if serverlogs:
721 for filename in serverlogs:
725 for filename in serverlogs:
722 with open(filename, 'r') as logfile:
726 with open(filename, 'r') as logfile:
723 line = logfile.readline()
727 line = logfile.readline()
724 while line:
728 while line:
725 parts = line.strip().split(';')
729 parts = line.strip().split(';')
726 op = parts[1]
730 op = parts[1]
727 if op == 'cg':
731 if op == 'cg':
728 pass
732 pass
729 elif op == 'cgss':
733 elif op == 'cgss':
730 doit(parts[2].split(' '), parts[3].split(' '))
734 doit(parts[2].split(' '), parts[3].split(' '))
731 elif op == 'unb':
735 elif op == 'unb':
732 doit(parts[3].split(' '), parts[2].split(' '))
736 doit(parts[3].split(' '), parts[2].split(' '))
733 line = logfile.readline()
737 line = logfile.readline()
734 else:
738 else:
735 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
739 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
736 opts.get('remote_head'))
740 opts.get('remote_head'))
737 localrevs = opts.get('local_head')
741 localrevs = opts.get('local_head')
738 doit(localrevs, remoterevs)
742 doit(localrevs, remoterevs)
739
743
740 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
744 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
741 def debugextensions(ui, **opts):
745 def debugextensions(ui, **opts):
742 '''show information about active extensions'''
746 '''show information about active extensions'''
747 opts = pycompat.byteskwargs(opts)
743 exts = extensions.extensions(ui)
748 exts = extensions.extensions(ui)
744 hgver = util.version()
749 hgver = util.version()
745 fm = ui.formatter('debugextensions', opts)
750 fm = ui.formatter('debugextensions', opts)
746 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
751 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
747 isinternal = extensions.ismoduleinternal(extmod)
752 isinternal = extensions.ismoduleinternal(extmod)
748 extsource = pycompat.fsencode(extmod.__file__)
753 extsource = pycompat.fsencode(extmod.__file__)
749 if isinternal:
754 if isinternal:
750 exttestedwith = [] # never expose magic string to users
755 exttestedwith = [] # never expose magic string to users
751 else:
756 else:
752 exttestedwith = getattr(extmod, 'testedwith', '').split()
757 exttestedwith = getattr(extmod, 'testedwith', '').split()
753 extbuglink = getattr(extmod, 'buglink', None)
758 extbuglink = getattr(extmod, 'buglink', None)
754
759
755 fm.startitem()
760 fm.startitem()
756
761
757 if ui.quiet or ui.verbose:
762 if ui.quiet or ui.verbose:
758 fm.write('name', '%s\n', extname)
763 fm.write('name', '%s\n', extname)
759 else:
764 else:
760 fm.write('name', '%s', extname)
765 fm.write('name', '%s', extname)
761 if isinternal or hgver in exttestedwith:
766 if isinternal or hgver in exttestedwith:
762 fm.plain('\n')
767 fm.plain('\n')
763 elif not exttestedwith:
768 elif not exttestedwith:
764 fm.plain(_(' (untested!)\n'))
769 fm.plain(_(' (untested!)\n'))
765 else:
770 else:
766 lasttestedversion = exttestedwith[-1]
771 lasttestedversion = exttestedwith[-1]
767 fm.plain(' (%s!)\n' % lasttestedversion)
772 fm.plain(' (%s!)\n' % lasttestedversion)
768
773
769 fm.condwrite(ui.verbose and extsource, 'source',
774 fm.condwrite(ui.verbose and extsource, 'source',
770 _(' location: %s\n'), extsource or "")
775 _(' location: %s\n'), extsource or "")
771
776
772 if ui.verbose:
777 if ui.verbose:
773 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
778 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
774 fm.data(bundled=isinternal)
779 fm.data(bundled=isinternal)
775
780
776 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
781 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
777 _(' tested with: %s\n'),
782 _(' tested with: %s\n'),
778 fm.formatlist(exttestedwith, name='ver'))
783 fm.formatlist(exttestedwith, name='ver'))
779
784
780 fm.condwrite(ui.verbose and extbuglink, 'buglink',
785 fm.condwrite(ui.verbose and extbuglink, 'buglink',
781 _(' bug reporting: %s\n'), extbuglink or "")
786 _(' bug reporting: %s\n'), extbuglink or "")
782
787
783 fm.end()
788 fm.end()
784
789
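# Editorial sketch, not Mercurial's actual pycompat code: the
# "opts = pycompat.byteskwargs(opts)" lines added throughout this change
# convert the str keys that Python 3 forces onto **kwargs back into the
# bytes keys the rest of the code looks up. An equivalent helper could look
# like this (byteskwargs_sketch is a made-up name):
def byteskwargs_sketch(kwargs):
    """Return a copy of kwargs with str keys encoded to bytes."""
    return dict((k.encode('ascii') if isinstance(k, str) else k, v)
                for k, v in kwargs.items())

# Example: byteskwargs_sketch({'rev': '42'}) == {b'rev': '42'}, so a lookup
# with a bytes key such as opts.get(b'rev') keeps working after conversion.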
785 @command('debugfileset',
790 @command('debugfileset',
786 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
791 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
787 _('[-r REV] FILESPEC'))
792 _('[-r REV] FILESPEC'))
788 def debugfileset(ui, repo, expr, **opts):
793 def debugfileset(ui, repo, expr, **opts):
789 '''parse and apply a fileset specification'''
794 '''parse and apply a fileset specification'''
790 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
795 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
791 if ui.verbose:
796 if ui.verbose:
792 tree = fileset.parse(expr)
797 tree = fileset.parse(expr)
793 ui.note(fileset.prettyformat(tree), "\n")
798 ui.note(fileset.prettyformat(tree), "\n")
794
799
795 for f in ctx.getfileset(expr):
800 for f in ctx.getfileset(expr):
796 ui.write("%s\n" % f)
801 ui.write("%s\n" % f)
797
802
798 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
803 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
799 def debugfsinfo(ui, path="."):
804 def debugfsinfo(ui, path="."):
800 """show information detected about current filesystem"""
805 """show information detected about current filesystem"""
801 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
806 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
802 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
807 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
803 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
808 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
804 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
809 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
805 casesensitive = '(unknown)'
810 casesensitive = '(unknown)'
806 try:
811 try:
807 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
812 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
808 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
813 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
809 except OSError:
814 except OSError:
810 pass
815 pass
811 ui.write(('case-sensitive: %s\n') % casesensitive)
816 ui.write(('case-sensitive: %s\n') % casesensitive)
812
817
813 @command('debuggetbundle',
818 @command('debuggetbundle',
814 [('H', 'head', [], _('id of head node'), _('ID')),
819 [('H', 'head', [], _('id of head node'), _('ID')),
815 ('C', 'common', [], _('id of common node'), _('ID')),
820 ('C', 'common', [], _('id of common node'), _('ID')),
816 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
821 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
817 _('REPO FILE [-H|-C ID]...'),
822 _('REPO FILE [-H|-C ID]...'),
818 norepo=True)
823 norepo=True)
819 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
824 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
820 """retrieves a bundle from a repo
825 """retrieves a bundle from a repo
821
826
822 Every ID must be a full-length hex node id string. Saves the bundle to the
827 Every ID must be a full-length hex node id string. Saves the bundle to the
823 given file.
828 given file.
824 """
829 """
830 opts = pycompat.byteskwargs(opts)
825 repo = hg.peer(ui, opts, repopath)
831 repo = hg.peer(ui, opts, repopath)
826 if not repo.capable('getbundle'):
832 if not repo.capable('getbundle'):
827 raise error.Abort("getbundle() not supported by target repository")
833 raise error.Abort("getbundle() not supported by target repository")
828 args = {}
834 args = {}
829 if common:
835 if common:
830 args['common'] = [bin(s) for s in common]
836 args['common'] = [bin(s) for s in common]
831 if head:
837 if head:
832 args['heads'] = [bin(s) for s in head]
838 args['heads'] = [bin(s) for s in head]
833 # TODO: get desired bundlecaps from command line.
839 # TODO: get desired bundlecaps from command line.
834 args['bundlecaps'] = None
840 args['bundlecaps'] = None
835 bundle = repo.getbundle('debug', **args)
841 bundle = repo.getbundle('debug', **args)
836
842
837 bundletype = opts.get('type', 'bzip2').lower()
843 bundletype = opts.get('type', 'bzip2').lower()
838 btypes = {'none': 'HG10UN',
844 btypes = {'none': 'HG10UN',
839 'bzip2': 'HG10BZ',
845 'bzip2': 'HG10BZ',
840 'gzip': 'HG10GZ',
846 'gzip': 'HG10GZ',
841 'bundle2': 'HG20'}
847 'bundle2': 'HG20'}
842 bundletype = btypes.get(bundletype)
848 bundletype = btypes.get(bundletype)
843 if bundletype not in bundle2.bundletypes:
849 if bundletype not in bundle2.bundletypes:
844 raise error.Abort(_('unknown bundle type specified with --type'))
850 raise error.Abort(_('unknown bundle type specified with --type'))
845 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
851 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
846
852
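# Editorial sketch of the --type handling above: the user-supplied name is
# lower-cased, mapped to an internal bundle format name, and rejected if the
# result is unknown. KNOWN_FORMATS stands in for bundle2.bundletypes, which
# this sketch does not import.
KNOWN_FORMATS = {'HG10UN', 'HG10BZ', 'HG10GZ', 'HG20'}

def resolvebundletype(name):
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(name.lower())
    if bundletype not in KNOWN_FORMATS:
        raise ValueError('unknown bundle type specified with --type')
    return bundletype

# resolvebundletype('GZIP') == 'HG10GZ'; resolvebundletype('xz') raises.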
847 @command('debugignore', [], '[FILE]')
853 @command('debugignore', [], '[FILE]')
848 def debugignore(ui, repo, *files, **opts):
854 def debugignore(ui, repo, *files, **opts):
849 """display the combined ignore pattern and information about ignored files
855 """display the combined ignore pattern and information about ignored files
850
856
851 With no argument display the combined ignore pattern.
857 With no argument display the combined ignore pattern.
852
858
853 Given space-separated file names, shows whether each given file is ignored
859 Given space-separated file names, shows whether each given file is ignored
854 and, if so, shows the ignore rule (file and line number) that matched it.
860 and, if so, shows the ignore rule (file and line number) that matched it.
855 """
861 """
856 ignore = repo.dirstate._ignore
862 ignore = repo.dirstate._ignore
857 if not files:
863 if not files:
858 # Show all the patterns
864 # Show all the patterns
859 ui.write("%s\n" % repr(ignore))
865 ui.write("%s\n" % repr(ignore))
860 else:
866 else:
861 for f in files:
867 for f in files:
862 nf = util.normpath(f)
868 nf = util.normpath(f)
863 ignored = None
869 ignored = None
864 ignoredata = None
870 ignoredata = None
865 if nf != '.':
871 if nf != '.':
866 if ignore(nf):
872 if ignore(nf):
867 ignored = nf
873 ignored = nf
868 ignoredata = repo.dirstate._ignorefileandline(nf)
874 ignoredata = repo.dirstate._ignorefileandline(nf)
869 else:
875 else:
870 for p in util.finddirs(nf):
876 for p in util.finddirs(nf):
871 if ignore(p):
877 if ignore(p):
872 ignored = p
878 ignored = p
873 ignoredata = repo.dirstate._ignorefileandline(p)
879 ignoredata = repo.dirstate._ignorefileandline(p)
874 break
880 break
875 if ignored:
881 if ignored:
876 if ignored == nf:
882 if ignored == nf:
877 ui.write(_("%s is ignored\n") % f)
883 ui.write(_("%s is ignored\n") % f)
878 else:
884 else:
879 ui.write(_("%s is ignored because of "
885 ui.write(_("%s is ignored because of "
880 "containing folder %s\n")
886 "containing folder %s\n")
881 % (f, ignored))
887 % (f, ignored))
882 ignorefile, lineno, line = ignoredata
888 ignorefile, lineno, line = ignoredata
883 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
889 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
884 % (ignorefile, lineno, line))
890 % (ignorefile, lineno, line))
885 else:
891 else:
886 ui.write(_("%s is not ignored\n") % f)
892 ui.write(_("%s is not ignored\n") % f)
887
893
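# Editorial sketch of the containing-folder check above, with a toy
# finddirs() mirroring util.finddirs and a plain predicate standing in for
# repo.dirstate._ignore:
def finddirs_sketch(path):
    # yields 'a/b', then 'a' for 'a/b/c'
    while '/' in path:
        path = path.rsplit('/', 1)[0]
        yield path

def whyignored(nf, ignore):
    if ignore(nf):
        return nf                   # the file itself matches a pattern
    for p in finddirs_sketch(nf):
        if ignore(p):
            return p                # ignored because of a containing folder
    return None

# whyignored('build/tmp/x.o', lambda p: p == 'build') == 'build'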
888 @command('debugindex', cmdutil.debugrevlogopts +
894 @command('debugindex', cmdutil.debugrevlogopts +
889 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
895 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
890 _('[-f FORMAT] -c|-m|FILE'),
896 _('[-f FORMAT] -c|-m|FILE'),
891 optionalrepo=True)
897 optionalrepo=True)
892 def debugindex(ui, repo, file_=None, **opts):
898 def debugindex(ui, repo, file_=None, **opts):
893 """dump the contents of an index file"""
899 """dump the contents of an index file"""
900 opts = pycompat.byteskwargs(opts)
894 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
901 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
895 format = opts.get('format', 0)
902 format = opts.get('format', 0)
896 if format not in (0, 1):
903 if format not in (0, 1):
897 raise error.Abort(_("unknown format %d") % format)
904 raise error.Abort(_("unknown format %d") % format)
898
905
899 generaldelta = r.version & revlog.FLAG_GENERALDELTA
906 generaldelta = r.version & revlog.FLAG_GENERALDELTA
900 if generaldelta:
907 if generaldelta:
901 basehdr = ' delta'
908 basehdr = ' delta'
902 else:
909 else:
903 basehdr = ' base'
910 basehdr = ' base'
904
911
905 if ui.debugflag:
912 if ui.debugflag:
906 shortfn = hex
913 shortfn = hex
907 else:
914 else:
908 shortfn = short
915 shortfn = short
909
916
910 # There might not be anything in r, so have a sane default
917 # There might not be anything in r, so have a sane default
911 idlen = 12
918 idlen = 12
912 for i in r:
919 for i in r:
913 idlen = len(shortfn(r.node(i)))
920 idlen = len(shortfn(r.node(i)))
914 break
921 break
915
922
916 if format == 0:
923 if format == 0:
917 ui.write((" rev offset length " + basehdr + " linkrev"
924 ui.write((" rev offset length " + basehdr + " linkrev"
918 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
925 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
919 elif format == 1:
926 elif format == 1:
920 ui.write((" rev flag offset length"
927 ui.write((" rev flag offset length"
921 " size " + basehdr + " link p1 p2"
928 " size " + basehdr + " link p1 p2"
922 " %s\n") % "nodeid".rjust(idlen))
929 " %s\n") % "nodeid".rjust(idlen))
923
930
924 for i in r:
931 for i in r:
925 node = r.node(i)
932 node = r.node(i)
926 if generaldelta:
933 if generaldelta:
927 base = r.deltaparent(i)
934 base = r.deltaparent(i)
928 else:
935 else:
929 base = r.chainbase(i)
936 base = r.chainbase(i)
930 if format == 0:
937 if format == 0:
931 try:
938 try:
932 pp = r.parents(node)
939 pp = r.parents(node)
933 except Exception:
940 except Exception:
934 pp = [nullid, nullid]
941 pp = [nullid, nullid]
935 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
942 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
936 i, r.start(i), r.length(i), base, r.linkrev(i),
943 i, r.start(i), r.length(i), base, r.linkrev(i),
937 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
944 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
938 elif format == 1:
945 elif format == 1:
939 pr = r.parentrevs(i)
946 pr = r.parentrevs(i)
940 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
947 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
941 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
948 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
942 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
949 base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
943
950
944 @command('debugindexdot', cmdutil.debugrevlogopts,
951 @command('debugindexdot', cmdutil.debugrevlogopts,
945 _('-c|-m|FILE'), optionalrepo=True)
952 _('-c|-m|FILE'), optionalrepo=True)
946 def debugindexdot(ui, repo, file_=None, **opts):
953 def debugindexdot(ui, repo, file_=None, **opts):
947 """dump an index DAG as a graphviz dot file"""
954 """dump an index DAG as a graphviz dot file"""
955 opts = pycompat.byteskwargs(opts)
948 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
956 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
949 ui.write(("digraph G {\n"))
957 ui.write(("digraph G {\n"))
950 for i in r:
958 for i in r:
951 node = r.node(i)
959 node = r.node(i)
952 pp = r.parents(node)
960 pp = r.parents(node)
953 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
961 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
954 if pp[1] != nullid:
962 if pp[1] != nullid:
955 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
963 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
956 ui.write("}\n")
964 ui.write("}\n")
957
965
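# Editorial sketch of the dot dump above, taking (rev, p1rev, p2rev) tuples
# instead of a real revlog; nullrev (-1) marks a missing parent, matching
# the nullid check in the loop.
def todot(entries, nullrev=-1):
    lines = ["digraph G {"]
    for rev, p1, p2 in entries:
        lines.append("\t%d -> %d" % (p1, rev))
        if p2 != nullrev:
            lines.append("\t%d -> %d" % (p2, rev))
    lines.append("}")
    return "\n".join(lines)

# todot([(0, -1, -1), (1, 0, -1), (2, 0, 1)]) draws edges -1 -> 0, 0 -> 1,
# 0 -> 2 and 1 -> 2 (the merge revision gets two incoming edges).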
958 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
966 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
959 def debuginstall(ui, **opts):
967 def debuginstall(ui, **opts):
960 '''test Mercurial installation
968 '''test Mercurial installation
961
969
962 Returns 0 on success.
970 Returns 0 on success.
963 '''
971 '''
972 opts = pycompat.byteskwargs(opts)
964
973
965 def writetemp(contents):
974 def writetemp(contents):
966 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
975 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
967 f = os.fdopen(fd, pycompat.sysstr("wb"))
976 f = os.fdopen(fd, pycompat.sysstr("wb"))
968 f.write(contents)
977 f.write(contents)
969 f.close()
978 f.close()
970 return name
979 return name
971
980
972 problems = 0
981 problems = 0
973
982
974 fm = ui.formatter('debuginstall', opts)
983 fm = ui.formatter('debuginstall', opts)
975 fm.startitem()
984 fm.startitem()
976
985
977 # encoding
986 # encoding
978 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
987 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
979 err = None
988 err = None
980 try:
989 try:
981 encoding.fromlocal("test")
990 encoding.fromlocal("test")
982 except error.Abort as inst:
991 except error.Abort as inst:
983 err = inst
992 err = inst
984 problems += 1
993 problems += 1
985 fm.condwrite(err, 'encodingerror', _(" %s\n"
994 fm.condwrite(err, 'encodingerror', _(" %s\n"
986 " (check that your locale is properly set)\n"), err)
995 " (check that your locale is properly set)\n"), err)
987
996
988 # Python
997 # Python
989 fm.write('pythonexe', _("checking Python executable (%s)\n"),
998 fm.write('pythonexe', _("checking Python executable (%s)\n"),
990 pycompat.sysexecutable)
999 pycompat.sysexecutable)
991 fm.write('pythonver', _("checking Python version (%s)\n"),
1000 fm.write('pythonver', _("checking Python version (%s)\n"),
992 ("%d.%d.%d" % sys.version_info[:3]))
1001 ("%d.%d.%d" % sys.version_info[:3]))
993 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1002 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
994 os.path.dirname(pycompat.fsencode(os.__file__)))
1003 os.path.dirname(pycompat.fsencode(os.__file__)))
995
1004
996 security = set(sslutil.supportedprotocols)
1005 security = set(sslutil.supportedprotocols)
997 if sslutil.hassni:
1006 if sslutil.hassni:
998 security.add('sni')
1007 security.add('sni')
999
1008
1000 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1009 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1001 fm.formatlist(sorted(security), name='protocol',
1010 fm.formatlist(sorted(security), name='protocol',
1002 fmt='%s', sep=','))
1011 fmt='%s', sep=','))
1003
1012
1004 # These are warnings, not errors. So don't increment problem count. This
1013 # These are warnings, not errors. So don't increment problem count. This
1005 # may change in the future.
1014 # may change in the future.
1006 if 'tls1.2' not in security:
1015 if 'tls1.2' not in security:
1007 fm.plain(_(' TLS 1.2 not supported by Python install; '
1016 fm.plain(_(' TLS 1.2 not supported by Python install; '
1008 'network connections lack modern security\n'))
1017 'network connections lack modern security\n'))
1009 if 'sni' not in security:
1018 if 'sni' not in security:
1010 fm.plain(_(' SNI not supported by Python install; may have '
1019 fm.plain(_(' SNI not supported by Python install; may have '
1011 'connectivity issues with some servers\n'))
1020 'connectivity issues with some servers\n'))
1012
1021
1013 # TODO print CA cert info
1022 # TODO print CA cert info
1014
1023
1015 # hg version
1024 # hg version
1016 hgver = util.version()
1025 hgver = util.version()
1017 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1026 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1018 hgver.split('+')[0])
1027 hgver.split('+')[0])
1019 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1028 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1020 '+'.join(hgver.split('+')[1:]))
1029 '+'.join(hgver.split('+')[1:]))
1021
1030
1022 # compiled modules
1031 # compiled modules
1023 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1032 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1024 policy.policy)
1033 policy.policy)
1025 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1034 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1026 os.path.dirname(pycompat.fsencode(__file__)))
1035 os.path.dirname(pycompat.fsencode(__file__)))
1027
1036
1028 if policy.policy in ('c', 'allow'):
1037 if policy.policy in ('c', 'allow'):
1029 err = None
1038 err = None
1030 try:
1039 try:
1031 from .cext import (
1040 from .cext import (
1032 base85,
1041 base85,
1033 bdiff,
1042 bdiff,
1034 mpatch,
1043 mpatch,
1035 osutil,
1044 osutil,
1036 )
1045 )
1037 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1046 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1038 except Exception as inst:
1047 except Exception as inst:
1039 err = inst
1048 err = inst
1040 problems += 1
1049 problems += 1
1041 fm.condwrite(err, 'extensionserror', " %s\n", err)
1050 fm.condwrite(err, 'extensionserror', " %s\n", err)
1042
1051
1043 compengines = util.compengines._engines.values()
1052 compengines = util.compengines._engines.values()
1044 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1053 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1045 fm.formatlist(sorted(e.name() for e in compengines),
1054 fm.formatlist(sorted(e.name() for e in compengines),
1046 name='compengine', fmt='%s', sep=', '))
1055 name='compengine', fmt='%s', sep=', '))
1047 fm.write('compenginesavail', _('checking available compression engines '
1056 fm.write('compenginesavail', _('checking available compression engines '
1048 '(%s)\n'),
1057 '(%s)\n'),
1049 fm.formatlist(sorted(e.name() for e in compengines
1058 fm.formatlist(sorted(e.name() for e in compengines
1050 if e.available()),
1059 if e.available()),
1051 name='compengine', fmt='%s', sep=', '))
1060 name='compengine', fmt='%s', sep=', '))
1052 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1061 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1053 fm.write('compenginesserver', _('checking available compression engines '
1062 fm.write('compenginesserver', _('checking available compression engines '
1054 'for wire protocol (%s)\n'),
1063 'for wire protocol (%s)\n'),
1055 fm.formatlist([e.name() for e in wirecompengines
1064 fm.formatlist([e.name() for e in wirecompengines
1056 if e.wireprotosupport()],
1065 if e.wireprotosupport()],
1057 name='compengine', fmt='%s', sep=', '))
1066 name='compengine', fmt='%s', sep=', '))
1058
1067
1059 # templates
1068 # templates
1060 p = templater.templatepaths()
1069 p = templater.templatepaths()
1061 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1070 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1062 fm.condwrite(not p, '', _(" no template directories found\n"))
1071 fm.condwrite(not p, '', _(" no template directories found\n"))
1063 if p:
1072 if p:
1064 m = templater.templatepath("map-cmdline.default")
1073 m = templater.templatepath("map-cmdline.default")
1065 if m:
1074 if m:
1066 # template found, check if it is working
1075 # template found, check if it is working
1067 err = None
1076 err = None
1068 try:
1077 try:
1069 templater.templater.frommapfile(m)
1078 templater.templater.frommapfile(m)
1070 except Exception as inst:
1079 except Exception as inst:
1071 err = inst
1080 err = inst
1072 p = None
1081 p = None
1073 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1082 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1074 else:
1083 else:
1075 p = None
1084 p = None
1076 fm.condwrite(p, 'defaulttemplate',
1085 fm.condwrite(p, 'defaulttemplate',
1077 _("checking default template (%s)\n"), m)
1086 _("checking default template (%s)\n"), m)
1078 fm.condwrite(not m, 'defaulttemplatenotfound',
1087 fm.condwrite(not m, 'defaulttemplatenotfound',
1079 _(" template '%s' not found\n"), "default")
1088 _(" template '%s' not found\n"), "default")
1080 if not p:
1089 if not p:
1081 problems += 1
1090 problems += 1
1082 fm.condwrite(not p, '',
1091 fm.condwrite(not p, '',
1083 _(" (templates seem to have been installed incorrectly)\n"))
1092 _(" (templates seem to have been installed incorrectly)\n"))
1084
1093
1085 # editor
1094 # editor
1086 editor = ui.geteditor()
1095 editor = ui.geteditor()
1087 editor = util.expandpath(editor)
1096 editor = util.expandpath(editor)
1088 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1097 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1089 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1098 cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
1090 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1099 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1091 _(" No commit editor set and can't find %s in PATH\n"
1100 _(" No commit editor set and can't find %s in PATH\n"
1092 " (specify a commit editor in your configuration"
1101 " (specify a commit editor in your configuration"
1093 " file)\n"), not cmdpath and editor == 'vi' and editor)
1102 " file)\n"), not cmdpath and editor == 'vi' and editor)
1094 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1103 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1095 _(" Can't find editor '%s' in PATH\n"
1104 _(" Can't find editor '%s' in PATH\n"
1096 " (specify a commit editor in your configuration"
1105 " (specify a commit editor in your configuration"
1097 " file)\n"), not cmdpath and editor)
1106 " file)\n"), not cmdpath and editor)
1098 if not cmdpath and editor != 'vi':
1107 if not cmdpath and editor != 'vi':
1099 problems += 1
1108 problems += 1
1100
1109
1101 # check username
1110 # check username
1102 username = None
1111 username = None
1103 err = None
1112 err = None
1104 try:
1113 try:
1105 username = ui.username()
1114 username = ui.username()
1106 except error.Abort as e:
1115 except error.Abort as e:
1107 err = e
1116 err = e
1108 problems += 1
1117 problems += 1
1109
1118
1110 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1119 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1111 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1120 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1112 " (specify a username in your configuration file)\n"), err)
1121 " (specify a username in your configuration file)\n"), err)
1113
1122
1114 fm.condwrite(not problems, '',
1123 fm.condwrite(not problems, '',
1115 _("no problems detected\n"))
1124 _("no problems detected\n"))
1116 if not problems:
1125 if not problems:
1117 fm.data(problems=problems)
1126 fm.data(problems=problems)
1118 fm.condwrite(problems, 'problems',
1127 fm.condwrite(problems, 'problems',
1119 _("%d problems detected,"
1128 _("%d problems detected,"
1120 " please check your install!\n"), problems)
1129 " please check your install!\n"), problems)
1121 fm.end()
1130 fm.end()
1122
1131
1123 return problems
1132 return problems
1124
1133
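# Editorial sketch of the reporting pattern used throughout this command:
# each probe writes its result, and fm.condwrite() emits the follow-up
# advice only when the probe failed. A minimal stand-in that appends to a
# list instead of going through the formatter (condwrite_sketch is a
# made-up name):
def condwrite_sketch(out, cond, fmt, *values):
    if cond:
        out.append(fmt % values)

out = []
err = None       # would hold the exception raised by a failed probe
problems = 0
condwrite_sketch(out, err,
                 " %s\n (check that your locale is properly set)\n", err)
condwrite_sketch(out, not problems, "no problems detected\n")
assert out == ["no problems detected\n"]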
1125 @command('debugknown', [], _('REPO ID...'), norepo=True)
1134 @command('debugknown', [], _('REPO ID...'), norepo=True)
1126 def debugknown(ui, repopath, *ids, **opts):
1135 def debugknown(ui, repopath, *ids, **opts):
1127 """test whether node ids are known to a repo
1136 """test whether node ids are known to a repo
1128
1137
1129 Every ID must be a full-length hex node id string. Returns a list of 0s
1138 Every ID must be a full-length hex node id string. Returns a list of 0s
1130 and 1s indicating unknown/known.
1139 and 1s indicating unknown/known.
1131 """
1140 """
1141 opts = pycompat.byteskwargs(opts)
1132 repo = hg.peer(ui, opts, repopath)
1142 repo = hg.peer(ui, opts, repopath)
1133 if not repo.capable('known'):
1143 if not repo.capable('known'):
1134 raise error.Abort("known() not supported by target repository")
1144 raise error.Abort("known() not supported by target repository")
1135 flags = repo.known([bin(s) for s in ids])
1145 flags = repo.known([bin(s) for s in ids])
1136 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1146 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1137
1147
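# Editorial sketch of the output line above: repo.known() returns a list of
# booleans and the command prints them as a compact 0/1 string.
def knownstring(flags):
    return "".join([f and "1" or "0" for f in flags])

assert knownstring([True, False, True]) == "101"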
1138 @command('debuglabelcomplete', [], _('LABEL...'))
1148 @command('debuglabelcomplete', [], _('LABEL...'))
1139 def debuglabelcomplete(ui, repo, *args):
1149 def debuglabelcomplete(ui, repo, *args):
1140 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1150 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1141 debugnamecomplete(ui, repo, *args)
1151 debugnamecomplete(ui, repo, *args)
1142
1152
1143 @command('debuglocks',
1153 @command('debuglocks',
1144 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1154 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1145 ('W', 'force-wlock', None,
1155 ('W', 'force-wlock', None,
1146 _('free the working state lock (DANGEROUS)'))],
1156 _('free the working state lock (DANGEROUS)'))],
1147 _('[OPTION]...'))
1157 _('[OPTION]...'))
1148 def debuglocks(ui, repo, **opts):
1158 def debuglocks(ui, repo, **opts):
1149 """show or modify state of locks
1159 """show or modify state of locks
1150
1160
1151 By default, this command will show which locks are held. This
1161 By default, this command will show which locks are held. This
1152 includes the user and process holding the lock, the amount of time
1162 includes the user and process holding the lock, the amount of time
1153 the lock has been held, and the machine name where the process is
1163 the lock has been held, and the machine name where the process is
1154 running if it's not local.
1164 running if it's not local.
1155
1165
1156 Locks protect the integrity of Mercurial's data, so they should be
1166 Locks protect the integrity of Mercurial's data, so they should be
1157 treated with care. System crashes or other interruptions may cause
1167 treated with care. System crashes or other interruptions may cause
1158 locks to not be properly released, though Mercurial will usually
1168 locks to not be properly released, though Mercurial will usually
1159 detect and remove such stale locks automatically.
1169 detect and remove such stale locks automatically.
1160
1170
1161 However, detecting stale locks may not always be possible (for
1171 However, detecting stale locks may not always be possible (for
1162 instance, on a shared filesystem). Removing locks may also be
1172 instance, on a shared filesystem). Removing locks may also be
1163 blocked by filesystem permissions.
1173 blocked by filesystem permissions.
1164
1174
1165 Returns 0 if no locks are held.
1175 Returns 0 if no locks are held.
1166
1176
1167 """
1177 """
1168
1178
1169 if opts.get('force_lock'):
1179 if opts.get('force_lock'):
1170 repo.svfs.unlink('lock')
1180 repo.svfs.unlink('lock')
1171 if opts.get('force_wlock'):
1181 if opts.get('force_wlock'):
1172 repo.vfs.unlink('wlock')
1182 repo.vfs.unlink('wlock')
1173 if opts.get('force_lock') or opts.get('force_wlock'):
1183 if opts.get('force_lock') or opts.get('force_wlock'):
1174 return 0
1184 return 0
1175
1185
1176 now = time.time()
1186 now = time.time()
1177 held = 0
1187 held = 0
1178
1188
1179 def report(vfs, name, method):
1189 def report(vfs, name, method):
1180 # this causes stale locks to get reaped for more accurate reporting
1190 # this causes stale locks to get reaped for more accurate reporting
1181 try:
1191 try:
1182 l = method(False)
1192 l = method(False)
1183 except error.LockHeld:
1193 except error.LockHeld:
1184 l = None
1194 l = None
1185
1195
1186 if l:
1196 if l:
1187 l.release()
1197 l.release()
1188 else:
1198 else:
1189 try:
1199 try:
1190 stat = vfs.lstat(name)
1200 stat = vfs.lstat(name)
1191 age = now - stat.st_mtime
1201 age = now - stat.st_mtime
1192 user = util.username(stat.st_uid)
1202 user = util.username(stat.st_uid)
1193 locker = vfs.readlock(name)
1203 locker = vfs.readlock(name)
1194 if ":" in locker:
1204 if ":" in locker:
1195 host, pid = locker.split(':')
1205 host, pid = locker.split(':')
1196 if host == socket.gethostname():
1206 if host == socket.gethostname():
1197 locker = 'user %s, process %s' % (user, pid)
1207 locker = 'user %s, process %s' % (user, pid)
1198 else:
1208 else:
1199 locker = 'user %s, process %s, host %s' \
1209 locker = 'user %s, process %s, host %s' \
1200 % (user, pid, host)
1210 % (user, pid, host)
1201 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1211 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1202 return 1
1212 return 1
1203 except OSError as e:
1213 except OSError as e:
1204 if e.errno != errno.ENOENT:
1214 if e.errno != errno.ENOENT:
1205 raise
1215 raise
1206
1216
1207 ui.write(("%-6s free\n") % (name + ":"))
1217 ui.write(("%-6s free\n") % (name + ":"))
1208 return 0
1218 return 0
1209
1219
1210 held += report(repo.svfs, "lock", repo.lock)
1220 held += report(repo.svfs, "lock", repo.lock)
1211 held += report(repo.vfs, "wlock", repo.wlock)
1221 held += report(repo.vfs, "wlock", repo.wlock)
1212
1222
1213 return held
1223 return held
1214
1224
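# Editorial sketch of the locker-string handling above: a lock file records
# "host:pid", and the hostname is only shown when the lock was taken on a
# different machine. socket.gethostname() is the same call the code uses;
# describelocker is a made-up name.
import socket

def describelocker(locker, user):
    if ":" not in locker:
        return locker
    host, pid = locker.split(':')
    if host == socket.gethostname():
        return 'user %s, process %s' % (user, pid)
    return 'user %s, process %s, host %s' % (user, pid, host)

# describelocker('otherbox:4242', 'alice')
#   == 'user alice, process 4242, host otherbox'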
1215 @command('debugmergestate', [], '')
1225 @command('debugmergestate', [], '')
1216 def debugmergestate(ui, repo, *args):
1226 def debugmergestate(ui, repo, *args):
1217 """print merge state
1227 """print merge state
1218
1228
1219 Use --verbose to print out information about whether v1 or v2 merge state
1229 Use --verbose to print out information about whether v1 or v2 merge state
1220 was chosen."""
1230 was chosen."""
1221 def _hashornull(h):
1231 def _hashornull(h):
1222 if h == nullhex:
1232 if h == nullhex:
1223 return 'null'
1233 return 'null'
1224 else:
1234 else:
1225 return h
1235 return h
1226
1236
1227 def printrecords(version):
1237 def printrecords(version):
1228 ui.write(('* version %s records\n') % version)
1238 ui.write(('* version %s records\n') % version)
1229 if version == 1:
1239 if version == 1:
1230 records = v1records
1240 records = v1records
1231 else:
1241 else:
1232 records = v2records
1242 records = v2records
1233
1243
1234 for rtype, record in records:
1244 for rtype, record in records:
1235 # pretty print some record types
1245 # pretty print some record types
1236 if rtype == 'L':
1246 if rtype == 'L':
1237 ui.write(('local: %s\n') % record)
1247 ui.write(('local: %s\n') % record)
1238 elif rtype == 'O':
1248 elif rtype == 'O':
1239 ui.write(('other: %s\n') % record)
1249 ui.write(('other: %s\n') % record)
1240 elif rtype == 'm':
1250 elif rtype == 'm':
1241 driver, mdstate = record.split('\0', 1)
1251 driver, mdstate = record.split('\0', 1)
1242 ui.write(('merge driver: %s (state "%s")\n')
1252 ui.write(('merge driver: %s (state "%s")\n')
1243 % (driver, mdstate))
1253 % (driver, mdstate))
1244 elif rtype in 'FDC':
1254 elif rtype in 'FDC':
1245 r = record.split('\0')
1255 r = record.split('\0')
1246 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1256 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1247 if version == 1:
1257 if version == 1:
1248 onode = 'not stored in v1 format'
1258 onode = 'not stored in v1 format'
1249 flags = r[7]
1259 flags = r[7]
1250 else:
1260 else:
1251 onode, flags = r[7:9]
1261 onode, flags = r[7:9]
1252 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1262 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1253 % (f, rtype, state, _hashornull(hash)))
1263 % (f, rtype, state, _hashornull(hash)))
1254 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1264 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1255 ui.write((' ancestor path: %s (node %s)\n')
1265 ui.write((' ancestor path: %s (node %s)\n')
1256 % (afile, _hashornull(anode)))
1266 % (afile, _hashornull(anode)))
1257 ui.write((' other path: %s (node %s)\n')
1267 ui.write((' other path: %s (node %s)\n')
1258 % (ofile, _hashornull(onode)))
1268 % (ofile, _hashornull(onode)))
1259 elif rtype == 'f':
1269 elif rtype == 'f':
1260 filename, rawextras = record.split('\0', 1)
1270 filename, rawextras = record.split('\0', 1)
1261 extras = rawextras.split('\0')
1271 extras = rawextras.split('\0')
1262 i = 0
1272 i = 0
1263 extrastrings = []
1273 extrastrings = []
1264 while i < len(extras):
1274 while i < len(extras):
1265 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1275 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1266 i += 2
1276 i += 2
1267
1277
1268 ui.write(('file extras: %s (%s)\n')
1278 ui.write(('file extras: %s (%s)\n')
1269 % (filename, ', '.join(extrastrings)))
1279 % (filename, ', '.join(extrastrings)))
1270 elif rtype == 'l':
1280 elif rtype == 'l':
1271 labels = record.split('\0', 2)
1281 labels = record.split('\0', 2)
1272 labels = [l for l in labels if len(l) > 0]
1282 labels = [l for l in labels if len(l) > 0]
1273 ui.write(('labels:\n'))
1283 ui.write(('labels:\n'))
1274 ui.write((' local: %s\n' % labels[0]))
1284 ui.write((' local: %s\n' % labels[0]))
1275 ui.write((' other: %s\n' % labels[1]))
1285 ui.write((' other: %s\n' % labels[1]))
1276 if len(labels) > 2:
1286 if len(labels) > 2:
1277 ui.write((' base: %s\n' % labels[2]))
1287 ui.write((' base: %s\n' % labels[2]))
1278 else:
1288 else:
1279 ui.write(('unrecognized entry: %s\t%s\n')
1289 ui.write(('unrecognized entry: %s\t%s\n')
1280 % (rtype, record.replace('\0', '\t')))
1290 % (rtype, record.replace('\0', '\t')))
1281
1291
1282 # Avoid mergestate.read() since it may raise an exception for unsupported
1292 # Avoid mergestate.read() since it may raise an exception for unsupported
1283 # merge state records. We shouldn't be doing this, but this is OK since this
1293 # merge state records. We shouldn't be doing this, but this is OK since this
1284 # command is pretty low-level.
1294 # command is pretty low-level.
1285 ms = mergemod.mergestate(repo)
1295 ms = mergemod.mergestate(repo)
1286
1296
1287 # sort so that reasonable information is on top
1297 # sort so that reasonable information is on top
1288 v1records = ms._readrecordsv1()
1298 v1records = ms._readrecordsv1()
1289 v2records = ms._readrecordsv2()
1299 v2records = ms._readrecordsv2()
1290 order = 'LOml'
1300 order = 'LOml'
1291 def key(r):
1301 def key(r):
1292 idx = order.find(r[0])
1302 idx = order.find(r[0])
1293 if idx == -1:
1303 if idx == -1:
1294 return (1, r[1])
1304 return (1, r[1])
1295 else:
1305 else:
1296 return (0, idx)
1306 return (0, idx)
1297 v1records.sort(key=key)
1307 v1records.sort(key=key)
1298 v2records.sort(key=key)
1308 v2records.sort(key=key)
1299
1309
1300 if not v1records and not v2records:
1310 if not v1records and not v2records:
1301 ui.write(('no merge state found\n'))
1311 ui.write(('no merge state found\n'))
1302 elif not v2records:
1312 elif not v2records:
1303 ui.note(('no version 2 merge state\n'))
1313 ui.note(('no version 2 merge state\n'))
1304 printrecords(1)
1314 printrecords(1)
1305 elif ms._v1v2match(v1records, v2records):
1315 elif ms._v1v2match(v1records, v2records):
1306 ui.note(('v1 and v2 states match: using v2\n'))
1316 ui.note(('v1 and v2 states match: using v2\n'))
1307 printrecords(2)
1317 printrecords(2)
1308 else:
1318 else:
1309 ui.note(('v1 and v2 states mismatch: using v1\n'))
1319 ui.note(('v1 and v2 states mismatch: using v1\n'))
1310 printrecords(1)
1320 printrecords(1)
1311 if ui.verbose:
1321 if ui.verbose:
1312 printrecords(2)
1322 printrecords(2)
1313
1323
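# Editorial sketch of the sort key above: the well-known record types keep
# a fixed order (L, O, m, l) at the front, and anything else sorts after
# them by its payload.
def recordkey(record, order='LOml'):
    rtype, payload = record
    idx = order.find(rtype)
    if idx == -1:
        return (1, payload)
    return (0, idx)

assert sorted([('f', 'x'), ('O', 'o'), ('L', 'l')], key=recordkey) == \
    [('L', 'l'), ('O', 'o'), ('f', 'x')]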
1314 @command('debugnamecomplete', [], _('NAME...'))
1324 @command('debugnamecomplete', [], _('NAME...'))
1315 def debugnamecomplete(ui, repo, *args):
1325 def debugnamecomplete(ui, repo, *args):
1316 '''complete "names" - tags, open branch names, bookmark names'''
1326 '''complete "names" - tags, open branch names, bookmark names'''
1317
1327
1318 names = set()
1328 names = set()
1319 # since we previously only listed open branches, we will handle that
1329 # since we previously only listed open branches, we will handle that
1320 # specially (after this for loop)
1330 # specially (after this for loop)
1321 for name, ns in repo.names.iteritems():
1331 for name, ns in repo.names.iteritems():
1322 if name != 'branches':
1332 if name != 'branches':
1323 names.update(ns.listnames(repo))
1333 names.update(ns.listnames(repo))
1324 names.update(tag for (tag, heads, tip, closed)
1334 names.update(tag for (tag, heads, tip, closed)
1325 in repo.branchmap().iterbranches() if not closed)
1335 in repo.branchmap().iterbranches() if not closed)
1326 completions = set()
1336 completions = set()
1327 if not args:
1337 if not args:
1328 args = ['']
1338 args = ['']
1329 for a in args:
1339 for a in args:
1330 completions.update(n for n in names if n.startswith(a))
1340 completions.update(n for n in names if n.startswith(a))
1331 ui.write('\n'.join(sorted(completions)))
1341 ui.write('\n'.join(sorted(completions)))
1332 ui.write('\n')
1342 ui.write('\n')
1333
1343
1334 @command('debugobsolete',
1344 @command('debugobsolete',
1335 [('', 'flags', 0, _('markers flag')),
1345 [('', 'flags', 0, _('markers flag')),
1336 ('', 'record-parents', False,
1346 ('', 'record-parents', False,
1337 _('record parent information for the precursor')),
1347 _('record parent information for the precursor')),
1338 ('r', 'rev', [], _('display markers relevant to REV')),
1348 ('r', 'rev', [], _('display markers relevant to REV')),
1339 ('', 'exclusive', False, _('restrict display to markers only '
1349 ('', 'exclusive', False, _('restrict display to markers only '
1340 'relevant to REV')),
1350 'relevant to REV')),
1341 ('', 'index', False, _('display index of the marker')),
1351 ('', 'index', False, _('display index of the marker')),
1342 ('', 'delete', [], _('delete markers specified by indices')),
1352 ('', 'delete', [], _('delete markers specified by indices')),
1343 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1353 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1344 _('[OBSOLETED [REPLACEMENT ...]]'))
1354 _('[OBSOLETED [REPLACEMENT ...]]'))
1345 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1355 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1346 """create an arbitrary obsolete marker
1356 """create an arbitrary obsolete marker
1347
1357
1348 With no arguments, displays the list of obsolescence markers."""
1358 With no arguments, displays the list of obsolescence markers."""
1349
1359
1360 opts = pycompat.byteskwargs(opts)
1361
1350 def parsenodeid(s):
1362 def parsenodeid(s):
1351 try:
1363 try:
1352 # We do not use revsingle/revrange functions here to accept
1364 # We do not use revsingle/revrange functions here to accept
1353 # arbitrary node identifiers, possibly not present in the
1365 # arbitrary node identifiers, possibly not present in the
1354 # local repository.
1366 # local repository.
1355 n = bin(s)
1367 n = bin(s)
1356 if len(n) != len(nullid):
1368 if len(n) != len(nullid):
1357 raise TypeError()
1369 raise TypeError()
1358 return n
1370 return n
1359 except TypeError:
1371 except TypeError:
1360 raise error.Abort('changeset references must be full hexadecimal '
1372 raise error.Abort('changeset references must be full hexadecimal '
1361 'node identifiers')
1373 'node identifiers')
1362
1374
1363 if opts.get('delete'):
1375 if opts.get('delete'):
1364 indices = []
1376 indices = []
1365 for v in opts.get('delete'):
1377 for v in opts.get('delete'):
1366 try:
1378 try:
1367 indices.append(int(v))
1379 indices.append(int(v))
1368 except ValueError:
1380 except ValueError:
1369 raise error.Abort(_('invalid index value: %r') % v,
1381 raise error.Abort(_('invalid index value: %r') % v,
1370 hint=_('use integers for indices'))
1382 hint=_('use integers for indices'))
1371
1383
1372 if repo.currenttransaction():
1384 if repo.currenttransaction():
1373 raise error.Abort(_('cannot delete obsmarkers in the middle '
1385 raise error.Abort(_('cannot delete obsmarkers in the middle '
1374 'of transaction.'))
1386 'of transaction.'))
1375
1387
1376 with repo.lock():
1388 with repo.lock():
1377 n = repair.deleteobsmarkers(repo.obsstore, indices)
1389 n = repair.deleteobsmarkers(repo.obsstore, indices)
1378 ui.write(_('deleted %i obsolescence markers\n') % n)
1390 ui.write(_('deleted %i obsolescence markers\n') % n)
1379
1391
1380 return
1392 return
1381
1393
1382 if precursor is not None:
1394 if precursor is not None:
1383 if opts['rev']:
1395 if opts['rev']:
1384 raise error.Abort('cannot select revision when creating marker')
1396 raise error.Abort('cannot select revision when creating marker')
1385 metadata = {}
1397 metadata = {}
1386 metadata['user'] = opts['user'] or ui.username()
1398 metadata['user'] = opts['user'] or ui.username()
1387 succs = tuple(parsenodeid(succ) for succ in successors)
1399 succs = tuple(parsenodeid(succ) for succ in successors)
1388 l = repo.lock()
1400 l = repo.lock()
1389 try:
1401 try:
1390 tr = repo.transaction('debugobsolete')
1402 tr = repo.transaction('debugobsolete')
1391 try:
1403 try:
1392 date = opts.get('date')
1404 date = opts.get('date')
1393 if date:
1405 if date:
1394 date = util.parsedate(date)
1406 date = util.parsedate(date)
1395 else:
1407 else:
1396 date = None
1408 date = None
1397 prec = parsenodeid(precursor)
1409 prec = parsenodeid(precursor)
1398 parents = None
1410 parents = None
1399 if opts['record_parents']:
1411 if opts['record_parents']:
1400 if prec not in repo.unfiltered():
1412 if prec not in repo.unfiltered():
1401 raise error.Abort('cannot use --record-parents on '
1413 raise error.Abort('cannot use --record-parents on '
1402 'unknown changesets')
1414 'unknown changesets')
1403 parents = repo.unfiltered()[prec].parents()
1415 parents = repo.unfiltered()[prec].parents()
1404 parents = tuple(p.node() for p in parents)
1416 parents = tuple(p.node() for p in parents)
1405 repo.obsstore.create(tr, prec, succs, opts['flags'],
1417 repo.obsstore.create(tr, prec, succs, opts['flags'],
1406 parents=parents, date=date,
1418 parents=parents, date=date,
1407 metadata=metadata, ui=ui)
1419 metadata=metadata, ui=ui)
1408 tr.close()
1420 tr.close()
1409 except ValueError as exc:
1421 except ValueError as exc:
1410 raise error.Abort(_('bad obsmarker input: %s') % exc)
1422 raise error.Abort(_('bad obsmarker input: %s') % exc)
1411 finally:
1423 finally:
1412 tr.release()
1424 tr.release()
1413 finally:
1425 finally:
1414 l.release()
1426 l.release()
1415 else:
1427 else:
1416 if opts['rev']:
1428 if opts['rev']:
1417 revs = scmutil.revrange(repo, opts['rev'])
1429 revs = scmutil.revrange(repo, opts['rev'])
1418 nodes = [repo[r].node() for r in revs]
1430 nodes = [repo[r].node() for r in revs]
1419 markers = list(obsolete.getmarkers(repo, nodes=nodes,
1431 markers = list(obsolete.getmarkers(repo, nodes=nodes,
1420 exclusive=opts['exclusive']))
1432 exclusive=opts['exclusive']))
1421 markers.sort(key=lambda x: x._data)
1433 markers.sort(key=lambda x: x._data)
1422 else:
1434 else:
1423 markers = obsolete.getmarkers(repo)
1435 markers = obsolete.getmarkers(repo)
1424
1436
1425 markerstoiter = markers
1437 markerstoiter = markers
1426 isrelevant = lambda m: True
1438 isrelevant = lambda m: True
1427 if opts.get('rev') and opts.get('index'):
1439 if opts.get('rev') and opts.get('index'):
1428 markerstoiter = obsolete.getmarkers(repo)
1440 markerstoiter = obsolete.getmarkers(repo)
1429 markerset = set(markers)
1441 markerset = set(markers)
1430 isrelevant = lambda m: m in markerset
1442 isrelevant = lambda m: m in markerset
1431
1443
1432 fm = ui.formatter('debugobsolete', opts)
1444 fm = ui.formatter('debugobsolete', opts)
1433 for i, m in enumerate(markerstoiter):
1445 for i, m in enumerate(markerstoiter):
1434 if not isrelevant(m):
1446 if not isrelevant(m):
1435 # marker can be irrelevant when we're iterating over a set
1447 # marker can be irrelevant when we're iterating over a set
1436 # of markers (markerstoiter) which is bigger than the set
1448 # of markers (markerstoiter) which is bigger than the set
1437 # of markers we want to display (markers)
1449 # of markers we want to display (markers)
1438 # this can happen if both --index and --rev options are
1450 # this can happen if both --index and --rev options are
1439 # provided and thus we need to iterate over all of the markers
1451 # provided and thus we need to iterate over all of the markers
1440 # to get the correct indices, but only display the ones that
1452 # to get the correct indices, but only display the ones that
1441 # are relevant to --rev value
1453 # are relevant to --rev value
1442 continue
1454 continue
1443 fm.startitem()
1455 fm.startitem()
1444 ind = i if opts.get('index') else None
1456 ind = i if opts.get('index') else None
1445 cmdutil.showmarker(fm, m, index=ind)
1457 cmdutil.showmarker(fm, m, index=ind)
1446 fm.end()
1458 fm.end()
1447
1459
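# Editorial sketch of the node-id validation above: only a full-length
# (40 hex digit / 20 byte) identifier is accepted, and the repository is
# never consulted. binascii.unhexlify plays the role of node.bin here;
# parsenodeid_sketch is a made-up name.
import binascii

def parsenodeid_sketch(s, nodelen=20):
    try:
        n = binascii.unhexlify(s)
        if len(n) != nodelen:
            raise TypeError()
    except (TypeError, binascii.Error):
        raise ValueError('changeset references must be full hexadecimal '
                         'node identifiers')
    return n

# parsenodeid_sketch('11' * 20) is accepted; 'abc' or a short prefix raises.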
1448 @command('debugpathcomplete',
1460 @command('debugpathcomplete',
1449 [('f', 'full', None, _('complete an entire path')),
1461 [('f', 'full', None, _('complete an entire path')),
1450 ('n', 'normal', None, _('show only normal files')),
1462 ('n', 'normal', None, _('show only normal files')),
1451 ('a', 'added', None, _('show only added files')),
1463 ('a', 'added', None, _('show only added files')),
1452 ('r', 'removed', None, _('show only removed files'))],
1464 ('r', 'removed', None, _('show only removed files'))],
1453 _('FILESPEC...'))
1465 _('FILESPEC...'))
1454 def debugpathcomplete(ui, repo, *specs, **opts):
1466 def debugpathcomplete(ui, repo, *specs, **opts):
1455 '''complete part or all of a tracked path
1467 '''complete part or all of a tracked path
1456
1468
1457 This command supports shells that offer path name completion. It
1469 This command supports shells that offer path name completion. It
1458 currently completes only files already known to the dirstate.
1470 currently completes only files already known to the dirstate.
1459
1471
1460 Completion extends only to the next path segment unless
1472 Completion extends only to the next path segment unless
1461 --full is specified, in which case entire paths are used.'''
1473 --full is specified, in which case entire paths are used.'''
1462
1474
1463 def complete(path, acceptable):
1475 def complete(path, acceptable):
1464 dirstate = repo.dirstate
1476 dirstate = repo.dirstate
1465 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1477 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1466 rootdir = repo.root + pycompat.ossep
1478 rootdir = repo.root + pycompat.ossep
1467 if spec != repo.root and not spec.startswith(rootdir):
1479 if spec != repo.root and not spec.startswith(rootdir):
1468 return [], []
1480 return [], []
1469 if os.path.isdir(spec):
1481 if os.path.isdir(spec):
1470 spec += '/'
1482 spec += '/'
1471 spec = spec[len(rootdir):]
1483 spec = spec[len(rootdir):]
1472 fixpaths = pycompat.ossep != '/'
1484 fixpaths = pycompat.ossep != '/'
1473 if fixpaths:
1485 if fixpaths:
1474 spec = spec.replace(pycompat.ossep, '/')
1486 spec = spec.replace(pycompat.ossep, '/')
1475 speclen = len(spec)
1487 speclen = len(spec)
1476 fullpaths = opts['full']
1488 fullpaths = opts['full']
1477 files, dirs = set(), set()
1489 files, dirs = set(), set()
1478 adddir, addfile = dirs.add, files.add
1490 adddir, addfile = dirs.add, files.add
1479 for f, st in dirstate.iteritems():
1491 for f, st in dirstate.iteritems():
1480 if f.startswith(spec) and st[0] in acceptable:
1492 if f.startswith(spec) and st[0] in acceptable:
1481 if fixpaths:
1493 if fixpaths:
1482 f = f.replace('/', pycompat.ossep)
1494 f = f.replace('/', pycompat.ossep)
1483 if fullpaths:
1495 if fullpaths:
1484 addfile(f)
1496 addfile(f)
1485 continue
1497 continue
1486 s = f.find(pycompat.ossep, speclen)
1498 s = f.find(pycompat.ossep, speclen)
1487 if s >= 0:
1499 if s >= 0:
1488 adddir(f[:s])
1500 adddir(f[:s])
1489 else:
1501 else:
1490 addfile(f)
1502 addfile(f)
1491 return files, dirs
1503 return files, dirs
1492
1504
1493 acceptable = ''
1505 acceptable = ''
1494 if opts['normal']:
1506 if opts['normal']:
1495 acceptable += 'nm'
1507 acceptable += 'nm'
1496 if opts['added']:
1508 if opts['added']:
1497 acceptable += 'a'
1509 acceptable += 'a'
1498 if opts['removed']:
1510 if opts['removed']:
1499 acceptable += 'r'
1511 acceptable += 'r'
1500 cwd = repo.getcwd()
1512 cwd = repo.getcwd()
1501 if not specs:
1513 if not specs:
1502 specs = ['.']
1514 specs = ['.']
1503
1515
1504 files, dirs = set(), set()
1516 files, dirs = set(), set()
1505 for spec in specs:
1517 for spec in specs:
1506 f, d = complete(spec, acceptable or 'nmar')
1518 f, d = complete(spec, acceptable or 'nmar')
1507 files.update(f)
1519 files.update(f)
1508 dirs.update(d)
1520 dirs.update(d)
1509 files.update(dirs)
1521 files.update(dirs)
1510 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1522 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1511 ui.write('\n')
1523 ui.write('\n')
1512
1524
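# Editorial sketch of the "complete to the next path segment" behaviour
# above: given tracked file names and the prefix typed so far, return whole
# file names or directory components cut at the next '/', unless full
# completion is requested. completesegment is a made-up name.
def completesegment(tracked, spec, full=False):
    files, dirs = set(), set()
    speclen = len(spec)
    for f in tracked:
        if not f.startswith(spec):
            continue
        if full:
            files.add(f)
            continue
        s = f.find('/', speclen)
        if s >= 0:
            dirs.add(f[:s])
        else:
            files.add(f)
    return sorted(files | dirs)

assert completesegment(['src/a.py', 'src/sub/b.py', 'README'], 'src/') == \
    ['src/a.py', 'src/sub']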
1513 @command('debugpickmergetool',
1525 @command('debugpickmergetool',
1514 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1526 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1515 ('', 'changedelete', None, _('emulate merging change and delete')),
1527 ('', 'changedelete', None, _('emulate merging change and delete')),
1516 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1528 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1517 _('[PATTERN]...'),
1529 _('[PATTERN]...'),
1518 inferrepo=True)
1530 inferrepo=True)
1519 def debugpickmergetool(ui, repo, *pats, **opts):
1531 def debugpickmergetool(ui, repo, *pats, **opts):
1520 """examine which merge tool is chosen for the specified files
1532 """examine which merge tool is chosen for the specified files
1521
1533
1522 As described in :hg:`help merge-tools`, Mercurial examines
1534 As described in :hg:`help merge-tools`, Mercurial examines
1523 the configurations below in this order to decide which merge tool is
1535 the configurations below in this order to decide which merge tool is
1524 chosen for each specified file.
1536 chosen for each specified file.
1525
1537
1526 1. ``--tool`` option
1538 1. ``--tool`` option
1527 2. ``HGMERGE`` environment variable
1539 2. ``HGMERGE`` environment variable
1528 3. configurations in ``merge-patterns`` section
1540 3. configurations in ``merge-patterns`` section
1529 4. configuration of ``ui.merge``
1541 4. configuration of ``ui.merge``
1530 5. configurations in ``merge-tools`` section
1542 5. configurations in ``merge-tools`` section
1531 6. ``hgmerge`` tool (for historical reasons only)
1543 6. ``hgmerge`` tool (for historical reasons only)
1532 7. default tool for fallback (``:merge`` or ``:prompt``)
1544 7. default tool for fallback (``:merge`` or ``:prompt``)
1533
1545
1534 This command writes out the examination result in the style below::
1546 This command writes out the examination result in the style below::
1535
1547
1536 FILE = MERGETOOL
1548 FILE = MERGETOOL
1537
1549
1538 By default, all files known in the first parent context of the
1550 By default, all files known in the first parent context of the
1539 working directory are examined. Use file patterns and/or -I/-X
1551 working directory are examined. Use file patterns and/or -I/-X
1540 options to limit target files. -r/--rev is also useful to examine
1552 options to limit target files. -r/--rev is also useful to examine
1541 files in another context without actually updating to it.
1553 files in another context without actually updating to it.
1542
1554
1543 With --debug, this command shows warning messages while matching
1555 With --debug, this command shows warning messages while matching
1544 against ``merge-patterns`` and the related settings. It is recommended to
1556 against ``merge-patterns`` and the related settings. It is recommended to
1545 use this option with explicit file patterns and/or -I/-X options,
1557 use this option with explicit file patterns and/or -I/-X options,
1546 because this option increases the amount of output per file according
1558 because this option increases the amount of output per file according
1547 to configurations in hgrc.
1559 to configurations in hgrc.
1548
1560
1549 With -v/--verbose, this command first shows the configurations
1561 With -v/--verbose, this command first shows the configurations
1550 below (only the ones that are specified).
1562 below (only the ones that are specified).
1551
1563
1552 - ``--tool`` option
1564 - ``--tool`` option
1553 - ``HGMERGE`` environment variable
1565 - ``HGMERGE`` environment variable
1554 - configuration of ``ui.merge``
1566 - configuration of ``ui.merge``
1555
1567
1556 If a merge tool is chosen before matching against
1568 If a merge tool is chosen before matching against
1557 ``merge-patterns``, this command can't show any helpful
1569 ``merge-patterns``, this command can't show any helpful
1558 information, even with --debug. In such a case, the information
1570 information, even with --debug. In such a case, the information
1559 above is useful for understanding why a merge tool was chosen.
1571 above is useful for understanding why a merge tool was chosen.
1560 """
1572 """
1573 opts = pycompat.byteskwargs(opts)
1561 overrides = {}
1574 overrides = {}
1562 if opts['tool']:
1575 if opts['tool']:
1563 overrides[('ui', 'forcemerge')] = opts['tool']
1576 overrides[('ui', 'forcemerge')] = opts['tool']
1564 ui.note(('with --tool %r\n') % (opts['tool']))
1577 ui.note(('with --tool %r\n') % (opts['tool']))
1565
1578
1566 with ui.configoverride(overrides, 'debugmergepatterns'):
1579 with ui.configoverride(overrides, 'debugmergepatterns'):
1567 hgmerge = encoding.environ.get("HGMERGE")
1580 hgmerge = encoding.environ.get("HGMERGE")
1568 if hgmerge is not None:
1581 if hgmerge is not None:
1569 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1582 ui.note(('with HGMERGE=%r\n') % (hgmerge))
1570 uimerge = ui.config("ui", "merge")
1583 uimerge = ui.config("ui", "merge")
1571 if uimerge:
1584 if uimerge:
1572 ui.note(('with ui.merge=%r\n') % (uimerge))
1585 ui.note(('with ui.merge=%r\n') % (uimerge))
1573
1586
1574 ctx = scmutil.revsingle(repo, opts.get('rev'))
1587 ctx = scmutil.revsingle(repo, opts.get('rev'))
1575 m = scmutil.match(ctx, pats, opts)
1588 m = scmutil.match(ctx, pats, opts)
1576 changedelete = opts['changedelete']
1589 changedelete = opts['changedelete']
1577 for path in ctx.walk(m):
1590 for path in ctx.walk(m):
1578 fctx = ctx[path]
1591 fctx = ctx[path]
1579 try:
1592 try:
1580 if not ui.debugflag:
1593 if not ui.debugflag:
1581 ui.pushbuffer(error=True)
1594 ui.pushbuffer(error=True)
1582 tool, toolpath = filemerge._picktool(repo, ui, path,
1595 tool, toolpath = filemerge._picktool(repo, ui, path,
1583 fctx.isbinary(),
1596 fctx.isbinary(),
1584 'l' in fctx.flags(),
1597 'l' in fctx.flags(),
1585 changedelete)
1598 changedelete)
1586 finally:
1599 finally:
1587 if not ui.debugflag:
1600 if not ui.debugflag:
1588 ui.popbuffer()
1601 ui.popbuffer()
1589 ui.write(('%s = %s\n') % (path, tool))
1602 ui.write(('%s = %s\n') % (path, tool))
1590
1603
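# Editorial sketch of the --tool plumbing above: the command-line choice is
# temporarily written into the ('ui', 'forcemerge') configuration so the
# normal merge-tool selection sees it, then restored. A toy context manager
# over a plain dict stands in for ui.configoverride; configoverride_sketch
# is a made-up name.
import contextlib

@contextlib.contextmanager
def configoverride_sketch(config, overrides):
    saved = {k: config.get(k) for k in overrides}
    config.update(overrides)
    try:
        yield
    finally:
        for k, v in saved.items():
            if v is None:
                config.pop(k, None)
            else:
                config[k] = v

config = {}
with configoverride_sketch(config, {('ui', 'forcemerge'): ':merge'}):
    assert config[('ui', 'forcemerge')] == ':merge'
assert ('ui', 'forcemerge') not in config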
1591 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1604 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1592 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1605 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1593 '''access the pushkey key/value protocol
1606 '''access the pushkey key/value protocol
1594
1607
1595 With two args, list the keys in the given namespace.
1608 With two args, list the keys in the given namespace.
1596
1609
1597 With five args, set a key to new if it currently is set to old.
1610 With five args, set a key to new if it currently is set to old.
1598 Reports success or failure.
1611 Reports success or failure.
1599 '''
1612 '''
1600
1613
1601 target = hg.peer(ui, {}, repopath)
1614 target = hg.peer(ui, {}, repopath)
1602 if keyinfo:
1615 if keyinfo:
1603 key, old, new = keyinfo
1616 key, old, new = keyinfo
1604 r = target.pushkey(namespace, key, old, new)
1617 r = target.pushkey(namespace, key, old, new)
1605 ui.status(str(r) + '\n')
1618 ui.status(str(r) + '\n')
1606 return not r
1619 return not r
1607 else:
1620 else:
1608 for k, v in sorted(target.listkeys(namespace).iteritems()):
1621 for k, v in sorted(target.listkeys(namespace).iteritems()):
1609 ui.write("%s\t%s\n" % (util.escapestr(k),
1622 ui.write("%s\t%s\n" % (util.escapestr(k),
1610 util.escapestr(v)))
1623 util.escapestr(v)))
1611
1624
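The "set a key to new if it currently is set to old" behaviour that ``debugpushkey`` exercises is a compare-and-swap over a key/value namespace. A minimal in-memory sketch of those semantics, with a plain dict standing in for a real namespace such as bookmarks or phases (all names and values invented for illustration):

    def pushkey(store, key, old, new):
        """Set ``key`` to ``new`` only if it is currently ``old``; report success."""
        if store.get(key, '') != old:
            return False
        store[key] = new
        return True

    store = {'feature-x': 'abc123'}
    assert pushkey(store, 'feature-x', 'abc123', 'def456')       # old value matches
    assert not pushkey(store, 'feature-x', 'abc123', '0000000')  # stale old value, refused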
1612 @command('debugpvec', [], _('A B'))
1625 @command('debugpvec', [], _('A B'))
1613 def debugpvec(ui, repo, a, b=None):
1626 def debugpvec(ui, repo, a, b=None):
1614 ca = scmutil.revsingle(repo, a)
1627 ca = scmutil.revsingle(repo, a)
1615 cb = scmutil.revsingle(repo, b)
1628 cb = scmutil.revsingle(repo, b)
1616 pa = pvec.ctxpvec(ca)
1629 pa = pvec.ctxpvec(ca)
1617 pb = pvec.ctxpvec(cb)
1630 pb = pvec.ctxpvec(cb)
1618 if pa == pb:
1631 if pa == pb:
1619 rel = "="
1632 rel = "="
1620 elif pa > pb:
1633 elif pa > pb:
1621 rel = ">"
1634 rel = ">"
1622 elif pa < pb:
1635 elif pa < pb:
1623 rel = "<"
1636 rel = "<"
1624 elif pa | pb:
1637 elif pa | pb:
1625 rel = "|"
1638 rel = "|"
1626 ui.write(_("a: %s\n") % pa)
1639 ui.write(_("a: %s\n") % pa)
1627 ui.write(_("b: %s\n") % pb)
1640 ui.write(_("b: %s\n") % pb)
1628 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1641 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1629 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1642 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1630 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1643 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1631 pa.distance(pb), rel))
1644 pa.distance(pb), rel))
1632
1645
1633 @command('debugrebuilddirstate|debugrebuildstate',
1646 @command('debugrebuilddirstate|debugrebuildstate',
1634 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1647 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1635 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1648 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1636 'the working copy parent')),
1649 'the working copy parent')),
1637 ],
1650 ],
1638 _('[-r REV]'))
1651 _('[-r REV]'))
1639 def debugrebuilddirstate(ui, repo, rev, **opts):
1652 def debugrebuilddirstate(ui, repo, rev, **opts):
1640 """rebuild the dirstate as it would look like for the given revision
1653 """rebuild the dirstate as it would look like for the given revision
1641
1654
1642 If no revision is specified, the first current parent will be used.
1655 If no revision is specified, the first current parent will be used.
1643
1656
1644 The dirstate will be set to the files of the given revision.
1657 The dirstate will be set to the files of the given revision.
1645 The actual working directory content or existing dirstate
1658 The actual working directory content or existing dirstate
1646 information such as adds or removes is not considered.
1659 information such as adds or removes is not considered.
1647
1660
1648 ``minimal`` will only rebuild the dirstate status for files that claim to be
1661 ``minimal`` will only rebuild the dirstate status for files that claim to be
1649 tracked but are not in the parent manifest, or that exist in the parent
1662 tracked but are not in the parent manifest, or that exist in the parent
1650 manifest but are not in the dirstate. It will not change adds, removes, or
1663 manifest but are not in the dirstate. It will not change adds, removes, or
1651 modified files that are in the working copy parent.
1664 modified files that are in the working copy parent.
1652
1665
1653 One use of this command is to make the next :hg:`status` invocation
1666 One use of this command is to make the next :hg:`status` invocation
1654 check the actual file content.
1667 check the actual file content.
1655 """
1668 """
1656 ctx = scmutil.revsingle(repo, rev)
1669 ctx = scmutil.revsingle(repo, rev)
1657 with repo.wlock():
1670 with repo.wlock():
1658 dirstate = repo.dirstate
1671 dirstate = repo.dirstate
1659 changedfiles = None
1672 changedfiles = None
1660 # See command doc for what minimal does.
1673 # See command doc for what minimal does.
1661 if opts.get('minimal'):
1674 if opts.get('minimal'):
1662 manifestfiles = set(ctx.manifest().keys())
1675 manifestfiles = set(ctx.manifest().keys())
1663 dirstatefiles = set(dirstate)
1676 dirstatefiles = set(dirstate)
1664 manifestonly = manifestfiles - dirstatefiles
1677 manifestonly = manifestfiles - dirstatefiles
1665 dsonly = dirstatefiles - manifestfiles
1678 dsonly = dirstatefiles - manifestfiles
1666 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1679 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1667 changedfiles = manifestonly | dsnotadded
1680 changedfiles = manifestonly | dsnotadded
1668
1681
1669 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1682 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1670
1683
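The ``--minimal`` selection above is plain set arithmetic: files that exist only in the manifest, plus dirstate-only files that are not in the 'added' state. A small standalone sketch over invented data; the one-letter state mapping ('n' normal, 'a' added) is the only assumption carried over:

    def minimal_changedfiles(manifestfiles, dirstate_status):
        """dirstate_status maps file -> one-letter state ('n' normal, 'a' added, ...)."""
        manifestfiles = set(manifestfiles)
        dirstatefiles = set(dirstate_status)
        manifestonly = manifestfiles - dirstatefiles
        dsonly = dirstatefiles - manifestfiles
        dsnotadded = {f for f in dsonly if dirstate_status[f] != 'a'}
        return manifestonly | dsnotadded

    # 'a' is in the manifest but missing from the dirstate; 'd' is tracked but not
    # in the manifest and not freshly added, so both get rebuilt; 'c' (added) is kept.
    print(minimal_changedfiles({'a', 'b'}, {'b': 'n', 'c': 'a', 'd': 'n'}))  # {'a', 'd'}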
1671 @command('debugrebuildfncache', [], '')
1684 @command('debugrebuildfncache', [], '')
1672 def debugrebuildfncache(ui, repo):
1685 def debugrebuildfncache(ui, repo):
1673 """rebuild the fncache file"""
1686 """rebuild the fncache file"""
1674 repair.rebuildfncache(ui, repo)
1687 repair.rebuildfncache(ui, repo)
1675
1688
1676 @command('debugrename',
1689 @command('debugrename',
1677 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1690 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1678 _('[-r REV] FILE'))
1691 _('[-r REV] FILE'))
1679 def debugrename(ui, repo, file1, *pats, **opts):
1692 def debugrename(ui, repo, file1, *pats, **opts):
1680 """dump rename information"""
1693 """dump rename information"""
1681
1694
1695 opts = pycompat.byteskwargs(opts)
1682 ctx = scmutil.revsingle(repo, opts.get('rev'))
1696 ctx = scmutil.revsingle(repo, opts.get('rev'))
1683 m = scmutil.match(ctx, (file1,) + pats, opts)
1697 m = scmutil.match(ctx, (file1,) + pats, opts)
1684 for abs in ctx.walk(m):
1698 for abs in ctx.walk(m):
1685 fctx = ctx[abs]
1699 fctx = ctx[abs]
1686 o = fctx.filelog().renamed(fctx.filenode())
1700 o = fctx.filelog().renamed(fctx.filenode())
1687 rel = m.rel(abs)
1701 rel = m.rel(abs)
1688 if o:
1702 if o:
1689 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1703 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1690 else:
1704 else:
1691 ui.write(_("%s not renamed\n") % rel)
1705 ui.write(_("%s not renamed\n") % rel)
1692
1706
1693 @command('debugrevlog', cmdutil.debugrevlogopts +
1707 @command('debugrevlog', cmdutil.debugrevlogopts +
1694 [('d', 'dump', False, _('dump index data'))],
1708 [('d', 'dump', False, _('dump index data'))],
1695 _('-c|-m|FILE'),
1709 _('-c|-m|FILE'),
1696 optionalrepo=True)
1710 optionalrepo=True)
1697 def debugrevlog(ui, repo, file_=None, **opts):
1711 def debugrevlog(ui, repo, file_=None, **opts):
1698 """show data and statistics about a revlog"""
1712 """show data and statistics about a revlog"""
1713 opts = pycompat.byteskwargs(opts)
1699 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1714 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1700
1715
1701 if opts.get("dump"):
1716 if opts.get("dump"):
1702 numrevs = len(r)
1717 numrevs = len(r)
1703 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1718 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1704 " rawsize totalsize compression heads chainlen\n"))
1719 " rawsize totalsize compression heads chainlen\n"))
1705 ts = 0
1720 ts = 0
1706 heads = set()
1721 heads = set()
1707
1722
1708 for rev in xrange(numrevs):
1723 for rev in xrange(numrevs):
1709 dbase = r.deltaparent(rev)
1724 dbase = r.deltaparent(rev)
1710 if dbase == -1:
1725 if dbase == -1:
1711 dbase = rev
1726 dbase = rev
1712 cbase = r.chainbase(rev)
1727 cbase = r.chainbase(rev)
1713 clen = r.chainlen(rev)
1728 clen = r.chainlen(rev)
1714 p1, p2 = r.parentrevs(rev)
1729 p1, p2 = r.parentrevs(rev)
1715 rs = r.rawsize(rev)
1730 rs = r.rawsize(rev)
1716 ts = ts + rs
1731 ts = ts + rs
1717 heads -= set(r.parentrevs(rev))
1732 heads -= set(r.parentrevs(rev))
1718 heads.add(rev)
1733 heads.add(rev)
1719 try:
1734 try:
1720 compression = ts / r.end(rev)
1735 compression = ts / r.end(rev)
1721 except ZeroDivisionError:
1736 except ZeroDivisionError:
1722 compression = 0
1737 compression = 0
1723 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1738 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1724 "%11d %5d %8d\n" %
1739 "%11d %5d %8d\n" %
1725 (rev, p1, p2, r.start(rev), r.end(rev),
1740 (rev, p1, p2, r.start(rev), r.end(rev),
1726 r.start(dbase), r.start(cbase),
1741 r.start(dbase), r.start(cbase),
1727 r.start(p1), r.start(p2),
1742 r.start(p1), r.start(p2),
1728 rs, ts, compression, len(heads), clen))
1743 rs, ts, compression, len(heads), clen))
1729 return 0
1744 return 0
1730
1745
1731 v = r.version
1746 v = r.version
1732 format = v & 0xFFFF
1747 format = v & 0xFFFF
1733 flags = []
1748 flags = []
1734 gdelta = False
1749 gdelta = False
1735 if v & revlog.FLAG_INLINE_DATA:
1750 if v & revlog.FLAG_INLINE_DATA:
1736 flags.append('inline')
1751 flags.append('inline')
1737 if v & revlog.FLAG_GENERALDELTA:
1752 if v & revlog.FLAG_GENERALDELTA:
1738 gdelta = True
1753 gdelta = True
1739 flags.append('generaldelta')
1754 flags.append('generaldelta')
1740 if not flags:
1755 if not flags:
1741 flags = ['(none)']
1756 flags = ['(none)']
1742
1757
1743 nummerges = 0
1758 nummerges = 0
1744 numfull = 0
1759 numfull = 0
1745 numprev = 0
1760 numprev = 0
1746 nump1 = 0
1761 nump1 = 0
1747 nump2 = 0
1762 nump2 = 0
1748 numother = 0
1763 numother = 0
1749 nump1prev = 0
1764 nump1prev = 0
1750 nump2prev = 0
1765 nump2prev = 0
1751 chainlengths = []
1766 chainlengths = []
1752 chainbases = []
1767 chainbases = []
1753 chainspans = []
1768 chainspans = []
1754
1769
1755 datasize = [None, 0, 0]
1770 datasize = [None, 0, 0]
1756 fullsize = [None, 0, 0]
1771 fullsize = [None, 0, 0]
1757 deltasize = [None, 0, 0]
1772 deltasize = [None, 0, 0]
1758 chunktypecounts = {}
1773 chunktypecounts = {}
1759 chunktypesizes = {}
1774 chunktypesizes = {}
1760
1775
1761 def addsize(size, l):
1776 def addsize(size, l):
1762 if l[0] is None or size < l[0]:
1777 if l[0] is None or size < l[0]:
1763 l[0] = size
1778 l[0] = size
1764 if size > l[1]:
1779 if size > l[1]:
1765 l[1] = size
1780 l[1] = size
1766 l[2] += size
1781 l[2] += size
1767
1782
1768 numrevs = len(r)
1783 numrevs = len(r)
1769 for rev in xrange(numrevs):
1784 for rev in xrange(numrevs):
1770 p1, p2 = r.parentrevs(rev)
1785 p1, p2 = r.parentrevs(rev)
1771 delta = r.deltaparent(rev)
1786 delta = r.deltaparent(rev)
1772 if format > 0:
1787 if format > 0:
1773 addsize(r.rawsize(rev), datasize)
1788 addsize(r.rawsize(rev), datasize)
1774 if p2 != nullrev:
1789 if p2 != nullrev:
1775 nummerges += 1
1790 nummerges += 1
1776 size = r.length(rev)
1791 size = r.length(rev)
1777 if delta == nullrev:
1792 if delta == nullrev:
1778 chainlengths.append(0)
1793 chainlengths.append(0)
1779 chainbases.append(r.start(rev))
1794 chainbases.append(r.start(rev))
1780 chainspans.append(size)
1795 chainspans.append(size)
1781 numfull += 1
1796 numfull += 1
1782 addsize(size, fullsize)
1797 addsize(size, fullsize)
1783 else:
1798 else:
1784 chainlengths.append(chainlengths[delta] + 1)
1799 chainlengths.append(chainlengths[delta] + 1)
1785 baseaddr = chainbases[delta]
1800 baseaddr = chainbases[delta]
1786 revaddr = r.start(rev)
1801 revaddr = r.start(rev)
1787 chainbases.append(baseaddr)
1802 chainbases.append(baseaddr)
1788 chainspans.append((revaddr - baseaddr) + size)
1803 chainspans.append((revaddr - baseaddr) + size)
1789 addsize(size, deltasize)
1804 addsize(size, deltasize)
1790 if delta == rev - 1:
1805 if delta == rev - 1:
1791 numprev += 1
1806 numprev += 1
1792 if delta == p1:
1807 if delta == p1:
1793 nump1prev += 1
1808 nump1prev += 1
1794 elif delta == p2:
1809 elif delta == p2:
1795 nump2prev += 1
1810 nump2prev += 1
1796 elif delta == p1:
1811 elif delta == p1:
1797 nump1 += 1
1812 nump1 += 1
1798 elif delta == p2:
1813 elif delta == p2:
1799 nump2 += 1
1814 nump2 += 1
1800 elif delta != nullrev:
1815 elif delta != nullrev:
1801 numother += 1
1816 numother += 1
1802
1817
1803 # Obtain data on the raw chunks in the revlog.
1818 # Obtain data on the raw chunks in the revlog.
1804 segment = r._getsegmentforrevs(rev, rev)[1]
1819 segment = r._getsegmentforrevs(rev, rev)[1]
1805 if segment:
1820 if segment:
1806 chunktype = segment[0]
1821 chunktype = segment[0]
1807 else:
1822 else:
1808 chunktype = 'empty'
1823 chunktype = 'empty'
1809
1824
1810 if chunktype not in chunktypecounts:
1825 if chunktype not in chunktypecounts:
1811 chunktypecounts[chunktype] = 0
1826 chunktypecounts[chunktype] = 0
1812 chunktypesizes[chunktype] = 0
1827 chunktypesizes[chunktype] = 0
1813
1828
1814 chunktypecounts[chunktype] += 1
1829 chunktypecounts[chunktype] += 1
1815 chunktypesizes[chunktype] += size
1830 chunktypesizes[chunktype] += size
1816
1831
1817 # Adjust size min value for empty cases
1832 # Adjust size min value for empty cases
1818 for size in (datasize, fullsize, deltasize):
1833 for size in (datasize, fullsize, deltasize):
1819 if size[0] is None:
1834 if size[0] is None:
1820 size[0] = 0
1835 size[0] = 0
1821
1836
1822 numdeltas = numrevs - numfull
1837 numdeltas = numrevs - numfull
1823 numoprev = numprev - nump1prev - nump2prev
1838 numoprev = numprev - nump1prev - nump2prev
1824 totalrawsize = datasize[2]
1839 totalrawsize = datasize[2]
1825 datasize[2] /= numrevs
1840 datasize[2] /= numrevs
1826 fulltotal = fullsize[2]
1841 fulltotal = fullsize[2]
1827 fullsize[2] /= numfull
1842 fullsize[2] /= numfull
1828 deltatotal = deltasize[2]
1843 deltatotal = deltasize[2]
1829 if numrevs - numfull > 0:
1844 if numrevs - numfull > 0:
1830 deltasize[2] /= numrevs - numfull
1845 deltasize[2] /= numrevs - numfull
1831 totalsize = fulltotal + deltatotal
1846 totalsize = fulltotal + deltatotal
1832 avgchainlen = sum(chainlengths) / numrevs
1847 avgchainlen = sum(chainlengths) / numrevs
1833 maxchainlen = max(chainlengths)
1848 maxchainlen = max(chainlengths)
1834 maxchainspan = max(chainspans)
1849 maxchainspan = max(chainspans)
1835 compratio = 1
1850 compratio = 1
1836 if totalsize:
1851 if totalsize:
1837 compratio = totalrawsize / totalsize
1852 compratio = totalrawsize / totalsize
1838
1853
1839 basedfmtstr = '%%%dd\n'
1854 basedfmtstr = '%%%dd\n'
1840 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1855 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
1841
1856
1842 def dfmtstr(max):
1857 def dfmtstr(max):
1843 return basedfmtstr % len(str(max))
1858 return basedfmtstr % len(str(max))
1844 def pcfmtstr(max, padding=0):
1859 def pcfmtstr(max, padding=0):
1845 return basepcfmtstr % (len(str(max)), ' ' * padding)
1860 return basepcfmtstr % (len(str(max)), ' ' * padding)
1846
1861
1847 def pcfmt(value, total):
1862 def pcfmt(value, total):
1848 if total:
1863 if total:
1849 return (value, 100 * float(value) / total)
1864 return (value, 100 * float(value) / total)
1850 else:
1865 else:
1851 return value, 100.0
1866 return value, 100.0
1852
1867
1853 ui.write(('format : %d\n') % format)
1868 ui.write(('format : %d\n') % format)
1854 ui.write(('flags : %s\n') % ', '.join(flags))
1869 ui.write(('flags : %s\n') % ', '.join(flags))
1855
1870
1856 ui.write('\n')
1871 ui.write('\n')
1857 fmt = pcfmtstr(totalsize)
1872 fmt = pcfmtstr(totalsize)
1858 fmt2 = dfmtstr(totalsize)
1873 fmt2 = dfmtstr(totalsize)
1859 ui.write(('revisions : ') + fmt2 % numrevs)
1874 ui.write(('revisions : ') + fmt2 % numrevs)
1860 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1875 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
1861 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1876 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
1862 ui.write(('revisions : ') + fmt2 % numrevs)
1877 ui.write(('revisions : ') + fmt2 % numrevs)
1863 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1878 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
1864 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1879 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
1865 ui.write(('revision size : ') + fmt2 % totalsize)
1880 ui.write(('revision size : ') + fmt2 % totalsize)
1866 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1881 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
1867 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1882 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
1868
1883
1869 def fmtchunktype(chunktype):
1884 def fmtchunktype(chunktype):
1870 if chunktype == 'empty':
1885 if chunktype == 'empty':
1871 return ' %s : ' % chunktype
1886 return ' %s : ' % chunktype
1872 elif chunktype in string.ascii_letters:
1887 elif chunktype in string.ascii_letters:
1873 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1888 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
1874 else:
1889 else:
1875 return ' 0x%s : ' % hex(chunktype)
1890 return ' 0x%s : ' % hex(chunktype)
1876
1891
1877 ui.write('\n')
1892 ui.write('\n')
1878 ui.write(('chunks : ') + fmt2 % numrevs)
1893 ui.write(('chunks : ') + fmt2 % numrevs)
1879 for chunktype in sorted(chunktypecounts):
1894 for chunktype in sorted(chunktypecounts):
1880 ui.write(fmtchunktype(chunktype))
1895 ui.write(fmtchunktype(chunktype))
1881 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1896 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
1882 ui.write(('chunks size : ') + fmt2 % totalsize)
1897 ui.write(('chunks size : ') + fmt2 % totalsize)
1883 for chunktype in sorted(chunktypecounts):
1898 for chunktype in sorted(chunktypecounts):
1884 ui.write(fmtchunktype(chunktype))
1899 ui.write(fmtchunktype(chunktype))
1885 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1900 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
1886
1901
1887 ui.write('\n')
1902 ui.write('\n')
1888 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1903 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
1889 ui.write(('avg chain length : ') + fmt % avgchainlen)
1904 ui.write(('avg chain length : ') + fmt % avgchainlen)
1890 ui.write(('max chain length : ') + fmt % maxchainlen)
1905 ui.write(('max chain length : ') + fmt % maxchainlen)
1891 ui.write(('max chain reach : ') + fmt % maxchainspan)
1906 ui.write(('max chain reach : ') + fmt % maxchainspan)
1892 ui.write(('compression ratio : ') + fmt % compratio)
1907 ui.write(('compression ratio : ') + fmt % compratio)
1893
1908
1894 if format > 0:
1909 if format > 0:
1895 ui.write('\n')
1910 ui.write('\n')
1896 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1911 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
1897 % tuple(datasize))
1912 % tuple(datasize))
1898 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1913 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
1899 % tuple(fullsize))
1914 % tuple(fullsize))
1900 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1915 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
1901 % tuple(deltasize))
1916 % tuple(deltasize))
1902
1917
1903 if numdeltas > 0:
1918 if numdeltas > 0:
1904 ui.write('\n')
1919 ui.write('\n')
1905 fmt = pcfmtstr(numdeltas)
1920 fmt = pcfmtstr(numdeltas)
1906 fmt2 = pcfmtstr(numdeltas, 4)
1921 fmt2 = pcfmtstr(numdeltas, 4)
1907 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1922 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
1908 if numprev > 0:
1923 if numprev > 0:
1909 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1924 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
1910 numprev))
1925 numprev))
1911 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1926 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
1912 numprev))
1927 numprev))
1913 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1928 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
1914 numprev))
1929 numprev))
1915 if gdelta:
1930 if gdelta:
1916 ui.write(('deltas against p1 : ')
1931 ui.write(('deltas against p1 : ')
1917 + fmt % pcfmt(nump1, numdeltas))
1932 + fmt % pcfmt(nump1, numdeltas))
1918 ui.write(('deltas against p2 : ')
1933 ui.write(('deltas against p2 : ')
1919 + fmt % pcfmt(nump2, numdeltas))
1934 + fmt % pcfmt(nump2, numdeltas))
1920 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1935 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
1921 numdeltas))
1936 numdeltas))
1922
1937
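The per-revision loop above carries three parallel lists for delta chains: the chain length, the offset of the chain's base revision, and the byte span from that base to the end of the current revision. A self-contained sketch of that bookkeeping with made-up offsets and sizes (the input tuple layout is invented for the example):

    nullrev = -1

    def chainstats(revisions):
        """revisions: list of (deltaparent, start_offset, stored_size)."""
        chainlengths, chainbases, chainspans = [], [], []
        for delta, start, size in revisions:
            if delta == nullrev:                       # full revision starts a new chain
                chainlengths.append(0)
                chainbases.append(start)
                chainspans.append(size)
            else:                                      # delta stacked on revision ``delta``
                chainlengths.append(chainlengths[delta] + 1)
                base = chainbases[delta]
                chainbases.append(base)
                chainspans.append((start - base) + size)
        return chainlengths, chainspans

    lengths, spans = chainstats([(nullrev, 0, 100), (0, 100, 20), (1, 120, 15)])
    # lengths == [0, 1, 2]; spans == [100, 120, 135]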
1923 @command('debugrevspec',
1938 @command('debugrevspec',
1924 [('', 'optimize', None,
1939 [('', 'optimize', None,
1925 _('print parsed tree after optimizing (DEPRECATED)')),
1940 _('print parsed tree after optimizing (DEPRECATED)')),
1926 ('', 'show-revs', True, _('print list of result revisions (default)')),
1941 ('', 'show-revs', True, _('print list of result revisions (default)')),
1927 ('s', 'show-set', None, _('print internal representation of result set')),
1942 ('s', 'show-set', None, _('print internal representation of result set')),
1928 ('p', 'show-stage', [],
1943 ('p', 'show-stage', [],
1929 _('print parsed tree at the given stage'), _('NAME')),
1944 _('print parsed tree at the given stage'), _('NAME')),
1930 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1945 ('', 'no-optimized', False, _('evaluate tree without optimization')),
1931 ('', 'verify-optimized', False, _('verify optimized result')),
1946 ('', 'verify-optimized', False, _('verify optimized result')),
1932 ],
1947 ],
1933 ('REVSPEC'))
1948 ('REVSPEC'))
1934 def debugrevspec(ui, repo, expr, **opts):
1949 def debugrevspec(ui, repo, expr, **opts):
1935 """parse and apply a revision specification
1950 """parse and apply a revision specification
1936
1951
1937 Use -p/--show-stage option to print the parsed tree at the given stages.
1952 Use -p/--show-stage option to print the parsed tree at the given stages.
1938 Use -p all to print tree at every stage.
1953 Use -p all to print tree at every stage.
1939
1954
1940 Use --no-show-revs option with -s or -p to print only the set
1955 Use --no-show-revs option with -s or -p to print only the set
1941 representation or the parsed tree respectively.
1956 representation or the parsed tree respectively.
1942
1957
1943 Use --verify-optimized to compare the optimized result with the unoptimized
1958 Use --verify-optimized to compare the optimized result with the unoptimized
1944 one. Returns 1 if the optimized result differs.
1959 one. Returns 1 if the optimized result differs.
1945 """
1960 """
1961 opts = pycompat.byteskwargs(opts)
1946 stages = [
1962 stages = [
1947 ('parsed', lambda tree: tree),
1963 ('parsed', lambda tree: tree),
1948 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1964 ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
1949 ('concatenated', revsetlang.foldconcat),
1965 ('concatenated', revsetlang.foldconcat),
1950 ('analyzed', revsetlang.analyze),
1966 ('analyzed', revsetlang.analyze),
1951 ('optimized', revsetlang.optimize),
1967 ('optimized', revsetlang.optimize),
1952 ]
1968 ]
1953 if opts['no_optimized']:
1969 if opts['no_optimized']:
1954 stages = stages[:-1]
1970 stages = stages[:-1]
1955 if opts['verify_optimized'] and opts['no_optimized']:
1971 if opts['verify_optimized'] and opts['no_optimized']:
1956 raise error.Abort(_('cannot use --verify-optimized with '
1972 raise error.Abort(_('cannot use --verify-optimized with '
1957 '--no-optimized'))
1973 '--no-optimized'))
1958 stagenames = set(n for n, f in stages)
1974 stagenames = set(n for n, f in stages)
1959
1975
1960 showalways = set()
1976 showalways = set()
1961 showchanged = set()
1977 showchanged = set()
1962 if ui.verbose and not opts['show_stage']:
1978 if ui.verbose and not opts['show_stage']:
1963 # show parsed tree by --verbose (deprecated)
1979 # show parsed tree by --verbose (deprecated)
1964 showalways.add('parsed')
1980 showalways.add('parsed')
1965 showchanged.update(['expanded', 'concatenated'])
1981 showchanged.update(['expanded', 'concatenated'])
1966 if opts['optimize']:
1982 if opts['optimize']:
1967 showalways.add('optimized')
1983 showalways.add('optimized')
1968 if opts['show_stage'] and opts['optimize']:
1984 if opts['show_stage'] and opts['optimize']:
1969 raise error.Abort(_('cannot use --optimize with --show-stage'))
1985 raise error.Abort(_('cannot use --optimize with --show-stage'))
1970 if opts['show_stage'] == ['all']:
1986 if opts['show_stage'] == ['all']:
1971 showalways.update(stagenames)
1987 showalways.update(stagenames)
1972 else:
1988 else:
1973 for n in opts['show_stage']:
1989 for n in opts['show_stage']:
1974 if n not in stagenames:
1990 if n not in stagenames:
1975 raise error.Abort(_('invalid stage name: %s') % n)
1991 raise error.Abort(_('invalid stage name: %s') % n)
1976 showalways.update(opts['show_stage'])
1992 showalways.update(opts['show_stage'])
1977
1993
1978 treebystage = {}
1994 treebystage = {}
1979 printedtree = None
1995 printedtree = None
1980 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1996 tree = revsetlang.parse(expr, lookup=repo.__contains__)
1981 for n, f in stages:
1997 for n, f in stages:
1982 treebystage[n] = tree = f(tree)
1998 treebystage[n] = tree = f(tree)
1983 if n in showalways or (n in showchanged and tree != printedtree):
1999 if n in showalways or (n in showchanged and tree != printedtree):
1984 if opts['show_stage'] or n != 'parsed':
2000 if opts['show_stage'] or n != 'parsed':
1985 ui.write(("* %s:\n") % n)
2001 ui.write(("* %s:\n") % n)
1986 ui.write(revsetlang.prettyformat(tree), "\n")
2002 ui.write(revsetlang.prettyformat(tree), "\n")
1987 printedtree = tree
2003 printedtree = tree
1988
2004
1989 if opts['verify_optimized']:
2005 if opts['verify_optimized']:
1990 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2006 arevs = revset.makematcher(treebystage['analyzed'])(repo)
1991 brevs = revset.makematcher(treebystage['optimized'])(repo)
2007 brevs = revset.makematcher(treebystage['optimized'])(repo)
1992 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2008 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
1993 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2009 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
1994 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2010 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
1995 arevs = list(arevs)
2011 arevs = list(arevs)
1996 brevs = list(brevs)
2012 brevs = list(brevs)
1997 if arevs == brevs:
2013 if arevs == brevs:
1998 return 0
2014 return 0
1999 ui.write(('--- analyzed\n'), label='diff.file_a')
2015 ui.write(('--- analyzed\n'), label='diff.file_a')
2000 ui.write(('+++ optimized\n'), label='diff.file_b')
2016 ui.write(('+++ optimized\n'), label='diff.file_b')
2001 sm = difflib.SequenceMatcher(None, arevs, brevs)
2017 sm = difflib.SequenceMatcher(None, arevs, brevs)
2002 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2018 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2003 if tag in ('delete', 'replace'):
2019 if tag in ('delete', 'replace'):
2004 for c in arevs[alo:ahi]:
2020 for c in arevs[alo:ahi]:
2005 ui.write('-%s\n' % c, label='diff.deleted')
2021 ui.write('-%s\n' % c, label='diff.deleted')
2006 if tag in ('insert', 'replace'):
2022 if tag in ('insert', 'replace'):
2007 for c in brevs[blo:bhi]:
2023 for c in brevs[blo:bhi]:
2008 ui.write('+%s\n' % c, label='diff.inserted')
2024 ui.write('+%s\n' % c, label='diff.inserted')
2009 if tag == 'equal':
2025 if tag == 'equal':
2010 for c in arevs[alo:ahi]:
2026 for c in arevs[alo:ahi]:
2011 ui.write(' %s\n' % c)
2027 ui.write(' %s\n' % c)
2012 return 1
2028 return 1
2013
2029
2014 func = revset.makematcher(tree)
2030 func = revset.makematcher(tree)
2015 revs = func(repo)
2031 revs = func(repo)
2016 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2032 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2017 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2033 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2018 if not opts['show_revs']:
2034 if not opts['show_revs']:
2019 return
2035 return
2020 for c in revs:
2036 for c in revs:
2021 ui.write("%s\n" % c)
2037 ui.write("%s\n" % c)
2022
2038
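The stage handling above is a simple fold: the parse tree is threaded through a list of (name, transform) pairs and each intermediate result is kept, so a single stage can be printed or the analyzed and optimized results compared afterwards. A sketch of that shape with string transforms standing in for the real revsetlang functions:

    def runstages(value, stages):
        bystage = {}
        for name, transform in stages:
            value = transform(value)
            bystage[name] = value
        return bystage, value

    stages = [
        ('parsed', lambda t: t),
        ('expanded', str.strip),
        ('optimized', str.lower),
    ]
    bystage, final = runstages('  ALL()  ', stages)
    # bystage == {'parsed': '  ALL()  ', 'expanded': 'ALL()', 'optimized': 'all()'}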
2023 @command('debugsetparents', [], _('REV1 [REV2]'))
2039 @command('debugsetparents', [], _('REV1 [REV2]'))
2024 def debugsetparents(ui, repo, rev1, rev2=None):
2040 def debugsetparents(ui, repo, rev1, rev2=None):
2025 """manually set the parents of the current working directory
2041 """manually set the parents of the current working directory
2026
2042
2027 This is useful for writing repository conversion tools, but should
2043 This is useful for writing repository conversion tools, but should
2028 be used with care. For example, neither the working directory nor the
2044 be used with care. For example, neither the working directory nor the
2029 dirstate is updated, so file status may be incorrect after running this
2045 dirstate is updated, so file status may be incorrect after running this
2030 command.
2046 command.
2031
2047
2032 Returns 0 on success.
2048 Returns 0 on success.
2033 """
2049 """
2034
2050
2035 r1 = scmutil.revsingle(repo, rev1).node()
2051 r1 = scmutil.revsingle(repo, rev1).node()
2036 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2052 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2037
2053
2038 with repo.wlock():
2054 with repo.wlock():
2039 repo.setparents(r1, r2)
2055 repo.setparents(r1, r2)
2040
2056
2041 @command('debugsub',
2057 @command('debugsub',
2042 [('r', 'rev', '',
2058 [('r', 'rev', '',
2043 _('revision to check'), _('REV'))],
2059 _('revision to check'), _('REV'))],
2044 _('[-r REV] [REV]'))
2060 _('[-r REV] [REV]'))
2045 def debugsub(ui, repo, rev=None):
2061 def debugsub(ui, repo, rev=None):
2046 ctx = scmutil.revsingle(repo, rev, None)
2062 ctx = scmutil.revsingle(repo, rev, None)
2047 for k, v in sorted(ctx.substate.items()):
2063 for k, v in sorted(ctx.substate.items()):
2048 ui.write(('path %s\n') % k)
2064 ui.write(('path %s\n') % k)
2049 ui.write((' source %s\n') % v[0])
2065 ui.write((' source %s\n') % v[0])
2050 ui.write((' revision %s\n') % v[1])
2066 ui.write((' revision %s\n') % v[1])
2051
2067
2052 @command('debugsuccessorssets',
2068 @command('debugsuccessorssets',
2053 [],
2069 [],
2054 _('[REV]'))
2070 _('[REV]'))
2055 def debugsuccessorssets(ui, repo, *revs):
2071 def debugsuccessorssets(ui, repo, *revs):
2056 """show set of successors for revision
2072 """show set of successors for revision
2057
2073
2058 A successors set of changeset A is a consistent group of revisions that
2074 A successors set of changeset A is a consistent group of revisions that
2059 succeed A. It contains non-obsolete changesets only.
2075 succeed A. It contains non-obsolete changesets only.
2060
2076
2061 In most cases a changeset A has a single successors set containing a single
2077 In most cases a changeset A has a single successors set containing a single
2062 successor (changeset A replaced by A').
2078 successor (changeset A replaced by A').
2063
2079
2064 A changeset that is made obsolete with no successors is called "pruned".
2080 A changeset that is made obsolete with no successors is called "pruned".
2065 Such changesets have no successors sets at all.
2081 Such changesets have no successors sets at all.
2066
2082
2067 A changeset that has been "split" will have a successors set containing
2083 A changeset that has been "split" will have a successors set containing
2068 more than one successor.
2084 more than one successor.
2069
2085
2070 A changeset that has been rewritten in multiple different ways is called
2086 A changeset that has been rewritten in multiple different ways is called
2071 "divergent". Such changesets have multiple successor sets (each of which
2087 "divergent". Such changesets have multiple successor sets (each of which
2072 may also be split, i.e. have multiple successors).
2088 may also be split, i.e. have multiple successors).
2073
2089
2074 Results are displayed as follows::
2090 Results are displayed as follows::
2075
2091
2076 <rev1>
2092 <rev1>
2077 <successors-1A>
2093 <successors-1A>
2078 <rev2>
2094 <rev2>
2079 <successors-2A>
2095 <successors-2A>
2080 <successors-2B1> <successors-2B2> <successors-2B3>
2096 <successors-2B1> <successors-2B2> <successors-2B3>
2081
2097
2082 Here rev2 has two possible (i.e. divergent) successors sets. The first
2098 Here rev2 has two possible (i.e. divergent) successors sets. The first
2083 holds one element, whereas the second holds three (i.e. the changeset has
2099 holds one element, whereas the second holds three (i.e. the changeset has
2084 been split).
2100 been split).
2085 """
2101 """
2086 # passed to successorssets caching computation from one call to another
2102 # passed to successorssets caching computation from one call to another
2087 cache = {}
2103 cache = {}
2088 ctx2str = str
2104 ctx2str = str
2089 node2str = short
2105 node2str = short
2090 if ui.debug():
2106 if ui.debug():
2091 def ctx2str(ctx):
2107 def ctx2str(ctx):
2092 return ctx.hex()
2108 return ctx.hex()
2093 node2str = hex
2109 node2str = hex
2094 for rev in scmutil.revrange(repo, revs):
2110 for rev in scmutil.revrange(repo, revs):
2095 ctx = repo[rev]
2111 ctx = repo[rev]
2096 ui.write('%s\n'% ctx2str(ctx))
2112 ui.write('%s\n'% ctx2str(ctx))
2097 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2113 for succsset in obsolete.successorssets(repo, ctx.node(), cache):
2098 if succsset:
2114 if succsset:
2099 ui.write(' ')
2115 ui.write(' ')
2100 ui.write(node2str(succsset[0]))
2116 ui.write(node2str(succsset[0]))
2101 for node in succsset[1:]:
2117 for node in succsset[1:]:
2102 ui.write(' ')
2118 ui.write(' ')
2103 ui.write(node2str(node))
2119 ui.write(node2str(node))
2104 ui.write('\n')
2120 ui.write('\n')
2105
2121
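The display format described in the docstring is one line per requested revision followed by one indented line per successors set, with the successors of a set separated by spaces. A small sketch of that layout over invented data (the hashes are shortened and made up):

    def formatsuccessorssets(revmap):
        lines = []
        for rev, succsets in revmap:
            lines.append(rev)
            for succset in succsets:
                lines.append('    ' + ' '.join(succset))
        return '\n'.join(lines)

    print(formatsuccessorssets([
        ('1a2b3c4d5e6f', [['9f8e7d6c5b4a']]),                    # simple rewrite
        ('aaaaaaaaaaaa', [['bbbbbbbbbbbb'],
                          ['cccccccccccc', 'dddddddddddd']]),    # divergent, one set split
    ]))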
2106 @command('debugtemplate',
2122 @command('debugtemplate',
2107 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2123 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2108 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2124 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2109 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2125 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2110 optionalrepo=True)
2126 optionalrepo=True)
2111 def debugtemplate(ui, repo, tmpl, **opts):
2127 def debugtemplate(ui, repo, tmpl, **opts):
2112 """parse and apply a template
2128 """parse and apply a template
2113
2129
2114 If -r/--rev is given, the template is processed as a log template and
2130 If -r/--rev is given, the template is processed as a log template and
2115 applied to the given changesets. Otherwise, it is processed as a generic
2131 applied to the given changesets. Otherwise, it is processed as a generic
2116 template.
2132 template.
2117
2133
2118 Use --verbose to print the parsed tree.
2134 Use --verbose to print the parsed tree.
2119 """
2135 """
2120 revs = None
2136 revs = None
2121 if opts['rev']:
2137 if opts['rev']:
2122 if repo is None:
2138 if repo is None:
2123 raise error.RepoError(_('there is no Mercurial repository here '
2139 raise error.RepoError(_('there is no Mercurial repository here '
2124 '(.hg not found)'))
2140 '(.hg not found)'))
2125 revs = scmutil.revrange(repo, opts['rev'])
2141 revs = scmutil.revrange(repo, opts['rev'])
2126
2142
2127 props = {}
2143 props = {}
2128 for d in opts['define']:
2144 for d in opts['define']:
2129 try:
2145 try:
2130 k, v = (e.strip() for e in d.split('=', 1))
2146 k, v = (e.strip() for e in d.split('=', 1))
2131 if not k or k == 'ui':
2147 if not k or k == 'ui':
2132 raise ValueError
2148 raise ValueError
2133 props[k] = v
2149 props[k] = v
2134 except ValueError:
2150 except ValueError:
2135 raise error.Abort(_('malformed keyword definition: %s') % d)
2151 raise error.Abort(_('malformed keyword definition: %s') % d)
2136
2152
2137 if ui.verbose:
2153 if ui.verbose:
2138 aliases = ui.configitems('templatealias')
2154 aliases = ui.configitems('templatealias')
2139 tree = templater.parse(tmpl)
2155 tree = templater.parse(tmpl)
2140 ui.note(templater.prettyformat(tree), '\n')
2156 ui.note(templater.prettyformat(tree), '\n')
2141 newtree = templater.expandaliases(tree, aliases)
2157 newtree = templater.expandaliases(tree, aliases)
2142 if newtree != tree:
2158 if newtree != tree:
2143 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2159 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2144
2160
2145 if revs is None:
2161 if revs is None:
2146 t = formatter.maketemplater(ui, tmpl)
2162 t = formatter.maketemplater(ui, tmpl)
2147 props['ui'] = ui
2163 props['ui'] = ui
2148 ui.write(t.render(props))
2164 ui.write(t.render(props))
2149 else:
2165 else:
2150 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2166 displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
2151 for r in revs:
2167 for r in revs:
2152 displayer.show(repo[r], **props)
2168 displayer.show(repo[r], **props)
2153 displayer.close()
2169 displayer.close()
2154
2170
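The ``-D KEY=VALUE`` handling above splits on the first ``=``, strips whitespace, and rejects definitions with an empty key or the reserved key ``ui``. A standalone sketch of that parsing, with error reporting simplified to a plain ValueError:

    def parsedefines(defines):
        props = {}
        for d in defines:
            try:
                k, v = (e.strip() for e in d.split('=', 1))
                if not k or k == 'ui':
                    raise ValueError
                props[k] = v
            except ValueError:
                raise ValueError('malformed keyword definition: %s' % d)
        return props

    print(parsedefines(['author = alice', 'rev=42']))
    # {'author': 'alice', 'rev': '42'}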
2155 @command('debugupdatecaches', [])
2171 @command('debugupdatecaches', [])
2156 def debugupdatecaches(ui, repo, *pats, **opts):
2172 def debugupdatecaches(ui, repo, *pats, **opts):
2157 """warm all known caches in the repository"""
2173 """warm all known caches in the repository"""
2158 with repo.wlock():
2174 with repo.wlock():
2159 with repo.lock():
2175 with repo.lock():
2160 repo.updatecaches()
2176 repo.updatecaches()
2161
2177
2162 @command('debugupgraderepo', [
2178 @command('debugupgraderepo', [
2163 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2179 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2164 ('', 'run', False, _('performs an upgrade')),
2180 ('', 'run', False, _('performs an upgrade')),
2165 ])
2181 ])
2166 def debugupgraderepo(ui, repo, run=False, optimize=None):
2182 def debugupgraderepo(ui, repo, run=False, optimize=None):
2167 """upgrade a repository to use different features
2183 """upgrade a repository to use different features
2168
2184
2169 If no arguments are specified, the repository is evaluated for upgrade
2185 If no arguments are specified, the repository is evaluated for upgrade
2170 and a list of problems and potential optimizations is printed.
2186 and a list of problems and potential optimizations is printed.
2171
2187
2172 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2188 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2173 can be influenced via additional arguments. More details will be provided
2189 can be influenced via additional arguments. More details will be provided
2174 by the command output when run without ``--run``.
2190 by the command output when run without ``--run``.
2175
2191
2176 During the upgrade, the repository will be locked and no writes will be
2192 During the upgrade, the repository will be locked and no writes will be
2177 allowed.
2193 allowed.
2178
2194
2179 At the end of the upgrade, the repository may not be readable while new
2195 At the end of the upgrade, the repository may not be readable while new
2180 repository data is swapped in. This window will be as long as it takes to
2196 repository data is swapped in. This window will be as long as it takes to
2181 rename some directories inside the ``.hg`` directory. On most machines, this
2197 rename some directories inside the ``.hg`` directory. On most machines, this
2182 should complete almost instantaneously and the chances of a consumer being
2198 should complete almost instantaneously and the chances of a consumer being
2183 unable to access the repository should be low.
2199 unable to access the repository should be low.
2184 """
2200 """
2185 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2201 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2186
2202
2187 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2203 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2188 inferrepo=True)
2204 inferrepo=True)
2189 def debugwalk(ui, repo, *pats, **opts):
2205 def debugwalk(ui, repo, *pats, **opts):
2190 """show how files match on given patterns"""
2206 """show how files match on given patterns"""
2207 opts = pycompat.byteskwargs(opts)
2191 m = scmutil.match(repo[None], pats, opts)
2208 m = scmutil.match(repo[None], pats, opts)
2192 ui.write(('matcher: %r\n' % m))
2209 ui.write(('matcher: %r\n' % m))
2193 items = list(repo[None].walk(m))
2210 items = list(repo[None].walk(m))
2194 if not items:
2211 if not items:
2195 return
2212 return
2196 f = lambda fn: fn
2213 f = lambda fn: fn
2197 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2214 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2198 f = lambda fn: util.normpath(fn)
2215 f = lambda fn: util.normpath(fn)
2199 fmt = 'f %%-%ds %%-%ds %%s' % (
2216 fmt = 'f %%-%ds %%-%ds %%s' % (
2200 max([len(abs) for abs in items]),
2217 max([len(abs) for abs in items]),
2201 max([len(m.rel(abs)) for abs in items]))
2218 max([len(m.rel(abs)) for abs in items]))
2202 for abs in items:
2219 for abs in items:
2203 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2220 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2204 ui.write("%s\n" % line.rstrip())
2221 ui.write("%s\n" % line.rstrip())
2205
2222
2206 @command('debugwireargs',
2223 @command('debugwireargs',
2207 [('', 'three', '', 'three'),
2224 [('', 'three', '', 'three'),
2208 ('', 'four', '', 'four'),
2225 ('', 'four', '', 'four'),
2209 ('', 'five', '', 'five'),
2226 ('', 'five', '', 'five'),
2210 ] + cmdutil.remoteopts,
2227 ] + cmdutil.remoteopts,
2211 _('REPO [OPTIONS]... [ONE [TWO]]'),
2228 _('REPO [OPTIONS]... [ONE [TWO]]'),
2212 norepo=True)
2229 norepo=True)
2213 def debugwireargs(ui, repopath, *vals, **opts):
2230 def debugwireargs(ui, repopath, *vals, **opts):
2231 opts = pycompat.byteskwargs(opts)
2214 repo = hg.peer(ui, opts, repopath)
2232 repo = hg.peer(ui, opts, repopath)
2215 for opt in cmdutil.remoteopts:
2233 for opt in cmdutil.remoteopts:
2216 del opts[opt[1]]
2234 del opts[opt[1]]
2217 args = {}
2235 args = {}
2218 for k, v in opts.iteritems():
2236 for k, v in opts.iteritems():
2219 if v:
2237 if v:
2220 args[k] = v
2238 args[k] = v
2221 # run twice to check that we don't mess up the stream for the next command
2239 # run twice to check that we don't mess up the stream for the next command
2222 res1 = repo.debugwireargs(*vals, **args)
2240 res1 = repo.debugwireargs(*vals, **args)
2223 res2 = repo.debugwireargs(*vals, **args)
2241 res2 = repo.debugwireargs(*vals, **args)
2224 ui.write("%s\n" % res1)
2242 ui.write("%s\n" % res1)
2225 if res1 != res2:
2243 if res1 != res2:
2226 ui.warn("%s\n" % res2)
2244 ui.warn("%s\n" % res2)
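The ``pycompat.byteskwargs(opts)`` calls added throughout these functions exist because ``**kwargs`` hands back str-keyed dicts under Python 3, while the surrounding code indexes options with bytes keys. A rough, illustrative equivalent of such a conversion; the real helper lives in pycompat, and the latin-1 encoding used here is an assumption:

    def byteskwargs_sketch(kwargs):
        """Convert str dict keys to bytes, leaving values untouched."""
        return {k.encode('latin-1') if isinstance(k, str) else k: v
                for k, v in kwargs.items()}

    # byteskwargs_sketch({'rev': '42', 'tool': None}) -> {b'rev': '42', b'tool': None}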
@@ -1,367 +1,369 b''
1 # sshpeer.py - ssh repository proxy class for mercurial
1 # sshpeer.py - ssh repository proxy class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import re
10 import re
11
11
12 from .i18n import _
12 from .i18n import _
13 from . import (
13 from . import (
14 error,
14 error,
15 pycompat,
15 util,
16 util,
16 wireproto,
17 wireproto,
17 )
18 )
18
19
19 class remotelock(object):
20 class remotelock(object):
20 def __init__(self, repo):
21 def __init__(self, repo):
21 self.repo = repo
22 self.repo = repo
22 def release(self):
23 def release(self):
23 self.repo.unlock()
24 self.repo.unlock()
24 self.repo = None
25 self.repo = None
25 def __enter__(self):
26 def __enter__(self):
26 return self
27 return self
27 def __exit__(self, exc_type, exc_val, exc_tb):
28 def __exit__(self, exc_type, exc_val, exc_tb):
28 if self.repo:
29 if self.repo:
29 self.release()
30 self.release()
30 def __del__(self):
31 def __del__(self):
31 if self.repo:
32 if self.repo:
32 self.release()
33 self.release()
33
34
34 def _serverquote(s):
35 def _serverquote(s):
35 if not s:
36 if not s:
36 return s
37 return s
37 '''quote a string for the remote shell ... which we assume is sh'''
38 '''quote a string for the remote shell ... which we assume is sh'''
38 if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
39 if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
39 return s
40 return s
40 return "'%s'" % s.replace("'", "'\\''")
41 return "'%s'" % s.replace("'", "'\\''")
41
42
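A quick illustration of what ``_serverquote`` above produces: strings made only of characters its regular expression treats as shell-safe pass through unchanged, and anything else is wrapped in single quotes with embedded quotes escaped for sh. The quoting rule is repeated here only so the asserts run on their own:

    import re

    def serverquote(s):
        """Same quoting rule as _serverquote above, copied for a standalone demo."""
        if not s:
            return s
        if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
            return s
        return "'%s'" % s.replace("'", "'\\''")

    assert serverquote('path/to/repo-1') == 'path/to/repo-1'       # shell-safe, untouched
    assert serverquote("it's here") == "'it'\\''s here'"           # quoted and escaped for sh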
42 def _forwardoutput(ui, pipe):
43 def _forwardoutput(ui, pipe):
43 """display all data currently available on pipe as remote output.
44 """display all data currently available on pipe as remote output.
44
45
45 This is non-blocking."""
46 This is non-blocking."""
46 s = util.readpipe(pipe)
47 s = util.readpipe(pipe)
47 if s:
48 if s:
48 for l in s.splitlines():
49 for l in s.splitlines():
49 ui.status(_("remote: "), l, '\n')
50 ui.status(_("remote: "), l, '\n')
50
51
51 class doublepipe(object):
52 class doublepipe(object):
52 """Operate a side-channel pipe in addition of a main one
53 """Operate a side-channel pipe in addition of a main one
53
54
54 The side-channel pipe contains server output to be forwarded to the user.
55 The side-channel pipe contains server output to be forwarded to the user.
55 The double pipe will behave as the "main" pipe, but will ensure the
56 The double pipe will behave as the "main" pipe, but will ensure the
56 content of the "side" pipe is properly processed while we wait for a blocking
57 content of the "side" pipe is properly processed while we wait for a blocking
57 call on the "main" pipe.
58 call on the "main" pipe.
58
59
59 If large amounts of data are read from "main", the forward will cease after
60 If large amounts of data are read from "main", the forward will cease after
60 the first bytes start to appear. This simplifies the implementation
61 the first bytes start to appear. This simplifies the implementation
61 without affecting the actual output of sshpeer too much, as we rarely issue
62 without affecting the actual output of sshpeer too much, as we rarely issue
62 large reads for data not yet emitted by the server.
63 large reads for data not yet emitted by the server.
63
64
64 The main pipe is expected to be a 'bufferedinputpipe' from the util module
65 The main pipe is expected to be a 'bufferedinputpipe' from the util module
65 that handles all the OS-specific bits. This class lives in this module
66 that handles all the OS-specific bits. This class lives in this module
66 because it focuses on behavior specific to the SSH protocol."""
67 because it focuses on behavior specific to the SSH protocol."""
67
68
68 def __init__(self, ui, main, side):
69 def __init__(self, ui, main, side):
69 self._ui = ui
70 self._ui = ui
70 self._main = main
71 self._main = main
71 self._side = side
72 self._side = side
72
73
73 def _wait(self):
74 def _wait(self):
74 """wait until some data are available on main or side
75 """wait until some data are available on main or side
75
76
76 return a pair of booleans (ismainready, issideready)
77 return a pair of booleans (ismainready, issideready)
77
78
78 (This will only wait for data if the setup is supported by `util.poll`)
79 (This will only wait for data if the setup is supported by `util.poll`)
79 """
80 """
80 if getattr(self._main, 'hasbuffer', False): # getattr for classic pipe
81 if getattr(self._main, 'hasbuffer', False): # getattr for classic pipe
81 return (True, True) # main has data, assume side is worth poking at.
82 return (True, True) # main has data, assume side is worth poking at.
82 fds = [self._main.fileno(), self._side.fileno()]
83 fds = [self._main.fileno(), self._side.fileno()]
83 try:
84 try:
84 act = util.poll(fds)
85 act = util.poll(fds)
85 except NotImplementedError:
86 except NotImplementedError:
86 # not-yet-supported case, assume all have data.
87 # not-yet-supported case, assume all have data.
87 act = fds
88 act = fds
88 return (self._main.fileno() in act, self._side.fileno() in act)
89 return (self._main.fileno() in act, self._side.fileno() in act)
89
90
90 def write(self, data):
91 def write(self, data):
91 return self._call('write', data)
92 return self._call('write', data)
92
93
93 def read(self, size):
94 def read(self, size):
94 r = self._call('read', size)
95 r = self._call('read', size)
95 if size != 0 and not r:
96 if size != 0 and not r:
96 # We've observed a condition that indicates the
97 # We've observed a condition that indicates the
97 # stdout closed unexpectedly. Check stderr one
98 # stdout closed unexpectedly. Check stderr one
98 # more time and snag anything that's there before
99 # more time and snag anything that's there before
99 # letting anyone know the main part of the pipe
100 # letting anyone know the main part of the pipe
100 # closed prematurely.
101 # closed prematurely.
101 _forwardoutput(self._ui, self._side)
102 _forwardoutput(self._ui, self._side)
102 return r
103 return r
103
104
104 def readline(self):
105 def readline(self):
105 return self._call('readline')
106 return self._call('readline')
106
107
107 def _call(self, methname, data=None):
108 def _call(self, methname, data=None):
108 """call <methname> on "main", forward output of "side" while blocking
109 """call <methname> on "main", forward output of "side" while blocking
109 """
110 """
110 # data can be '' or 0
111 # data can be '' or 0
111 if (data is not None and not data) or self._main.closed:
112 if (data is not None and not data) or self._main.closed:
112 _forwardoutput(self._ui, self._side)
113 _forwardoutput(self._ui, self._side)
113 return ''
114 return ''
114 while True:
115 while True:
115 mainready, sideready = self._wait()
116 mainready, sideready = self._wait()
116 if sideready:
117 if sideready:
117 _forwardoutput(self._ui, self._side)
118 _forwardoutput(self._ui, self._side)
118 if mainready:
119 if mainready:
119 meth = getattr(self._main, methname)
120 meth = getattr(self._main, methname)
120 if data is None:
121 if data is None:
121 return meth()
122 return meth()
122 else:
123 else:
123 return meth(data)
124 return meth(data)
124
125
125 def close(self):
126 def close(self):
126 return self._main.close()
127 return self._main.close()
127
128
128 def flush(self):
129 def flush(self):
129 return self._main.flush()
130 return self._main.flush()
130
131
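The core idea of ``doublepipe`` above is: before any blocking call on the main channel, wait until either pipe is readable, drain the side channel (remote stderr) first, and only then service the main one. A stripped-down sketch of that pattern using the stdlib ``select`` and ``os.read`` directly instead of util.poll and the buffered pipe wrapper; it assumes both arguments are pipe-backed file objects:

    import os
    import select

    def readline_with_side_forwarding(main, side, forward):
        """Read one line from ``main``, forwarding any ``side`` output seen while waiting."""
        fds = [main, side]
        while True:
            ready, _, _ = select.select(fds, [], [])
            if side in ready:
                chunk = os.read(side.fileno(), 4096)  # select said ready, so this won't block
                if chunk:
                    forward(chunk)
                else:
                    fds = [main]                      # side channel hit EOF, stop watching it
            if main in ready:
                return main.readline()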
131 class sshpeer(wireproto.wirepeer):
132 class sshpeer(wireproto.wirepeer):
132 def __init__(self, ui, path, create=False):
133 def __init__(self, ui, path, create=False):
133 self._url = path
134 self._url = path
134 self.ui = ui
135 self.ui = ui
135 self.pipeo = self.pipei = self.pipee = None
136 self.pipeo = self.pipei = self.pipee = None
136
137
137 u = util.url(path, parsequery=False, parsefragment=False)
138 u = util.url(path, parsequery=False, parsefragment=False)
138 if u.scheme != 'ssh' or not u.host or u.path is None:
139 if u.scheme != 'ssh' or not u.host or u.path is None:
139 self._abort(error.RepoError(_("couldn't parse location %s") % path))
140 self._abort(error.RepoError(_("couldn't parse location %s") % path))
140
141
141 self.user = u.user
142 self.user = u.user
142 if u.passwd is not None:
143 if u.passwd is not None:
143 self._abort(error.RepoError(_("password in URL not supported")))
144 self._abort(error.RepoError(_("password in URL not supported")))
144 self.host = u.host
145 self.host = u.host
145 self.port = u.port
146 self.port = u.port
146 self.path = u.path or "."
147 self.path = u.path or "."
147
148
148 sshcmd = self.ui.config("ui", "ssh", "ssh")
149 sshcmd = self.ui.config("ui", "ssh", "ssh")
149 remotecmd = self.ui.config("ui", "remotecmd", "hg")
150 remotecmd = self.ui.config("ui", "remotecmd", "hg")
150
151
151 args = util.sshargs(sshcmd,
152 args = util.sshargs(sshcmd,
152 _serverquote(self.host),
153 _serverquote(self.host),
153 _serverquote(self.user),
154 _serverquote(self.user),
154 _serverquote(self.port))
155 _serverquote(self.port))
155
156
156 if create:
157 if create:
157 cmd = '%s %s %s' % (sshcmd, args,
158 cmd = '%s %s %s' % (sshcmd, args,
158 util.shellquote("%s init %s" %
159 util.shellquote("%s init %s" %
159 (_serverquote(remotecmd), _serverquote(self.path))))
160 (_serverquote(remotecmd), _serverquote(self.path))))
160 ui.debug('running %s\n' % cmd)
161 ui.debug('running %s\n' % cmd)
161 res = ui.system(cmd, blockedtag='sshpeer')
162 res = ui.system(cmd, blockedtag='sshpeer')
162 if res != 0:
163 if res != 0:
163 self._abort(error.RepoError(_("could not create remote repo")))
164 self._abort(error.RepoError(_("could not create remote repo")))
164
165
165 self._validaterepo(sshcmd, args, remotecmd)
166 self._validaterepo(sshcmd, args, remotecmd)
166
167
167 def url(self):
168 def url(self):
168 return self._url
169 return self._url
169
170
170 def _validaterepo(self, sshcmd, args, remotecmd):
171 def _validaterepo(self, sshcmd, args, remotecmd):
171 # clean up previous run
172 # clean up previous run
172 self.cleanup()
173 self.cleanup()
173
174
174 cmd = '%s %s %s' % (sshcmd, args,
175 cmd = '%s %s %s' % (sshcmd, args,
175 util.shellquote("%s -R %s serve --stdio" %
176 util.shellquote("%s -R %s serve --stdio" %
176 (_serverquote(remotecmd), _serverquote(self.path))))
177 (_serverquote(remotecmd), _serverquote(self.path))))
177 self.ui.debug('running %s\n' % cmd)
178 self.ui.debug('running %s\n' % cmd)
178 cmd = util.quotecommand(cmd)
179 cmd = util.quotecommand(cmd)
179
180
180 # while self.subprocess isn't used, having it allows the subprocess to
181 # while self.subprocess isn't used, having it allows the subprocess to
181 # clean up correctly later
182 # clean up correctly later
182 #
183 #
183 # no buffering allows the use of 'select'
184 # no buffering allows the use of 'select'
184 # feel free to remove buffering and select usage when we ultimately
185 # feel free to remove buffering and select usage when we ultimately
185 # move to threading.
186 # move to threading.
186 sub = util.popen4(cmd, bufsize=0)
187 sub = util.popen4(cmd, bufsize=0)
187 self.pipeo, self.pipei, self.pipee, self.subprocess = sub
188 self.pipeo, self.pipei, self.pipee, self.subprocess = sub
188
189
189 self.pipei = util.bufferedinputpipe(self.pipei)
190 self.pipei = util.bufferedinputpipe(self.pipei)
190 self.pipei = doublepipe(self.ui, self.pipei, self.pipee)
191 self.pipei = doublepipe(self.ui, self.pipei, self.pipee)
191 self.pipeo = doublepipe(self.ui, self.pipeo, self.pipee)
192 self.pipeo = doublepipe(self.ui, self.pipeo, self.pipee)
192
193
193 # skip any noise generated by remote shell
194 # skip any noise generated by remote shell
194 self._callstream("hello")
195 self._callstream("hello")
195 r = self._callstream("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
196 r = self._callstream("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
196 lines = ["", "dummy"]
197 lines = ["", "dummy"]
197 max_noise = 500
198 max_noise = 500
198 while lines[-1] and max_noise:
199 while lines[-1] and max_noise:
199 l = r.readline()
200 l = r.readline()
200 self.readerr()
201 self.readerr()
201 if lines[-1] == "1\n" and l == "\n":
202 if lines[-1] == "1\n" and l == "\n":
202 break
203 break
203 if l:
204 if l:
204 self.ui.debug("remote: ", l)
205 self.ui.debug("remote: ", l)
205 lines.append(l)
206 lines.append(l)
206 max_noise -= 1
207 max_noise -= 1
207 else:
208 else:
208 self._abort(error.RepoError(_('no suitable response from '
209 self._abort(error.RepoError(_('no suitable response from '
209 'remote hg')))
210 'remote hg')))
210
211
211 self._caps = set()
212 self._caps = set()
212 for l in reversed(lines):
213 for l in reversed(lines):
213 if l.startswith("capabilities:"):
214 if l.startswith("capabilities:"):
214 self._caps.update(l[:-1].split(":")[1].split())
215 self._caps.update(l[:-1].split(":")[1].split())
215 break
216 break
216
217
    def _capabilities(self):
        return self._caps

    def readerr(self):
        _forwardoutput(self.ui, self.pipee)

    def _abort(self, exception):
        self.cleanup()
        raise exception

    def cleanup(self):
        if self.pipeo is None:
            return
        self.pipeo.close()
        self.pipei.close()
        try:
            # read the error descriptor until EOF
            for l in self.pipee:
                self.ui.status(_("remote: "), l)
        except (IOError, ValueError):
            pass
        self.pipee.close()

    __del__ = cleanup

    def _submitbatch(self, req):
        rsp = self._callstream("batch", cmds=wireproto.encodebatchcmds(req))
        available = self._getamount()
        # TODO this response parsing is probably suboptimal for large
        # batches with large responses.
        toread = min(available, 1024)
        work = rsp.read(toread)
        available -= toread
        chunk = work
        while chunk:
            while ';' in work:
                one, work = work.split(';', 1)
                yield wireproto.unescapearg(one)
            toread = min(available, 1024)
            chunk = rsp.read(toread)
            available -= toread
            work += chunk
        yield wireproto.unescapearg(work)

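The loop in _submitbatch reads a ';'-separated batch response in pieces of at most 1024 bytes and yields each complete item as soon as it is available. A standalone sketch of that chunked splitting, with wireproto.unescapearg replaced by an identity function so only the framing logic is shown:

import io

def splitbatch(rsp, available, unescape=lambda s: s, blocksize=1024):
    # read the response in fixed-size pieces, splitting off every complete
    # ';'-terminated item and keeping the trailing partial piece for the
    # next round, the same shape as the loop in _submitbatch above
    toread = min(available, blocksize)
    work = rsp.read(toread)
    available -= toread
    chunk = work
    while chunk:
        while b';' in work:
            one, work = work.split(b';', 1)
            yield unescape(one)
        toread = min(available, blocksize)
        chunk = rsp.read(toread)
        available -= toread
        work += chunk
    yield unescape(work)

payload = b'result-one;result-two;result-three'
print(list(splitbatch(io.BytesIO(payload), len(payload), blocksize=8)))

Reading in bounded pieces keeps memory use roughly proportional to the largest single item rather than to the whole response, which is also what the TODO comment above is hinting at.
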
    def _callstream(self, cmd, **args):
        args = pycompat.byteskwargs(args)
        self.ui.debug("sending %s command\n" % cmd)
        self.pipeo.write("%s\n" % cmd)
        _func, names = wireproto.commands[cmd]
        keys = names.split()
        wireargs = {}
        for k in keys:
            if k == '*':
                wireargs['*'] = args
                break
            else:
                wireargs[k] = args[k]
                del args[k]
        for k, v in sorted(wireargs.iteritems()):
            self.pipeo.write("%s %d\n" % (k, len(v)))
            if isinstance(v, dict):
                for dk, dv in v.iteritems():
                    self.pipeo.write("%s %d\n" % (dk, len(dv)))
                    self.pipeo.write(dv)
            else:
                self.pipeo.write(v)
        self.pipeo.flush()

        return self.pipei

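The args = pycompat.byteskwargs(args) line is the change this revision makes here: on Python 3 the keys gathered through **args are unicode strings, while the wire-protocol tables and the writes below expect bytes. A toy stand-in with roughly the same effect (the real helper lives in mercurial/pycompat.py; the latin-1 encoding below is an assumption for illustration, not necessarily what it uses):

import sys

def byteskwargs(dic):
    # hypothetical stand-in: re-key a **kwargs dict with bytes keys on
    # Python 3, and return it unchanged on Python 2 where str is bytes
    if sys.version_info[0] < 3:
        return dic
    return {k.encode('latin-1'): v for k, v in dic.items()}

def callstream(cmd, **args):
    args = byteskwargs(args)
    return sorted(args)

print(callstream('between', pairs=b'0-0'))   # [b'pairs'] rather than ['pairs']
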
    def _callcompressable(self, cmd, **args):
        return self._callstream(cmd, **args)

    def _call(self, cmd, **args):
        self._callstream(cmd, **args)
        return self._recv()

    def _callpush(self, cmd, fp, **args):
        r = self._call(cmd, **args)
        if r:
            return '', r
        for d in iter(lambda: fp.read(4096), ''):
            self._send(d)
        self._send("", flush=True)
        r = self._recv()
        if r:
            return '', r
        return self._recv(), ''

    def _calltwowaystream(self, cmd, fp, **args):
        r = self._call(cmd, **args)
        if r:
            # XXX needs to be made better
            raise error.Abort(_('unexpected remote reply: %s') % r)
        for d in iter(lambda: fp.read(4096), ''):
            self._send(d)
        self._send("", flush=True)
        return self.pipei

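Both push helpers stream fp in 4096-byte frames with the two-argument form of iter(), which keeps calling fp.read(4096) until it returns the sentinel (an empty string at EOF), and then send an empty frame as the terminator. A tiny illustration of that idiom, using bytes as io.BytesIO returns on Python 3:

import io

src = io.BytesIO(b'x' * 10000)
# iter(callable, sentinel): read() is called until it returns b'' at EOF
sizes = [len(chunk) for chunk in iter(lambda: src.read(4096), b'')]
print(sizes)   # [4096, 4096, 1808]
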
    def _getamount(self):
        l = self.pipei.readline()
        if l == '\n':
            self.readerr()
            msg = _('check previous remote output')
            self._abort(error.OutOfBandError(hint=msg))
        self.readerr()
        try:
            return int(l)
        except ValueError:
            self._abort(error.ResponseError(_("unexpected response:"), l))

    def _recv(self):
        return self.pipei.read(self._getamount())

    def _send(self, data, flush=False):
        self.pipeo.write("%d\n" % len(data))
        if data:
            self.pipeo.write(data)
        if flush:
            self.pipeo.flush()
        self.readerr()

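_send(), _getamount() and _recv() together implement a simple length-prefixed framing: each payload is preceded by its decimal size and a newline, and the reader first parses that size and then reads exactly that many bytes. A minimal standalone sketch over an in-memory buffer, leaving out the error handling and stderr draining that the methods above do:

import io

def sendframe(pipe, data):
    # write the decimal length, a newline, then the payload itself
    pipe.write(b'%d\n' % len(data))
    if data:
        pipe.write(data)

def recvframe(pipe):
    # read the length line back, then exactly that many bytes
    return pipe.read(int(pipe.readline()))

buf = io.BytesIO()
sendframe(buf, b'hello, remote')
sendframe(buf, b'')              # an empty frame marks the end of a stream
buf.seek(0)
print(recvframe(buf))            # b'hello, remote'
print(recvframe(buf))            # b''
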
    def lock(self):
        self._call("lock")
        return remotelock(self)

    def unlock(self):
        self._call("unlock")

    def addchangegroup(self, cg, source, url, lock=None):
        '''Send a changegroup to the remote server. Return an integer
        similar to unbundle(). DEPRECATED, since it requires locking the
        remote.'''
        d = self._call("addchangegroup")
        if d:
            self._abort(error.RepoError(_("push refused: %s") % d))
        for d in iter(lambda: cg.read(4096), ''):
            self.pipeo.write(d)
            self.readerr()

        self.pipeo.flush()

        self.readerr()
        r = self._recv()
        if not r:
            return 1
        try:
            return int(r)
        except ValueError:
            self._abort(error.ResponseError(_("unexpected response:"), r))

instance = sshpeer