dirstateguard: move to new module so I can break some layering violations...
Augie Fackler
r30488:751639bf default
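The only change in the hunk below is an addition to cmdutil.py's relative-import block: the new dirstateguard module is imported under the alias dirstateguardmod. As a minimal, hypothetical sketch of how code inside the mercurial package can reach the guard through the new module (the helper name and the guard usage are illustrative assumptions, not code from this changeset):

    # Mirrors the import added in this diff; only valid inside the mercurial package.
    from . import dirstateguard as dirstateguardmod

    def _guardedoperation(repo):
        # Hypothetical helper: wrap a working-directory mutation in a
        # dirstateguard so the dirstate can be restored if the operation fails.
        guard = dirstateguardmod.dirstateguard(repo, 'guardedoperation')
        try:
            # ... mutate the working directory / dirstate here ...
            guard.close()
        finally:
            guard.release()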
@@ -1,3577 +1,3525 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import sys
13 import sys
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 bin,
18 bin,
19 hex,
19 hex,
20 nullid,
20 nullid,
21 nullrev,
21 nullrev,
22 short,
22 short,
23 )
23 )
24
24
25 from . import (
25 from . import (
26 bookmarks,
26 bookmarks,
27 changelog,
27 changelog,
28 copies,
28 copies,
29 crecord as crecordmod,
29 crecord as crecordmod,
30 dirstateguard as dirstateguardmod,
30 encoding,
31 encoding,
31 error,
32 error,
32 formatter,
33 formatter,
33 graphmod,
34 graphmod,
34 lock as lockmod,
35 lock as lockmod,
35 match as matchmod,
36 match as matchmod,
36 obsolete,
37 obsolete,
37 patch,
38 patch,
38 pathutil,
39 pathutil,
39 phases,
40 phases,
40 repair,
41 repair,
41 revlog,
42 revlog,
42 revset,
43 revset,
43 scmutil,
44 scmutil,
44 templatekw,
45 templatekw,
45 templater,
46 templater,
46 util,
47 util,
47 )
48 )
48 stringio = util.stringio
49 stringio = util.stringio
49
50
50 def ishunk(x):
51 def ishunk(x):
51 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
52 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
52 return isinstance(x, hunkclasses)
53 return isinstance(x, hunkclasses)
53
54
54 def newandmodified(chunks, originalchunks):
55 def newandmodified(chunks, originalchunks):
55 newlyaddedandmodifiedfiles = set()
56 newlyaddedandmodifiedfiles = set()
56 for chunk in chunks:
57 for chunk in chunks:
57 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
58 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
58 originalchunks:
59 originalchunks:
59 newlyaddedandmodifiedfiles.add(chunk.header.filename())
60 newlyaddedandmodifiedfiles.add(chunk.header.filename())
60 return newlyaddedandmodifiedfiles
61 return newlyaddedandmodifiedfiles
61
62
62 def parsealiases(cmd):
63 def parsealiases(cmd):
63 return cmd.lstrip("^").split("|")
64 return cmd.lstrip("^").split("|")
64
65
65 def setupwrapcolorwrite(ui):
66 def setupwrapcolorwrite(ui):
66 # wrap ui.write so diff output can be labeled/colorized
67 # wrap ui.write so diff output can be labeled/colorized
67 def wrapwrite(orig, *args, **kw):
68 def wrapwrite(orig, *args, **kw):
68 label = kw.pop('label', '')
69 label = kw.pop('label', '')
69 for chunk, l in patch.difflabel(lambda: args):
70 for chunk, l in patch.difflabel(lambda: args):
70 orig(chunk, label=label + l)
71 orig(chunk, label=label + l)
71
72
72 oldwrite = ui.write
73 oldwrite = ui.write
73 def wrap(*args, **kwargs):
74 def wrap(*args, **kwargs):
74 return wrapwrite(oldwrite, *args, **kwargs)
75 return wrapwrite(oldwrite, *args, **kwargs)
75 setattr(ui, 'write', wrap)
76 setattr(ui, 'write', wrap)
76 return oldwrite
77 return oldwrite
77
78
78 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
79 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
79 if usecurses:
80 if usecurses:
80 if testfile:
81 if testfile:
81 recordfn = crecordmod.testdecorator(testfile,
82 recordfn = crecordmod.testdecorator(testfile,
82 crecordmod.testchunkselector)
83 crecordmod.testchunkselector)
83 else:
84 else:
84 recordfn = crecordmod.chunkselector
85 recordfn = crecordmod.chunkselector
85
86
86 return crecordmod.filterpatch(ui, originalhunks, recordfn)
87 return crecordmod.filterpatch(ui, originalhunks, recordfn)
87
88
88 else:
89 else:
89 return patch.filterpatch(ui, originalhunks, operation)
90 return patch.filterpatch(ui, originalhunks, operation)
90
91
91 def recordfilter(ui, originalhunks, operation=None):
92 def recordfilter(ui, originalhunks, operation=None):
92 """ Prompts the user to filter the originalhunks and return a list of
93 """ Prompts the user to filter the originalhunks and return a list of
93 selected hunks.
94 selected hunks.
94 *operation* is used for to build ui messages to indicate the user what
95 *operation* is used for to build ui messages to indicate the user what
95 kind of filtering they are doing: reverting, committing, shelving, etc.
96 kind of filtering they are doing: reverting, committing, shelving, etc.
96 (see patch.filterpatch).
97 (see patch.filterpatch).
97 """
98 """
98 usecurses = crecordmod.checkcurses(ui)
99 usecurses = crecordmod.checkcurses(ui)
99 testfile = ui.config('experimental', 'crecordtest', None)
100 testfile = ui.config('experimental', 'crecordtest', None)
100 oldwrite = setupwrapcolorwrite(ui)
101 oldwrite = setupwrapcolorwrite(ui)
101 try:
102 try:
102 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
103 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
103 testfile, operation)
104 testfile, operation)
104 finally:
105 finally:
105 ui.write = oldwrite
106 ui.write = oldwrite
106 return newchunks, newopts
107 return newchunks, newopts
107
108
108 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
109 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
109 filterfn, *pats, **opts):
110 filterfn, *pats, **opts):
110 from . import merge as mergemod
111 from . import merge as mergemod
111 if not ui.interactive():
112 if not ui.interactive():
112 if cmdsuggest:
113 if cmdsuggest:
113 msg = _('running non-interactively, use %s instead') % cmdsuggest
114 msg = _('running non-interactively, use %s instead') % cmdsuggest
114 else:
115 else:
115 msg = _('running non-interactively')
116 msg = _('running non-interactively')
116 raise error.Abort(msg)
117 raise error.Abort(msg)
117
118
118 # make sure username is set before going interactive
119 # make sure username is set before going interactive
119 if not opts.get('user'):
120 if not opts.get('user'):
120 ui.username() # raise exception, username not provided
121 ui.username() # raise exception, username not provided
121
122
122 def recordfunc(ui, repo, message, match, opts):
123 def recordfunc(ui, repo, message, match, opts):
123 """This is generic record driver.
124 """This is generic record driver.
124
125
125 Its job is to interactively filter local changes, and
126 Its job is to interactively filter local changes, and
126 accordingly prepare working directory into a state in which the
127 accordingly prepare working directory into a state in which the
127 job can be delegated to a non-interactive commit command such as
128 job can be delegated to a non-interactive commit command such as
128 'commit' or 'qrefresh'.
129 'commit' or 'qrefresh'.
129
130
130 After the actual job is done by non-interactive command, the
131 After the actual job is done by non-interactive command, the
131 working directory is restored to its original state.
132 working directory is restored to its original state.
132
133
133 In the end we'll record interesting changes, and everything else
134 In the end we'll record interesting changes, and everything else
134 will be left in place, so the user can continue working.
135 will be left in place, so the user can continue working.
135 """
136 """
136
137
137 checkunfinished(repo, commit=True)
138 checkunfinished(repo, commit=True)
138 wctx = repo[None]
139 wctx = repo[None]
139 merge = len(wctx.parents()) > 1
140 merge = len(wctx.parents()) > 1
140 if merge:
141 if merge:
141 raise error.Abort(_('cannot partially commit a merge '
142 raise error.Abort(_('cannot partially commit a merge '
142 '(use "hg commit" instead)'))
143 '(use "hg commit" instead)'))
143
144
144 def fail(f, msg):
145 def fail(f, msg):
145 raise error.Abort('%s: %s' % (f, msg))
146 raise error.Abort('%s: %s' % (f, msg))
146
147
147 force = opts.get('force')
148 force = opts.get('force')
148 if not force:
149 if not force:
149 vdirs = []
150 vdirs = []
150 match.explicitdir = vdirs.append
151 match.explicitdir = vdirs.append
151 match.bad = fail
152 match.bad = fail
152
153
153 status = repo.status(match=match)
154 status = repo.status(match=match)
154 if not force:
155 if not force:
155 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
156 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
156 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
157 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
157 diffopts.nodates = True
158 diffopts.nodates = True
158 diffopts.git = True
159 diffopts.git = True
159 diffopts.showfunc = True
160 diffopts.showfunc = True
160 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
161 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
161 originalchunks = patch.parsepatch(originaldiff)
162 originalchunks = patch.parsepatch(originaldiff)
162
163
163 # 1. filter patch, since we are intending to apply subset of it
164 # 1. filter patch, since we are intending to apply subset of it
164 try:
165 try:
165 chunks, newopts = filterfn(ui, originalchunks)
166 chunks, newopts = filterfn(ui, originalchunks)
166 except patch.PatchError as err:
167 except patch.PatchError as err:
167 raise error.Abort(_('error parsing patch: %s') % err)
168 raise error.Abort(_('error parsing patch: %s') % err)
168 opts.update(newopts)
169 opts.update(newopts)
169
170
170 # We need to keep a backup of files that have been newly added and
171 # We need to keep a backup of files that have been newly added and
171 # modified during the recording process because there is a previous
172 # modified during the recording process because there is a previous
172 # version without the edit in the workdir
173 # version without the edit in the workdir
173 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
174 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
174 contenders = set()
175 contenders = set()
175 for h in chunks:
176 for h in chunks:
176 try:
177 try:
177 contenders.update(set(h.files()))
178 contenders.update(set(h.files()))
178 except AttributeError:
179 except AttributeError:
179 pass
180 pass
180
181
181 changed = status.modified + status.added + status.removed
182 changed = status.modified + status.added + status.removed
182 newfiles = [f for f in changed if f in contenders]
183 newfiles = [f for f in changed if f in contenders]
183 if not newfiles:
184 if not newfiles:
184 ui.status(_('no changes to record\n'))
185 ui.status(_('no changes to record\n'))
185 return 0
186 return 0
186
187
187 modified = set(status.modified)
188 modified = set(status.modified)
188
189
189 # 2. backup changed files, so we can restore them in the end
190 # 2. backup changed files, so we can restore them in the end
190
191
191 if backupall:
192 if backupall:
192 tobackup = changed
193 tobackup = changed
193 else:
194 else:
194 tobackup = [f for f in newfiles if f in modified or f in \
195 tobackup = [f for f in newfiles if f in modified or f in \
195 newlyaddedandmodifiedfiles]
196 newlyaddedandmodifiedfiles]
196 backups = {}
197 backups = {}
197 if tobackup:
198 if tobackup:
198 backupdir = repo.join('record-backups')
199 backupdir = repo.join('record-backups')
199 try:
200 try:
200 os.mkdir(backupdir)
201 os.mkdir(backupdir)
201 except OSError as err:
202 except OSError as err:
202 if err.errno != errno.EEXIST:
203 if err.errno != errno.EEXIST:
203 raise
204 raise
204 try:
205 try:
205 # backup continues
206 # backup continues
206 for f in tobackup:
207 for f in tobackup:
207 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
208 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
208 dir=backupdir)
209 dir=backupdir)
209 os.close(fd)
210 os.close(fd)
210 ui.debug('backup %r as %r\n' % (f, tmpname))
211 ui.debug('backup %r as %r\n' % (f, tmpname))
211 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
212 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
212 backups[f] = tmpname
213 backups[f] = tmpname
213
214
214 fp = stringio()
215 fp = stringio()
215 for c in chunks:
216 for c in chunks:
216 fname = c.filename()
217 fname = c.filename()
217 if fname in backups:
218 if fname in backups:
218 c.write(fp)
219 c.write(fp)
219 dopatch = fp.tell()
220 dopatch = fp.tell()
220 fp.seek(0)
221 fp.seek(0)
221
222
222 # 2.5 optionally review / modify patch in text editor
223 # 2.5 optionally review / modify patch in text editor
223 if opts.get('review', False):
224 if opts.get('review', False):
224 patchtext = (crecordmod.diffhelptext
225 patchtext = (crecordmod.diffhelptext
225 + crecordmod.patchhelptext
226 + crecordmod.patchhelptext
226 + fp.read())
227 + fp.read())
227 reviewedpatch = ui.edit(patchtext, "",
228 reviewedpatch = ui.edit(patchtext, "",
228 extra={"suffix": ".diff"})
229 extra={"suffix": ".diff"})
229 fp.truncate(0)
230 fp.truncate(0)
230 fp.write(reviewedpatch)
231 fp.write(reviewedpatch)
231 fp.seek(0)
232 fp.seek(0)
232
233
233 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
234 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
234 # 3a. apply filtered patch to clean repo (clean)
235 # 3a. apply filtered patch to clean repo (clean)
235 if backups:
236 if backups:
236 # Equivalent to hg.revert
237 # Equivalent to hg.revert
237 m = scmutil.matchfiles(repo, backups.keys())
238 m = scmutil.matchfiles(repo, backups.keys())
238 mergemod.update(repo, repo.dirstate.p1(),
239 mergemod.update(repo, repo.dirstate.p1(),
239 False, True, matcher=m)
240 False, True, matcher=m)
240
241
241 # 3b. (apply)
242 # 3b. (apply)
242 if dopatch:
243 if dopatch:
243 try:
244 try:
244 ui.debug('applying patch\n')
245 ui.debug('applying patch\n')
245 ui.debug(fp.getvalue())
246 ui.debug(fp.getvalue())
246 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
247 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
247 except patch.PatchError as err:
248 except patch.PatchError as err:
248 raise error.Abort(str(err))
249 raise error.Abort(str(err))
249 del fp
250 del fp
250
251
251 # 4. We prepared working directory according to filtered
252 # 4. We prepared working directory according to filtered
252 # patch. Now is the time to delegate the job to
253 # patch. Now is the time to delegate the job to
253 # commit/qrefresh or the like!
254 # commit/qrefresh or the like!
254
255
255 # Make all of the pathnames absolute.
256 # Make all of the pathnames absolute.
256 newfiles = [repo.wjoin(nf) for nf in newfiles]
257 newfiles = [repo.wjoin(nf) for nf in newfiles]
257 return commitfunc(ui, repo, *newfiles, **opts)
258 return commitfunc(ui, repo, *newfiles, **opts)
258 finally:
259 finally:
259 # 5. finally restore backed-up files
260 # 5. finally restore backed-up files
260 try:
261 try:
261 dirstate = repo.dirstate
262 dirstate = repo.dirstate
262 for realname, tmpname in backups.iteritems():
263 for realname, tmpname in backups.iteritems():
263 ui.debug('restoring %r to %r\n' % (tmpname, realname))
264 ui.debug('restoring %r to %r\n' % (tmpname, realname))
264
265
265 if dirstate[realname] == 'n':
266 if dirstate[realname] == 'n':
266 # without normallookup, restoring timestamp
267 # without normallookup, restoring timestamp
267 # may cause partially committed files
268 # may cause partially committed files
268 # to be treated as unmodified
269 # to be treated as unmodified
269 dirstate.normallookup(realname)
270 dirstate.normallookup(realname)
270
271
271 # copystat=True here and above are a hack to trick any
272 # copystat=True here and above are a hack to trick any
272 # editors that have f open that we haven't modified them.
273 # editors that have f open that we haven't modified them.
273 #
274 #
274 # Also note that this racy as an editor could notice the
275 # Also note that this racy as an editor could notice the
275 # file's mtime before we've finished writing it.
276 # file's mtime before we've finished writing it.
276 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
277 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
277 os.unlink(tmpname)
278 os.unlink(tmpname)
278 if tobackup:
279 if tobackup:
279 os.rmdir(backupdir)
280 os.rmdir(backupdir)
280 except OSError:
281 except OSError:
281 pass
282 pass
282
283
283 def recordinwlock(ui, repo, message, match, opts):
284 def recordinwlock(ui, repo, message, match, opts):
284 with repo.wlock():
285 with repo.wlock():
285 return recordfunc(ui, repo, message, match, opts)
286 return recordfunc(ui, repo, message, match, opts)
286
287
287 return commit(ui, repo, recordinwlock, pats, opts)
288 return commit(ui, repo, recordinwlock, pats, opts)
288
289
289 def findpossible(cmd, table, strict=False):
290 def findpossible(cmd, table, strict=False):
290 """
291 """
291 Return cmd -> (aliases, command table entry)
292 Return cmd -> (aliases, command table entry)
292 for each matching command.
293 for each matching command.
293 Return debug commands (or their aliases) only if no normal command matches.
294 Return debug commands (or their aliases) only if no normal command matches.
294 """
295 """
295 choice = {}
296 choice = {}
296 debugchoice = {}
297 debugchoice = {}
297
298
298 if cmd in table:
299 if cmd in table:
299 # short-circuit exact matches, "log" alias beats "^log|history"
300 # short-circuit exact matches, "log" alias beats "^log|history"
300 keys = [cmd]
301 keys = [cmd]
301 else:
302 else:
302 keys = table.keys()
303 keys = table.keys()
303
304
304 allcmds = []
305 allcmds = []
305 for e in keys:
306 for e in keys:
306 aliases = parsealiases(e)
307 aliases = parsealiases(e)
307 allcmds.extend(aliases)
308 allcmds.extend(aliases)
308 found = None
309 found = None
309 if cmd in aliases:
310 if cmd in aliases:
310 found = cmd
311 found = cmd
311 elif not strict:
312 elif not strict:
312 for a in aliases:
313 for a in aliases:
313 if a.startswith(cmd):
314 if a.startswith(cmd):
314 found = a
315 found = a
315 break
316 break
316 if found is not None:
317 if found is not None:
317 if aliases[0].startswith("debug") or found.startswith("debug"):
318 if aliases[0].startswith("debug") or found.startswith("debug"):
318 debugchoice[found] = (aliases, table[e])
319 debugchoice[found] = (aliases, table[e])
319 else:
320 else:
320 choice[found] = (aliases, table[e])
321 choice[found] = (aliases, table[e])
321
322
322 if not choice and debugchoice:
323 if not choice and debugchoice:
323 choice = debugchoice
324 choice = debugchoice
324
325
325 return choice, allcmds
326 return choice, allcmds
326
327
327 def findcmd(cmd, table, strict=True):
328 def findcmd(cmd, table, strict=True):
328 """Return (aliases, command table entry) for command string."""
329 """Return (aliases, command table entry) for command string."""
329 choice, allcmds = findpossible(cmd, table, strict)
330 choice, allcmds = findpossible(cmd, table, strict)
330
331
331 if cmd in choice:
332 if cmd in choice:
332 return choice[cmd]
333 return choice[cmd]
333
334
334 if len(choice) > 1:
335 if len(choice) > 1:
335 clist = choice.keys()
336 clist = choice.keys()
336 clist.sort()
337 clist.sort()
337 raise error.AmbiguousCommand(cmd, clist)
338 raise error.AmbiguousCommand(cmd, clist)
338
339
339 if choice:
340 if choice:
340 return choice.values()[0]
341 return choice.values()[0]
341
342
342 raise error.UnknownCommand(cmd, allcmds)
343 raise error.UnknownCommand(cmd, allcmds)
343
344
344 def findrepo(p):
345 def findrepo(p):
345 while not os.path.isdir(os.path.join(p, ".hg")):
346 while not os.path.isdir(os.path.join(p, ".hg")):
346 oldp, p = p, os.path.dirname(p)
347 oldp, p = p, os.path.dirname(p)
347 if p == oldp:
348 if p == oldp:
348 return None
349 return None
349
350
350 return p
351 return p
351
352
352 def bailifchanged(repo, merge=True):
353 def bailifchanged(repo, merge=True):
353 if merge and repo.dirstate.p2() != nullid:
354 if merge and repo.dirstate.p2() != nullid:
354 raise error.Abort(_('outstanding uncommitted merge'))
355 raise error.Abort(_('outstanding uncommitted merge'))
355 modified, added, removed, deleted = repo.status()[:4]
356 modified, added, removed, deleted = repo.status()[:4]
356 if modified or added or removed or deleted:
357 if modified or added or removed or deleted:
357 raise error.Abort(_('uncommitted changes'))
358 raise error.Abort(_('uncommitted changes'))
358 ctx = repo[None]
359 ctx = repo[None]
359 for s in sorted(ctx.substate):
360 for s in sorted(ctx.substate):
360 ctx.sub(s).bailifchanged()
361 ctx.sub(s).bailifchanged()
361
362
362 def logmessage(ui, opts):
363 def logmessage(ui, opts):
363 """ get the log message according to -m and -l option """
364 """ get the log message according to -m and -l option """
364 message = opts.get('message')
365 message = opts.get('message')
365 logfile = opts.get('logfile')
366 logfile = opts.get('logfile')
366
367
367 if message and logfile:
368 if message and logfile:
368 raise error.Abort(_('options --message and --logfile are mutually '
369 raise error.Abort(_('options --message and --logfile are mutually '
369 'exclusive'))
370 'exclusive'))
370 if not message and logfile:
371 if not message and logfile:
371 try:
372 try:
372 if logfile == '-':
373 if logfile == '-':
373 message = ui.fin.read()
374 message = ui.fin.read()
374 else:
375 else:
375 message = '\n'.join(util.readfile(logfile).splitlines())
376 message = '\n'.join(util.readfile(logfile).splitlines())
376 except IOError as inst:
377 except IOError as inst:
377 raise error.Abort(_("can't read commit message '%s': %s") %
378 raise error.Abort(_("can't read commit message '%s': %s") %
378 (logfile, inst.strerror))
379 (logfile, inst.strerror))
379 return message
380 return message
380
381
381 def mergeeditform(ctxorbool, baseformname):
382 def mergeeditform(ctxorbool, baseformname):
382 """return appropriate editform name (referencing a committemplate)
383 """return appropriate editform name (referencing a committemplate)
383
384
384 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
385 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
385 merging is committed.
386 merging is committed.
386
387
387 This returns baseformname with '.merge' appended if it is a merge,
388 This returns baseformname with '.merge' appended if it is a merge,
388 otherwise '.normal' is appended.
389 otherwise '.normal' is appended.
389 """
390 """
390 if isinstance(ctxorbool, bool):
391 if isinstance(ctxorbool, bool):
391 if ctxorbool:
392 if ctxorbool:
392 return baseformname + ".merge"
393 return baseformname + ".merge"
393 elif 1 < len(ctxorbool.parents()):
394 elif 1 < len(ctxorbool.parents()):
394 return baseformname + ".merge"
395 return baseformname + ".merge"
395
396
396 return baseformname + ".normal"
397 return baseformname + ".normal"
397
398
398 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
399 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
399 editform='', **opts):
400 editform='', **opts):
400 """get appropriate commit message editor according to '--edit' option
401 """get appropriate commit message editor according to '--edit' option
401
402
402 'finishdesc' is a function to be called with edited commit message
403 'finishdesc' is a function to be called with edited commit message
403 (= 'description' of the new changeset) just after editing, but
404 (= 'description' of the new changeset) just after editing, but
404 before checking empty-ness. It should return actual text to be
405 before checking empty-ness. It should return actual text to be
405 stored into history. This allows to change description before
406 stored into history. This allows to change description before
406 storing.
407 storing.
407
408
408 'extramsg' is a extra message to be shown in the editor instead of
409 'extramsg' is a extra message to be shown in the editor instead of
409 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
410 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
410 is automatically added.
411 is automatically added.
411
412
412 'editform' is a dot-separated list of names, to distinguish
413 'editform' is a dot-separated list of names, to distinguish
413 the purpose of commit text editing.
414 the purpose of commit text editing.
414
415
415 'getcommiteditor' returns 'commitforceeditor' regardless of
416 'getcommiteditor' returns 'commitforceeditor' regardless of
416 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
417 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
417 they are specific for usage in MQ.
418 they are specific for usage in MQ.
418 """
419 """
419 if edit or finishdesc or extramsg:
420 if edit or finishdesc or extramsg:
420 return lambda r, c, s: commitforceeditor(r, c, s,
421 return lambda r, c, s: commitforceeditor(r, c, s,
421 finishdesc=finishdesc,
422 finishdesc=finishdesc,
422 extramsg=extramsg,
423 extramsg=extramsg,
423 editform=editform)
424 editform=editform)
424 elif editform:
425 elif editform:
425 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
426 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
426 else:
427 else:
427 return commiteditor
428 return commiteditor
428
429
429 def loglimit(opts):
430 def loglimit(opts):
430 """get the log limit according to option -l/--limit"""
431 """get the log limit according to option -l/--limit"""
431 limit = opts.get('limit')
432 limit = opts.get('limit')
432 if limit:
433 if limit:
433 try:
434 try:
434 limit = int(limit)
435 limit = int(limit)
435 except ValueError:
436 except ValueError:
436 raise error.Abort(_('limit must be a positive integer'))
437 raise error.Abort(_('limit must be a positive integer'))
437 if limit <= 0:
438 if limit <= 0:
438 raise error.Abort(_('limit must be positive'))
439 raise error.Abort(_('limit must be positive'))
439 else:
440 else:
440 limit = None
441 limit = None
441 return limit
442 return limit
442
443
443 def makefilename(repo, pat, node, desc=None,
444 def makefilename(repo, pat, node, desc=None,
444 total=None, seqno=None, revwidth=None, pathname=None):
445 total=None, seqno=None, revwidth=None, pathname=None):
445 node_expander = {
446 node_expander = {
446 'H': lambda: hex(node),
447 'H': lambda: hex(node),
447 'R': lambda: str(repo.changelog.rev(node)),
448 'R': lambda: str(repo.changelog.rev(node)),
448 'h': lambda: short(node),
449 'h': lambda: short(node),
449 'm': lambda: re.sub('[^\w]', '_', str(desc))
450 'm': lambda: re.sub('[^\w]', '_', str(desc))
450 }
451 }
451 expander = {
452 expander = {
452 '%': lambda: '%',
453 '%': lambda: '%',
453 'b': lambda: os.path.basename(repo.root),
454 'b': lambda: os.path.basename(repo.root),
454 }
455 }
455
456
456 try:
457 try:
457 if node:
458 if node:
458 expander.update(node_expander)
459 expander.update(node_expander)
459 if node:
460 if node:
460 expander['r'] = (lambda:
461 expander['r'] = (lambda:
461 str(repo.changelog.rev(node)).zfill(revwidth or 0))
462 str(repo.changelog.rev(node)).zfill(revwidth or 0))
462 if total is not None:
463 if total is not None:
463 expander['N'] = lambda: str(total)
464 expander['N'] = lambda: str(total)
464 if seqno is not None:
465 if seqno is not None:
465 expander['n'] = lambda: str(seqno)
466 expander['n'] = lambda: str(seqno)
466 if total is not None and seqno is not None:
467 if total is not None and seqno is not None:
467 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
468 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
468 if pathname is not None:
469 if pathname is not None:
469 expander['s'] = lambda: os.path.basename(pathname)
470 expander['s'] = lambda: os.path.basename(pathname)
470 expander['d'] = lambda: os.path.dirname(pathname) or '.'
471 expander['d'] = lambda: os.path.dirname(pathname) or '.'
471 expander['p'] = lambda: pathname
472 expander['p'] = lambda: pathname
472
473
473 newname = []
474 newname = []
474 patlen = len(pat)
475 patlen = len(pat)
475 i = 0
476 i = 0
476 while i < patlen:
477 while i < patlen:
477 c = pat[i]
478 c = pat[i]
478 if c == '%':
479 if c == '%':
479 i += 1
480 i += 1
480 c = pat[i]
481 c = pat[i]
481 c = expander[c]()
482 c = expander[c]()
482 newname.append(c)
483 newname.append(c)
483 i += 1
484 i += 1
484 return ''.join(newname)
485 return ''.join(newname)
485 except KeyError as inst:
486 except KeyError as inst:
486 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
487 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
487 inst.args[0])
488 inst.args[0])
488
489
489 class _unclosablefile(object):
490 class _unclosablefile(object):
490 def __init__(self, fp):
491 def __init__(self, fp):
491 self._fp = fp
492 self._fp = fp
492
493
493 def close(self):
494 def close(self):
494 pass
495 pass
495
496
496 def __iter__(self):
497 def __iter__(self):
497 return iter(self._fp)
498 return iter(self._fp)
498
499
499 def __getattr__(self, attr):
500 def __getattr__(self, attr):
500 return getattr(self._fp, attr)
501 return getattr(self._fp, attr)
501
502
502 def __enter__(self):
503 def __enter__(self):
503 return self
504 return self
504
505
505 def __exit__(self, exc_type, exc_value, exc_tb):
506 def __exit__(self, exc_type, exc_value, exc_tb):
506 pass
507 pass
507
508
508 def makefileobj(repo, pat, node=None, desc=None, total=None,
509 def makefileobj(repo, pat, node=None, desc=None, total=None,
509 seqno=None, revwidth=None, mode='wb', modemap=None,
510 seqno=None, revwidth=None, mode='wb', modemap=None,
510 pathname=None):
511 pathname=None):
511
512
512 writable = mode not in ('r', 'rb')
513 writable = mode not in ('r', 'rb')
513
514
514 if not pat or pat == '-':
515 if not pat or pat == '-':
515 if writable:
516 if writable:
516 fp = repo.ui.fout
517 fp = repo.ui.fout
517 else:
518 else:
518 fp = repo.ui.fin
519 fp = repo.ui.fin
519 return _unclosablefile(fp)
520 return _unclosablefile(fp)
520 if util.safehasattr(pat, 'write') and writable:
521 if util.safehasattr(pat, 'write') and writable:
521 return pat
522 return pat
522 if util.safehasattr(pat, 'read') and 'r' in mode:
523 if util.safehasattr(pat, 'read') and 'r' in mode:
523 return pat
524 return pat
524 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
525 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
525 if modemap is not None:
526 if modemap is not None:
526 mode = modemap.get(fn, mode)
527 mode = modemap.get(fn, mode)
527 if mode == 'wb':
528 if mode == 'wb':
528 modemap[fn] = 'ab'
529 modemap[fn] = 'ab'
529 return open(fn, mode)
530 return open(fn, mode)
530
531
531 def openrevlog(repo, cmd, file_, opts):
532 def openrevlog(repo, cmd, file_, opts):
532 """opens the changelog, manifest, a filelog or a given revlog"""
533 """opens the changelog, manifest, a filelog or a given revlog"""
533 cl = opts['changelog']
534 cl = opts['changelog']
534 mf = opts['manifest']
535 mf = opts['manifest']
535 dir = opts['dir']
536 dir = opts['dir']
536 msg = None
537 msg = None
537 if cl and mf:
538 if cl and mf:
538 msg = _('cannot specify --changelog and --manifest at the same time')
539 msg = _('cannot specify --changelog and --manifest at the same time')
539 elif cl and dir:
540 elif cl and dir:
540 msg = _('cannot specify --changelog and --dir at the same time')
541 msg = _('cannot specify --changelog and --dir at the same time')
541 elif cl or mf or dir:
542 elif cl or mf or dir:
542 if file_:
543 if file_:
543 msg = _('cannot specify filename with --changelog or --manifest')
544 msg = _('cannot specify filename with --changelog or --manifest')
544 elif not repo:
545 elif not repo:
545 msg = _('cannot specify --changelog or --manifest or --dir '
546 msg = _('cannot specify --changelog or --manifest or --dir '
546 'without a repository')
547 'without a repository')
547 if msg:
548 if msg:
548 raise error.Abort(msg)
549 raise error.Abort(msg)
549
550
550 r = None
551 r = None
551 if repo:
552 if repo:
552 if cl:
553 if cl:
553 r = repo.unfiltered().changelog
554 r = repo.unfiltered().changelog
554 elif dir:
555 elif dir:
555 if 'treemanifest' not in repo.requirements:
556 if 'treemanifest' not in repo.requirements:
556 raise error.Abort(_("--dir can only be used on repos with "
557 raise error.Abort(_("--dir can only be used on repos with "
557 "treemanifest enabled"))
558 "treemanifest enabled"))
558 dirlog = repo.manifestlog._revlog.dirlog(dir)
559 dirlog = repo.manifestlog._revlog.dirlog(dir)
559 if len(dirlog):
560 if len(dirlog):
560 r = dirlog
561 r = dirlog
561 elif mf:
562 elif mf:
562 r = repo.manifestlog._revlog
563 r = repo.manifestlog._revlog
563 elif file_:
564 elif file_:
564 filelog = repo.file(file_)
565 filelog = repo.file(file_)
565 if len(filelog):
566 if len(filelog):
566 r = filelog
567 r = filelog
567 if not r:
568 if not r:
568 if not file_:
569 if not file_:
569 raise error.CommandError(cmd, _('invalid arguments'))
570 raise error.CommandError(cmd, _('invalid arguments'))
570 if not os.path.isfile(file_):
571 if not os.path.isfile(file_):
571 raise error.Abort(_("revlog '%s' not found") % file_)
572 raise error.Abort(_("revlog '%s' not found") % file_)
572 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
573 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
573 file_[:-2] + ".i")
574 file_[:-2] + ".i")
574 return r
575 return r
575
576
576 def copy(ui, repo, pats, opts, rename=False):
577 def copy(ui, repo, pats, opts, rename=False):
577 # called with the repo lock held
578 # called with the repo lock held
578 #
579 #
579 # hgsep => pathname that uses "/" to separate directories
580 # hgsep => pathname that uses "/" to separate directories
580 # ossep => pathname that uses os.sep to separate directories
581 # ossep => pathname that uses os.sep to separate directories
581 cwd = repo.getcwd()
582 cwd = repo.getcwd()
582 targets = {}
583 targets = {}
583 after = opts.get("after")
584 after = opts.get("after")
584 dryrun = opts.get("dry_run")
585 dryrun = opts.get("dry_run")
585 wctx = repo[None]
586 wctx = repo[None]
586
587
587 def walkpat(pat):
588 def walkpat(pat):
588 srcs = []
589 srcs = []
589 if after:
590 if after:
590 badstates = '?'
591 badstates = '?'
591 else:
592 else:
592 badstates = '?r'
593 badstates = '?r'
593 m = scmutil.match(repo[None], [pat], opts, globbed=True)
594 m = scmutil.match(repo[None], [pat], opts, globbed=True)
594 for abs in repo.walk(m):
595 for abs in repo.walk(m):
595 state = repo.dirstate[abs]
596 state = repo.dirstate[abs]
596 rel = m.rel(abs)
597 rel = m.rel(abs)
597 exact = m.exact(abs)
598 exact = m.exact(abs)
598 if state in badstates:
599 if state in badstates:
599 if exact and state == '?':
600 if exact and state == '?':
600 ui.warn(_('%s: not copying - file is not managed\n') % rel)
601 ui.warn(_('%s: not copying - file is not managed\n') % rel)
601 if exact and state == 'r':
602 if exact and state == 'r':
602 ui.warn(_('%s: not copying - file has been marked for'
603 ui.warn(_('%s: not copying - file has been marked for'
603 ' remove\n') % rel)
604 ' remove\n') % rel)
604 continue
605 continue
605 # abs: hgsep
606 # abs: hgsep
606 # rel: ossep
607 # rel: ossep
607 srcs.append((abs, rel, exact))
608 srcs.append((abs, rel, exact))
608 return srcs
609 return srcs
609
610
610 # abssrc: hgsep
611 # abssrc: hgsep
611 # relsrc: ossep
612 # relsrc: ossep
612 # otarget: ossep
613 # otarget: ossep
613 def copyfile(abssrc, relsrc, otarget, exact):
614 def copyfile(abssrc, relsrc, otarget, exact):
614 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
615 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
615 if '/' in abstarget:
616 if '/' in abstarget:
616 # We cannot normalize abstarget itself, this would prevent
617 # We cannot normalize abstarget itself, this would prevent
617 # case only renames, like a => A.
618 # case only renames, like a => A.
618 abspath, absname = abstarget.rsplit('/', 1)
619 abspath, absname = abstarget.rsplit('/', 1)
619 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
620 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
620 reltarget = repo.pathto(abstarget, cwd)
621 reltarget = repo.pathto(abstarget, cwd)
621 target = repo.wjoin(abstarget)
622 target = repo.wjoin(abstarget)
622 src = repo.wjoin(abssrc)
623 src = repo.wjoin(abssrc)
623 state = repo.dirstate[abstarget]
624 state = repo.dirstate[abstarget]
624
625
625 scmutil.checkportable(ui, abstarget)
626 scmutil.checkportable(ui, abstarget)
626
627
627 # check for collisions
628 # check for collisions
628 prevsrc = targets.get(abstarget)
629 prevsrc = targets.get(abstarget)
629 if prevsrc is not None:
630 if prevsrc is not None:
630 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
631 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
631 (reltarget, repo.pathto(abssrc, cwd),
632 (reltarget, repo.pathto(abssrc, cwd),
632 repo.pathto(prevsrc, cwd)))
633 repo.pathto(prevsrc, cwd)))
633 return
634 return
634
635
635 # check for overwrites
636 # check for overwrites
636 exists = os.path.lexists(target)
637 exists = os.path.lexists(target)
637 samefile = False
638 samefile = False
638 if exists and abssrc != abstarget:
639 if exists and abssrc != abstarget:
639 if (repo.dirstate.normalize(abssrc) ==
640 if (repo.dirstate.normalize(abssrc) ==
640 repo.dirstate.normalize(abstarget)):
641 repo.dirstate.normalize(abstarget)):
641 if not rename:
642 if not rename:
642 ui.warn(_("%s: can't copy - same file\n") % reltarget)
643 ui.warn(_("%s: can't copy - same file\n") % reltarget)
643 return
644 return
644 exists = False
645 exists = False
645 samefile = True
646 samefile = True
646
647
647 if not after and exists or after and state in 'mn':
648 if not after and exists or after and state in 'mn':
648 if not opts['force']:
649 if not opts['force']:
649 if state in 'mn':
650 if state in 'mn':
650 msg = _('%s: not overwriting - file already committed\n')
651 msg = _('%s: not overwriting - file already committed\n')
651 if after:
652 if after:
652 flags = '--after --force'
653 flags = '--after --force'
653 else:
654 else:
654 flags = '--force'
655 flags = '--force'
655 if rename:
656 if rename:
656 hint = _('(hg rename %s to replace the file by '
657 hint = _('(hg rename %s to replace the file by '
657 'recording a rename)\n') % flags
658 'recording a rename)\n') % flags
658 else:
659 else:
659 hint = _('(hg copy %s to replace the file by '
660 hint = _('(hg copy %s to replace the file by '
660 'recording a copy)\n') % flags
661 'recording a copy)\n') % flags
661 else:
662 else:
662 msg = _('%s: not overwriting - file exists\n')
663 msg = _('%s: not overwriting - file exists\n')
663 if rename:
664 if rename:
664 hint = _('(hg rename --after to record the rename)\n')
665 hint = _('(hg rename --after to record the rename)\n')
665 else:
666 else:
666 hint = _('(hg copy --after to record the copy)\n')
667 hint = _('(hg copy --after to record the copy)\n')
667 ui.warn(msg % reltarget)
668 ui.warn(msg % reltarget)
668 ui.warn(hint)
669 ui.warn(hint)
669 return
670 return
670
671
671 if after:
672 if after:
672 if not exists:
673 if not exists:
673 if rename:
674 if rename:
674 ui.warn(_('%s: not recording move - %s does not exist\n') %
675 ui.warn(_('%s: not recording move - %s does not exist\n') %
675 (relsrc, reltarget))
676 (relsrc, reltarget))
676 else:
677 else:
677 ui.warn(_('%s: not recording copy - %s does not exist\n') %
678 ui.warn(_('%s: not recording copy - %s does not exist\n') %
678 (relsrc, reltarget))
679 (relsrc, reltarget))
679 return
680 return
680 elif not dryrun:
681 elif not dryrun:
681 try:
682 try:
682 if exists:
683 if exists:
683 os.unlink(target)
684 os.unlink(target)
684 targetdir = os.path.dirname(target) or '.'
685 targetdir = os.path.dirname(target) or '.'
685 if not os.path.isdir(targetdir):
686 if not os.path.isdir(targetdir):
686 os.makedirs(targetdir)
687 os.makedirs(targetdir)
687 if samefile:
688 if samefile:
688 tmp = target + "~hgrename"
689 tmp = target + "~hgrename"
689 os.rename(src, tmp)
690 os.rename(src, tmp)
690 os.rename(tmp, target)
691 os.rename(tmp, target)
691 else:
692 else:
692 util.copyfile(src, target)
693 util.copyfile(src, target)
693 srcexists = True
694 srcexists = True
694 except IOError as inst:
695 except IOError as inst:
695 if inst.errno == errno.ENOENT:
696 if inst.errno == errno.ENOENT:
696 ui.warn(_('%s: deleted in working directory\n') % relsrc)
697 ui.warn(_('%s: deleted in working directory\n') % relsrc)
697 srcexists = False
698 srcexists = False
698 else:
699 else:
699 ui.warn(_('%s: cannot copy - %s\n') %
700 ui.warn(_('%s: cannot copy - %s\n') %
700 (relsrc, inst.strerror))
701 (relsrc, inst.strerror))
701 return True # report a failure
702 return True # report a failure
702
703
703 if ui.verbose or not exact:
704 if ui.verbose or not exact:
704 if rename:
705 if rename:
705 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
706 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
706 else:
707 else:
707 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
708 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
708
709
709 targets[abstarget] = abssrc
710 targets[abstarget] = abssrc
710
711
711 # fix up dirstate
712 # fix up dirstate
712 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
713 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
713 dryrun=dryrun, cwd=cwd)
714 dryrun=dryrun, cwd=cwd)
714 if rename and not dryrun:
715 if rename and not dryrun:
715 if not after and srcexists and not samefile:
716 if not after and srcexists and not samefile:
716 util.unlinkpath(repo.wjoin(abssrc))
717 util.unlinkpath(repo.wjoin(abssrc))
717 wctx.forget([abssrc])
718 wctx.forget([abssrc])
718
719
719 # pat: ossep
720 # pat: ossep
720 # dest ossep
721 # dest ossep
721 # srcs: list of (hgsep, hgsep, ossep, bool)
722 # srcs: list of (hgsep, hgsep, ossep, bool)
722 # return: function that takes hgsep and returns ossep
723 # return: function that takes hgsep and returns ossep
723 def targetpathfn(pat, dest, srcs):
724 def targetpathfn(pat, dest, srcs):
724 if os.path.isdir(pat):
725 if os.path.isdir(pat):
725 abspfx = pathutil.canonpath(repo.root, cwd, pat)
726 abspfx = pathutil.canonpath(repo.root, cwd, pat)
726 abspfx = util.localpath(abspfx)
727 abspfx = util.localpath(abspfx)
727 if destdirexists:
728 if destdirexists:
728 striplen = len(os.path.split(abspfx)[0])
729 striplen = len(os.path.split(abspfx)[0])
729 else:
730 else:
730 striplen = len(abspfx)
731 striplen = len(abspfx)
731 if striplen:
732 if striplen:
732 striplen += len(os.sep)
733 striplen += len(os.sep)
733 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
734 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
734 elif destdirexists:
735 elif destdirexists:
735 res = lambda p: os.path.join(dest,
736 res = lambda p: os.path.join(dest,
736 os.path.basename(util.localpath(p)))
737 os.path.basename(util.localpath(p)))
737 else:
738 else:
738 res = lambda p: dest
739 res = lambda p: dest
739 return res
740 return res
740
741
741 # pat: ossep
742 # pat: ossep
742 # dest ossep
743 # dest ossep
743 # srcs: list of (hgsep, hgsep, ossep, bool)
744 # srcs: list of (hgsep, hgsep, ossep, bool)
744 # return: function that takes hgsep and returns ossep
745 # return: function that takes hgsep and returns ossep
745 def targetpathafterfn(pat, dest, srcs):
746 def targetpathafterfn(pat, dest, srcs):
746 if matchmod.patkind(pat):
747 if matchmod.patkind(pat):
747 # a mercurial pattern
748 # a mercurial pattern
748 res = lambda p: os.path.join(dest,
749 res = lambda p: os.path.join(dest,
749 os.path.basename(util.localpath(p)))
750 os.path.basename(util.localpath(p)))
750 else:
751 else:
751 abspfx = pathutil.canonpath(repo.root, cwd, pat)
752 abspfx = pathutil.canonpath(repo.root, cwd, pat)
752 if len(abspfx) < len(srcs[0][0]):
753 if len(abspfx) < len(srcs[0][0]):
753 # A directory. Either the target path contains the last
754 # A directory. Either the target path contains the last
754 # component of the source path or it does not.
755 # component of the source path or it does not.
755 def evalpath(striplen):
756 def evalpath(striplen):
756 score = 0
757 score = 0
757 for s in srcs:
758 for s in srcs:
758 t = os.path.join(dest, util.localpath(s[0])[striplen:])
759 t = os.path.join(dest, util.localpath(s[0])[striplen:])
759 if os.path.lexists(t):
760 if os.path.lexists(t):
760 score += 1
761 score += 1
761 return score
762 return score
762
763
763 abspfx = util.localpath(abspfx)
764 abspfx = util.localpath(abspfx)
764 striplen = len(abspfx)
765 striplen = len(abspfx)
765 if striplen:
766 if striplen:
766 striplen += len(os.sep)
767 striplen += len(os.sep)
767 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
768 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
768 score = evalpath(striplen)
769 score = evalpath(striplen)
769 striplen1 = len(os.path.split(abspfx)[0])
770 striplen1 = len(os.path.split(abspfx)[0])
770 if striplen1:
771 if striplen1:
771 striplen1 += len(os.sep)
772 striplen1 += len(os.sep)
772 if evalpath(striplen1) > score:
773 if evalpath(striplen1) > score:
773 striplen = striplen1
774 striplen = striplen1
774 res = lambda p: os.path.join(dest,
775 res = lambda p: os.path.join(dest,
775 util.localpath(p)[striplen:])
776 util.localpath(p)[striplen:])
776 else:
777 else:
777 # a file
778 # a file
778 if destdirexists:
779 if destdirexists:
779 res = lambda p: os.path.join(dest,
780 res = lambda p: os.path.join(dest,
780 os.path.basename(util.localpath(p)))
781 os.path.basename(util.localpath(p)))
781 else:
782 else:
782 res = lambda p: dest
783 res = lambda p: dest
783 return res
784 return res
784
785
785 pats = scmutil.expandpats(pats)
786 pats = scmutil.expandpats(pats)
786 if not pats:
787 if not pats:
787 raise error.Abort(_('no source or destination specified'))
788 raise error.Abort(_('no source or destination specified'))
788 if len(pats) == 1:
789 if len(pats) == 1:
789 raise error.Abort(_('no destination specified'))
790 raise error.Abort(_('no destination specified'))
790 dest = pats.pop()
791 dest = pats.pop()
791 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
792 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
792 if not destdirexists:
793 if not destdirexists:
793 if len(pats) > 1 or matchmod.patkind(pats[0]):
794 if len(pats) > 1 or matchmod.patkind(pats[0]):
794 raise error.Abort(_('with multiple sources, destination must be an '
795 raise error.Abort(_('with multiple sources, destination must be an '
795 'existing directory'))
796 'existing directory'))
796 if util.endswithsep(dest):
797 if util.endswithsep(dest):
797 raise error.Abort(_('destination %s is not a directory') % dest)
798 raise error.Abort(_('destination %s is not a directory') % dest)
798
799
799 tfn = targetpathfn
800 tfn = targetpathfn
800 if after:
801 if after:
801 tfn = targetpathafterfn
802 tfn = targetpathafterfn
802 copylist = []
803 copylist = []
803 for pat in pats:
804 for pat in pats:
804 srcs = walkpat(pat)
805 srcs = walkpat(pat)
805 if not srcs:
806 if not srcs:
806 continue
807 continue
807 copylist.append((tfn(pat, dest, srcs), srcs))
808 copylist.append((tfn(pat, dest, srcs), srcs))
808 if not copylist:
809 if not copylist:
809 raise error.Abort(_('no files to copy'))
810 raise error.Abort(_('no files to copy'))
810
811
811 errors = 0
812 errors = 0
812 for targetpath, srcs in copylist:
813 for targetpath, srcs in copylist:
813 for abssrc, relsrc, exact in srcs:
814 for abssrc, relsrc, exact in srcs:
814 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
815 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
815 errors += 1
816 errors += 1
816
817
817 if errors:
818 if errors:
818 ui.warn(_('(consider using --after)\n'))
819 ui.warn(_('(consider using --after)\n'))
819
820
820 return errors != 0
821 return errors != 0
821
822
822 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
823 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
823 runargs=None, appendpid=False):
824 runargs=None, appendpid=False):
824 '''Run a command as a service.'''
825 '''Run a command as a service.'''
825
826
826 def writepid(pid):
827 def writepid(pid):
827 if opts['pid_file']:
828 if opts['pid_file']:
828 if appendpid:
829 if appendpid:
829 mode = 'a'
830 mode = 'a'
830 else:
831 else:
831 mode = 'w'
832 mode = 'w'
832 fp = open(opts['pid_file'], mode)
833 fp = open(opts['pid_file'], mode)
833 fp.write(str(pid) + '\n')
834 fp.write(str(pid) + '\n')
834 fp.close()
835 fp.close()
835
836
836 if opts['daemon'] and not opts['daemon_postexec']:
837 if opts['daemon'] and not opts['daemon_postexec']:
837 # Signal child process startup with file removal
838 # Signal child process startup with file removal
838 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
839 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
839 os.close(lockfd)
840 os.close(lockfd)
840 try:
841 try:
841 if not runargs:
842 if not runargs:
842 runargs = util.hgcmd() + sys.argv[1:]
843 runargs = util.hgcmd() + sys.argv[1:]
843 runargs.append('--daemon-postexec=unlink:%s' % lockpath)
844 runargs.append('--daemon-postexec=unlink:%s' % lockpath)
844 # Don't pass --cwd to the child process, because we've already
845 # Don't pass --cwd to the child process, because we've already
845 # changed directory.
846 # changed directory.
846 for i in xrange(1, len(runargs)):
847 for i in xrange(1, len(runargs)):
847 if runargs[i].startswith('--cwd='):
848 if runargs[i].startswith('--cwd='):
848 del runargs[i]
849 del runargs[i]
849 break
850 break
850 elif runargs[i].startswith('--cwd'):
851 elif runargs[i].startswith('--cwd'):
851 del runargs[i:i + 2]
852 del runargs[i:i + 2]
852 break
853 break
853 def condfn():
854 def condfn():
854 return not os.path.exists(lockpath)
855 return not os.path.exists(lockpath)
855 pid = util.rundetached(runargs, condfn)
856 pid = util.rundetached(runargs, condfn)
856 if pid < 0:
857 if pid < 0:
857 raise error.Abort(_('child process failed to start'))
858 raise error.Abort(_('child process failed to start'))
858 writepid(pid)
859 writepid(pid)
859 finally:
860 finally:
860 try:
861 try:
861 os.unlink(lockpath)
862 os.unlink(lockpath)
862 except OSError as e:
863 except OSError as e:
863 if e.errno != errno.ENOENT:
864 if e.errno != errno.ENOENT:
864 raise
865 raise
865 if parentfn:
866 if parentfn:
866 return parentfn(pid)
867 return parentfn(pid)
867 else:
868 else:
868 return
869 return
869
870
870 if initfn:
871 if initfn:
871 initfn()
872 initfn()
872
873
873 if not opts['daemon']:
874 if not opts['daemon']:
874 writepid(util.getpid())
875 writepid(util.getpid())
875
876
876 if opts['daemon_postexec']:
877 if opts['daemon_postexec']:
877 try:
878 try:
878 os.setsid()
879 os.setsid()
879 except AttributeError:
880 except AttributeError:
880 pass
881 pass
881 for inst in opts['daemon_postexec']:
882 for inst in opts['daemon_postexec']:
882 if inst.startswith('unlink:'):
883 if inst.startswith('unlink:'):
883 lockpath = inst[7:]
884 lockpath = inst[7:]
884 os.unlink(lockpath)
885 os.unlink(lockpath)
885 elif inst.startswith('chdir:'):
886 elif inst.startswith('chdir:'):
886 os.chdir(inst[6:])
887 os.chdir(inst[6:])
887 elif inst != 'none':
888 elif inst != 'none':
888 raise error.Abort(_('invalid value for --daemon-postexec: %s')
889 raise error.Abort(_('invalid value for --daemon-postexec: %s')
889 % inst)
890 % inst)
890 util.hidewindow()
891 util.hidewindow()
891 util.stdout.flush()
892 util.stdout.flush()
892 util.stderr.flush()
893 util.stderr.flush()
893
894
894 nullfd = os.open(os.devnull, os.O_RDWR)
895 nullfd = os.open(os.devnull, os.O_RDWR)
895 logfilefd = nullfd
896 logfilefd = nullfd
896 if logfile:
897 if logfile:
897 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
898 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
898 os.dup2(nullfd, 0)
899 os.dup2(nullfd, 0)
899 os.dup2(logfilefd, 1)
900 os.dup2(logfilefd, 1)
900 os.dup2(logfilefd, 2)
901 os.dup2(logfilefd, 2)
901 if nullfd not in (0, 1, 2):
902 if nullfd not in (0, 1, 2):
902 os.close(nullfd)
903 os.close(nullfd)
903 if logfile and logfilefd not in (0, 1, 2):
904 if logfile and logfilefd not in (0, 1, 2):
904 os.close(logfilefd)
905 os.close(logfilefd)
905
906
906 if runfn:
907 if runfn:
907 return runfn()
908 return runfn()
908
909
909 ## facility to let extension process additional data into an import patch
910 ## facility to let extension process additional data into an import patch
910 # list of identifier to be executed in order
911 # list of identifier to be executed in order
911 extrapreimport = [] # run before commit
912 extrapreimport = [] # run before commit
912 extrapostimport = [] # run after commit
913 extrapostimport = [] # run after commit
913 # mapping from identifier to actual import function
914 # mapping from identifier to actual import function
914 #
915 #
915 # 'preimport' are run before the commit is made and are provided the following
916 # 'preimport' are run before the commit is made and are provided the following
916 # arguments:
917 # arguments:
917 # - repo: the localrepository instance,
918 # - repo: the localrepository instance,
918 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
919 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
919 # - extra: the future extra dictionary of the changeset, please mutate it,
920 # - extra: the future extra dictionary of the changeset, please mutate it,
920 # - opts: the import options.
921 # - opts: the import options.
921 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
922 # XXX ideally, we would just pass an ctx ready to be computed, that would allow
922 # mutation of in memory commit and more. Feel free to rework the code to get
923 # mutation of in memory commit and more. Feel free to rework the code to get
923 # there.
924 # there.
924 extrapreimportmap = {}
925 extrapreimportmap = {}
925 # 'postimport' are run after the commit is made and are provided the following
926 # 'postimport' are run after the commit is made and are provided the following
926 # argument:
927 # argument:
927 # - ctx: the changectx created by import.
928 # - ctx: the changectx created by import.
928 extrapostimportmap = {}
929 extrapostimportmap = {}
929
930
930 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
931 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
931 """Utility function used by commands.import to import a single patch
932 """Utility function used by commands.import to import a single patch
932
933
933 This function is explicitly defined here to help the evolve extension to
934 This function is explicitly defined here to help the evolve extension to
934 wrap this part of the import logic.
935 wrap this part of the import logic.
935
936
936 The API is currently a bit ugly because it a simple code translation from
937 The API is currently a bit ugly because it a simple code translation from
937 the import command. Feel free to make it better.
938 the import command. Feel free to make it better.
938
939
939 :hunk: a patch (as a binary string)
940 :hunk: a patch (as a binary string)
940 :parents: nodes that will be parent of the created commit
941 :parents: nodes that will be parent of the created commit
941 :opts: the full dict of option passed to the import command
942 :opts: the full dict of option passed to the import command
942 :msgs: list to save commit message to.
943 :msgs: list to save commit message to.
943 (used in case we need to save it when failing)
944 (used in case we need to save it when failing)
944 :updatefunc: a function that update a repo to a given node
945 :updatefunc: a function that update a repo to a given node
945 updatefunc(<repo>, <node>)
946 updatefunc(<repo>, <node>)
946 """
947 """
947 # avoid cycle context -> subrepo -> cmdutil
948 # avoid cycle context -> subrepo -> cmdutil
948 from . import context
949 from . import context
949 extractdata = patch.extract(ui, hunk)
950 extractdata = patch.extract(ui, hunk)
950 tmpname = extractdata.get('filename')
951 tmpname = extractdata.get('filename')
951 message = extractdata.get('message')
952 message = extractdata.get('message')
952 user = opts.get('user') or extractdata.get('user')
953 user = opts.get('user') or extractdata.get('user')
953 date = opts.get('date') or extractdata.get('date')
954 date = opts.get('date') or extractdata.get('date')
954 branch = extractdata.get('branch')
955 branch = extractdata.get('branch')
955 nodeid = extractdata.get('nodeid')
956 nodeid = extractdata.get('nodeid')
956 p1 = extractdata.get('p1')
957 p1 = extractdata.get('p1')
957 p2 = extractdata.get('p2')
958 p2 = extractdata.get('p2')
958
959
959 nocommit = opts.get('no_commit')
960 nocommit = opts.get('no_commit')
960 importbranch = opts.get('import_branch')
961 importbranch = opts.get('import_branch')
961 update = not opts.get('bypass')
962 update = not opts.get('bypass')
962 strip = opts["strip"]
963 strip = opts["strip"]
963 prefix = opts["prefix"]
964 prefix = opts["prefix"]
964 sim = float(opts.get('similarity') or 0)
965 sim = float(opts.get('similarity') or 0)
965 if not tmpname:
966 if not tmpname:
966 return (None, None, False)
967 return (None, None, False)
967
968
968 rejects = False
969 rejects = False
969
970
970 try:
971 try:
971 cmdline_message = logmessage(ui, opts)
972 cmdline_message = logmessage(ui, opts)
972 if cmdline_message:
973 if cmdline_message:
973 # pickup the cmdline msg
974 # pickup the cmdline msg
974 message = cmdline_message
975 message = cmdline_message
975 elif message:
976 elif message:
976 # pickup the patch msg
977 # pickup the patch msg
977 message = message.strip()
978 message = message.strip()
978 else:
979 else:
979 # launch the editor
980 # launch the editor
980 message = None
981 message = None
981 ui.debug('message:\n%s\n' % message)
982 ui.debug('message:\n%s\n' % message)
982
983
983 if len(parents) == 1:
984 if len(parents) == 1:
984 parents.append(repo[nullid])
985 parents.append(repo[nullid])
985 if opts.get('exact'):
986 if opts.get('exact'):
986 if not nodeid or not p1:
987 if not nodeid or not p1:
987 raise error.Abort(_('not a Mercurial patch'))
988 raise error.Abort(_('not a Mercurial patch'))
988 p1 = repo[p1]
989 p1 = repo[p1]
989 p2 = repo[p2 or nullid]
990 p2 = repo[p2 or nullid]
990 elif p2:
991 elif p2:
991 try:
992 try:
992 p1 = repo[p1]
993 p1 = repo[p1]
993 p2 = repo[p2]
994 p2 = repo[p2]
994 # Without any options, consider p2 only if the
995 # Without any options, consider p2 only if the
995 # patch is being applied on top of the recorded
996 # patch is being applied on top of the recorded
996 # first parent.
997 # first parent.
997 if p1 != parents[0]:
998 if p1 != parents[0]:
998 p1 = parents[0]
999 p1 = parents[0]
999 p2 = repo[nullid]
1000 p2 = repo[nullid]
1000 except error.RepoError:
1001 except error.RepoError:
1001 p1, p2 = parents
1002 p1, p2 = parents
1002 if p2.node() == nullid:
1003 if p2.node() == nullid:
1003 ui.warn(_("warning: import the patch as a normal revision\n"
1004 ui.warn(_("warning: import the patch as a normal revision\n"
1004 "(use --exact to import the patch as a merge)\n"))
1005 "(use --exact to import the patch as a merge)\n"))
1005 else:
1006 else:
1006 p1, p2 = parents
1007 p1, p2 = parents
1007
1008
1008 n = None
1009 n = None
1009 if update:
1010 if update:
1010 if p1 != parents[0]:
1011 if p1 != parents[0]:
1011 updatefunc(repo, p1.node())
1012 updatefunc(repo, p1.node())
1012 if p2 != parents[1]:
1013 if p2 != parents[1]:
1013 repo.setparents(p1.node(), p2.node())
1014 repo.setparents(p1.node(), p2.node())
1014
1015
1015 if opts.get('exact') or importbranch:
1016 if opts.get('exact') or importbranch:
1016 repo.dirstate.setbranch(branch or 'default')
1017 repo.dirstate.setbranch(branch or 'default')
1017
1018
1018 partial = opts.get('partial', False)
1019 partial = opts.get('partial', False)
1019 files = set()
1020 files = set()
1020 try:
1021 try:
1021 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1022 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1022 files=files, eolmode=None, similarity=sim / 100.0)
1023 files=files, eolmode=None, similarity=sim / 100.0)
1023 except patch.PatchError as e:
1024 except patch.PatchError as e:
1024 if not partial:
1025 if not partial:
1025 raise error.Abort(str(e))
1026 raise error.Abort(str(e))
1026 if partial:
1027 if partial:
1027 rejects = True
1028 rejects = True
1028
1029
1029 files = list(files)
1030 files = list(files)
1030 if nocommit:
1031 if nocommit:
1031 if message:
1032 if message:
1032 msgs.append(message)
1033 msgs.append(message)
1033 else:
1034 else:
1034 if opts.get('exact') or p2:
1035 if opts.get('exact') or p2:
1035 # If you got here, you either used --force and know what
1036 # If you got here, you either used --force and know what
1036 # you are doing or used --exact or a merge patch while
1037 # you are doing or used --exact or a merge patch while
1037 # being updated to its first parent.
1038 # being updated to its first parent.
1038 m = None
1039 m = None
1039 else:
1040 else:
1040 m = scmutil.matchfiles(repo, files or [])
1041 m = scmutil.matchfiles(repo, files or [])
1041 editform = mergeeditform(repo[None], 'import.normal')
1042 editform = mergeeditform(repo[None], 'import.normal')
1042 if opts.get('exact'):
1043 if opts.get('exact'):
1043 editor = None
1044 editor = None
1044 else:
1045 else:
1045 editor = getcommiteditor(editform=editform, **opts)
1046 editor = getcommiteditor(editform=editform, **opts)
1046 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
1047 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
1047 extra = {}
1048 extra = {}
1048 for idfunc in extrapreimport:
1049 for idfunc in extrapreimport:
1049 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1050 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1050 try:
1051 try:
1051 if partial:
1052 if partial:
1052 repo.ui.setconfig('ui', 'allowemptycommit', True)
1053 repo.ui.setconfig('ui', 'allowemptycommit', True)
1053 n = repo.commit(message, user,
1054 n = repo.commit(message, user,
1054 date, match=m,
1055 date, match=m,
1055 editor=editor, extra=extra)
1056 editor=editor, extra=extra)
1056 for idfunc in extrapostimport:
1057 for idfunc in extrapostimport:
1057 extrapostimportmap[idfunc](repo[n])
1058 extrapostimportmap[idfunc](repo[n])
1058 finally:
1059 finally:
1059 repo.ui.restoreconfig(allowemptyback)
1060 repo.ui.restoreconfig(allowemptyback)
1060 else:
1061 else:
1061 if opts.get('exact') or importbranch:
1062 if opts.get('exact') or importbranch:
1062 branch = branch or 'default'
1063 branch = branch or 'default'
1063 else:
1064 else:
1064 branch = p1.branch()
1065 branch = p1.branch()
1065 store = patch.filestore()
1066 store = patch.filestore()
1066 try:
1067 try:
1067 files = set()
1068 files = set()
1068 try:
1069 try:
1069 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1070 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1070 files, eolmode=None)
1071 files, eolmode=None)
1071 except patch.PatchError as e:
1072 except patch.PatchError as e:
1072 raise error.Abort(str(e))
1073 raise error.Abort(str(e))
1073 if opts.get('exact'):
1074 if opts.get('exact'):
1074 editor = None
1075 editor = None
1075 else:
1076 else:
1076 editor = getcommiteditor(editform='import.bypass')
1077 editor = getcommiteditor(editform='import.bypass')
1077 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1078 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1078 message,
1079 message,
1079 user,
1080 user,
1080 date,
1081 date,
1081 branch, files, store,
1082 branch, files, store,
1082 editor=editor)
1083 editor=editor)
1083 n = memctx.commit()
1084 n = memctx.commit()
1084 finally:
1085 finally:
1085 store.close()
1086 store.close()
1086 if opts.get('exact') and nocommit:
1087 if opts.get('exact') and nocommit:
1087 # --exact with --no-commit is still useful in that it does merge
1088 # --exact with --no-commit is still useful in that it does merge
1088 # and branch bits
1089 # and branch bits
1089 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1090 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1090 elif opts.get('exact') and hex(n) != nodeid:
1091 elif opts.get('exact') and hex(n) != nodeid:
1091 raise error.Abort(_('patch is damaged or loses information'))
1092 raise error.Abort(_('patch is damaged or loses information'))
1092 msg = _('applied to working directory')
1093 msg = _('applied to working directory')
1093 if n:
1094 if n:
1094 # i18n: refers to a short changeset id
1095 # i18n: refers to a short changeset id
1095 msg = _('created %s') % short(n)
1096 msg = _('created %s') % short(n)
1096 return (msg, n, rejects)
1097 return (msg, n, rejects)
1097 finally:
1098 finally:
1098 os.unlink(tmpname)
1099 os.unlink(tmpname)
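
As a rough usage sketch (not taken verbatim from commands.import): 'ui', 'repo',
'hunk' and 'opts' are assumed to be supplied by the caller, and the real command
additionally runs this under the repository locks and a transaction:

    from mercurial import cmdutil, hg

    def updatefunc(repo, node):
        # any callable that updates the working directory to 'node' will do
        hg.update(repo, node)

    msgs = []
    parents = repo[None].parents()
    msg, node, rejects = cmdutil.tryimportone(ui, repo, hunk, parents,
                                              opts, msgs, updatefunc)
    if msg:
        ui.status('%s\n' % msg)
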
1099
1100
1100 # facility to let extensions include additional data in an exported patch
1101 # facility to let extensions include additional data in an exported patch
1101 # list of identifiers to be executed in order
1102 # list of identifiers to be executed in order
1102 extraexport = []
1103 extraexport = []
1103 # mapping from identifier to actual export function
1104 # mapping from identifier to actual export function
1104 # function has to return a string to be added to the header or None
1105 # function has to return a string to be added to the header or None
1105 # it is given two arguments (sequencenumber, changectx)
1106 # it is given two arguments (sequencenumber, changectx)
1106 extraexportmap = {}
1107 extraexportmap = {}
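
A hypothetical example of such an extension hook; the 'topic' identifier and
extra key are invented here purely to show the shape of the API:

    from mercurial import cmdutil

    def topicheader(seqno, ctx):
        # return a string to be emitted as '# <string>' in the patch header,
        # or None to add nothing for this changeset
        topic = ctx.extra().get('topic')
        if topic:
            return 'Topic %s' % topic
        return None

    cmdutil.extraexport.append('topic')
    cmdutil.extraexportmap['topic'] = topicheader
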
1107
1108
1108 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1109 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1109 opts=None, match=None):
1110 opts=None, match=None):
1110 '''export changesets as hg patches.'''
1111 '''export changesets as hg patches.'''
1111
1112
1112 total = len(revs)
1113 total = len(revs)
1113 revwidth = max([len(str(rev)) for rev in revs])
1114 revwidth = max([len(str(rev)) for rev in revs])
1114 filemode = {}
1115 filemode = {}
1115
1116
1116 def single(rev, seqno, fp):
1117 def single(rev, seqno, fp):
1117 ctx = repo[rev]
1118 ctx = repo[rev]
1118 node = ctx.node()
1119 node = ctx.node()
1119 parents = [p.node() for p in ctx.parents() if p]
1120 parents = [p.node() for p in ctx.parents() if p]
1120 branch = ctx.branch()
1121 branch = ctx.branch()
1121 if switch_parent:
1122 if switch_parent:
1122 parents.reverse()
1123 parents.reverse()
1123
1124
1124 if parents:
1125 if parents:
1125 prev = parents[0]
1126 prev = parents[0]
1126 else:
1127 else:
1127 prev = nullid
1128 prev = nullid
1128
1129
1129 shouldclose = False
1130 shouldclose = False
1130 if not fp and len(template) > 0:
1131 if not fp and len(template) > 0:
1131 desc_lines = ctx.description().rstrip().split('\n')
1132 desc_lines = ctx.description().rstrip().split('\n')
1132 desc = desc_lines[0] # Commit always has a first line.
1133 desc = desc_lines[0] # Commit always has a first line.
1133 fp = makefileobj(repo, template, node, desc=desc, total=total,
1134 fp = makefileobj(repo, template, node, desc=desc, total=total,
1134 seqno=seqno, revwidth=revwidth, mode='wb',
1135 seqno=seqno, revwidth=revwidth, mode='wb',
1135 modemap=filemode)
1136 modemap=filemode)
1136 shouldclose = True
1137 shouldclose = True
1137 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1138 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1138 repo.ui.note("%s\n" % fp.name)
1139 repo.ui.note("%s\n" % fp.name)
1139
1140
1140 if not fp:
1141 if not fp:
1141 write = repo.ui.write
1142 write = repo.ui.write
1142 else:
1143 else:
1143 def write(s, **kw):
1144 def write(s, **kw):
1144 fp.write(s)
1145 fp.write(s)
1145
1146
1146 write("# HG changeset patch\n")
1147 write("# HG changeset patch\n")
1147 write("# User %s\n" % ctx.user())
1148 write("# User %s\n" % ctx.user())
1148 write("# Date %d %d\n" % ctx.date())
1149 write("# Date %d %d\n" % ctx.date())
1149 write("# %s\n" % util.datestr(ctx.date()))
1150 write("# %s\n" % util.datestr(ctx.date()))
1150 if branch and branch != 'default':
1151 if branch and branch != 'default':
1151 write("# Branch %s\n" % branch)
1152 write("# Branch %s\n" % branch)
1152 write("# Node ID %s\n" % hex(node))
1153 write("# Node ID %s\n" % hex(node))
1153 write("# Parent %s\n" % hex(prev))
1154 write("# Parent %s\n" % hex(prev))
1154 if len(parents) > 1:
1155 if len(parents) > 1:
1155 write("# Parent %s\n" % hex(parents[1]))
1156 write("# Parent %s\n" % hex(parents[1]))
1156
1157
1157 for headerid in extraexport:
1158 for headerid in extraexport:
1158 header = extraexportmap[headerid](seqno, ctx)
1159 header = extraexportmap[headerid](seqno, ctx)
1159 if header is not None:
1160 if header is not None:
1160 write('# %s\n' % header)
1161 write('# %s\n' % header)
1161 write(ctx.description().rstrip())
1162 write(ctx.description().rstrip())
1162 write("\n\n")
1163 write("\n\n")
1163
1164
1164 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1165 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1165 write(chunk, label=label)
1166 write(chunk, label=label)
1166
1167
1167 if shouldclose:
1168 if shouldclose:
1168 fp.close()
1169 fp.close()
1169
1170
1170 for seqno, rev in enumerate(revs):
1171 for seqno, rev in enumerate(revs):
1171 single(rev, seqno + 1, fp)
1172 single(rev, seqno + 1, fp)
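
For example, a caller with 'ui' and 'repo' in hand could (a sketch, not code
from the export command itself) write the tip changeset to a patch file named
by the default template:

    from mercurial import cmdutil, patch, scmutil

    cmdutil.export(repo, [repo['tip'].rev()], template='hg-%h.patch',
                   opts=patch.diffallopts(ui), match=scmutil.matchall(repo))
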
1172
1173
1173 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1174 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1174 changes=None, stat=False, fp=None, prefix='',
1175 changes=None, stat=False, fp=None, prefix='',
1175 root='', listsubrepos=False):
1176 root='', listsubrepos=False):
1176 '''show diff or diffstat.'''
1177 '''show diff or diffstat.'''
1177 if fp is None:
1178 if fp is None:
1178 write = ui.write
1179 write = ui.write
1179 else:
1180 else:
1180 def write(s, **kw):
1181 def write(s, **kw):
1181 fp.write(s)
1182 fp.write(s)
1182
1183
1183 if root:
1184 if root:
1184 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1185 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1185 else:
1186 else:
1186 relroot = ''
1187 relroot = ''
1187 if relroot != '':
1188 if relroot != '':
1188 # XXX relative roots currently don't work if the root is within a
1189 # XXX relative roots currently don't work if the root is within a
1189 # subrepo
1190 # subrepo
1190 uirelroot = match.uipath(relroot)
1191 uirelroot = match.uipath(relroot)
1191 relroot += '/'
1192 relroot += '/'
1192 for matchroot in match.files():
1193 for matchroot in match.files():
1193 if not matchroot.startswith(relroot):
1194 if not matchroot.startswith(relroot):
1194 ui.warn(_('warning: %s not inside relative root %s\n') % (
1195 ui.warn(_('warning: %s not inside relative root %s\n') % (
1195 match.uipath(matchroot), uirelroot))
1196 match.uipath(matchroot), uirelroot))
1196
1197
1197 if stat:
1198 if stat:
1198 diffopts = diffopts.copy(context=0)
1199 diffopts = diffopts.copy(context=0)
1199 width = 80
1200 width = 80
1200 if not ui.plain():
1201 if not ui.plain():
1201 width = ui.termwidth()
1202 width = ui.termwidth()
1202 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1203 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1203 prefix=prefix, relroot=relroot)
1204 prefix=prefix, relroot=relroot)
1204 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1205 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1205 width=width):
1206 width=width):
1206 write(chunk, label=label)
1207 write(chunk, label=label)
1207 else:
1208 else:
1208 for chunk, label in patch.diffui(repo, node1, node2, match,
1209 for chunk, label in patch.diffui(repo, node1, node2, match,
1209 changes, diffopts, prefix=prefix,
1210 changes, diffopts, prefix=prefix,
1210 relroot=relroot):
1211 relroot=relroot):
1211 write(chunk, label=label)
1212 write(chunk, label=label)
1212
1213
1213 if listsubrepos:
1214 if listsubrepos:
1214 ctx1 = repo[node1]
1215 ctx1 = repo[node1]
1215 ctx2 = repo[node2]
1216 ctx2 = repo[node2]
1216 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1217 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1217 tempnode2 = node2
1218 tempnode2 = node2
1218 try:
1219 try:
1219 if node2 is not None:
1220 if node2 is not None:
1220 tempnode2 = ctx2.substate[subpath][1]
1221 tempnode2 = ctx2.substate[subpath][1]
1221 except KeyError:
1222 except KeyError:
1222 # A subrepo that existed in node1 was deleted between node1 and
1223 # A subrepo that existed in node1 was deleted between node1 and
1223 # node2 (inclusive). Thus, ctx2's substate won't contain that
1224 # node2 (inclusive). Thus, ctx2's substate won't contain that
1224 # subpath. The best we can do is to ignore it.
1225 # subpath. The best we can do is to ignore it.
1225 tempnode2 = None
1226 tempnode2 = None
1226 submatch = matchmod.subdirmatcher(subpath, match)
1227 submatch = matchmod.subdirmatcher(subpath, match)
1227 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1228 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1228 stat=stat, fp=fp, prefix=prefix)
1229 stat=stat, fp=fp, prefix=prefix)
1229
1230
1230 class changeset_printer(object):
1231 class changeset_printer(object):
1231 '''show changeset information when templating not requested.'''
1232 '''show changeset information when templating not requested.'''
1232
1233
1233 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1234 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1234 self.ui = ui
1235 self.ui = ui
1235 self.repo = repo
1236 self.repo = repo
1236 self.buffered = buffered
1237 self.buffered = buffered
1237 self.matchfn = matchfn
1238 self.matchfn = matchfn
1238 self.diffopts = diffopts
1239 self.diffopts = diffopts
1239 self.header = {}
1240 self.header = {}
1240 self.hunk = {}
1241 self.hunk = {}
1241 self.lastheader = None
1242 self.lastheader = None
1242 self.footer = None
1243 self.footer = None
1243
1244
1244 def flush(self, ctx):
1245 def flush(self, ctx):
1245 rev = ctx.rev()
1246 rev = ctx.rev()
1246 if rev in self.header:
1247 if rev in self.header:
1247 h = self.header[rev]
1248 h = self.header[rev]
1248 if h != self.lastheader:
1249 if h != self.lastheader:
1249 self.lastheader = h
1250 self.lastheader = h
1250 self.ui.write(h)
1251 self.ui.write(h)
1251 del self.header[rev]
1252 del self.header[rev]
1252 if rev in self.hunk:
1253 if rev in self.hunk:
1253 self.ui.write(self.hunk[rev])
1254 self.ui.write(self.hunk[rev])
1254 del self.hunk[rev]
1255 del self.hunk[rev]
1255 return 1
1256 return 1
1256 return 0
1257 return 0
1257
1258
1258 def close(self):
1259 def close(self):
1259 if self.footer:
1260 if self.footer:
1260 self.ui.write(self.footer)
1261 self.ui.write(self.footer)
1261
1262
1262 def show(self, ctx, copies=None, matchfn=None, **props):
1263 def show(self, ctx, copies=None, matchfn=None, **props):
1263 if self.buffered:
1264 if self.buffered:
1264 self.ui.pushbuffer(labeled=True)
1265 self.ui.pushbuffer(labeled=True)
1265 self._show(ctx, copies, matchfn, props)
1266 self._show(ctx, copies, matchfn, props)
1266 self.hunk[ctx.rev()] = self.ui.popbuffer()
1267 self.hunk[ctx.rev()] = self.ui.popbuffer()
1267 else:
1268 else:
1268 self._show(ctx, copies, matchfn, props)
1269 self._show(ctx, copies, matchfn, props)
1269
1270
1270 def _show(self, ctx, copies, matchfn, props):
1271 def _show(self, ctx, copies, matchfn, props):
1271 '''show a single changeset or file revision'''
1272 '''show a single changeset or file revision'''
1272 changenode = ctx.node()
1273 changenode = ctx.node()
1273 rev = ctx.rev()
1274 rev = ctx.rev()
1274 if self.ui.debugflag:
1275 if self.ui.debugflag:
1275 hexfunc = hex
1276 hexfunc = hex
1276 else:
1277 else:
1277 hexfunc = short
1278 hexfunc = short
1278 # as of now, wctx.node() and wctx.rev() return None, but we want to
1279 # as of now, wctx.node() and wctx.rev() return None, but we want to
1279 # show the same values as {node} and {rev} templatekw
1280 # show the same values as {node} and {rev} templatekw
1280 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1281 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1281
1282
1282 if self.ui.quiet:
1283 if self.ui.quiet:
1283 self.ui.write("%d:%s\n" % revnode, label='log.node')
1284 self.ui.write("%d:%s\n" % revnode, label='log.node')
1284 return
1285 return
1285
1286
1286 date = util.datestr(ctx.date())
1287 date = util.datestr(ctx.date())
1287
1288
1288 # i18n: column positioning for "hg log"
1289 # i18n: column positioning for "hg log"
1289 self.ui.write(_("changeset: %d:%s\n") % revnode,
1290 self.ui.write(_("changeset: %d:%s\n") % revnode,
1290 label='log.changeset changeset.%s' % ctx.phasestr())
1291 label='log.changeset changeset.%s' % ctx.phasestr())
1291
1292
1292 # branches are shown first before any other names due to backwards
1293 # branches are shown first before any other names due to backwards
1293 # compatibility
1294 # compatibility
1294 branch = ctx.branch()
1295 branch = ctx.branch()
1295 # don't show the default branch name
1296 # don't show the default branch name
1296 if branch != 'default':
1297 if branch != 'default':
1297 # i18n: column positioning for "hg log"
1298 # i18n: column positioning for "hg log"
1298 self.ui.write(_("branch: %s\n") % branch,
1299 self.ui.write(_("branch: %s\n") % branch,
1299 label='log.branch')
1300 label='log.branch')
1300
1301
1301 for nsname, ns in self.repo.names.iteritems():
1302 for nsname, ns in self.repo.names.iteritems():
1302 # branches has special logic already handled above, so here we just
1303 # branches has special logic already handled above, so here we just
1303 # skip it
1304 # skip it
1304 if nsname == 'branches':
1305 if nsname == 'branches':
1305 continue
1306 continue
1306 # we will use the templatename as the color name since those two
1307 # we will use the templatename as the color name since those two
1307 # should be the same
1308 # should be the same
1308 for name in ns.names(self.repo, changenode):
1309 for name in ns.names(self.repo, changenode):
1309 self.ui.write(ns.logfmt % name,
1310 self.ui.write(ns.logfmt % name,
1310 label='log.%s' % ns.colorname)
1311 label='log.%s' % ns.colorname)
1311 if self.ui.debugflag:
1312 if self.ui.debugflag:
1312 # i18n: column positioning for "hg log"
1313 # i18n: column positioning for "hg log"
1313 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1314 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1314 label='log.phase')
1315 label='log.phase')
1315 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1316 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1316 label = 'log.parent changeset.%s' % pctx.phasestr()
1317 label = 'log.parent changeset.%s' % pctx.phasestr()
1317 # i18n: column positioning for "hg log"
1318 # i18n: column positioning for "hg log"
1318 self.ui.write(_("parent: %d:%s\n")
1319 self.ui.write(_("parent: %d:%s\n")
1319 % (pctx.rev(), hexfunc(pctx.node())),
1320 % (pctx.rev(), hexfunc(pctx.node())),
1320 label=label)
1321 label=label)
1321
1322
1322 if self.ui.debugflag and rev is not None:
1323 if self.ui.debugflag and rev is not None:
1323 mnode = ctx.manifestnode()
1324 mnode = ctx.manifestnode()
1324 # i18n: column positioning for "hg log"
1325 # i18n: column positioning for "hg log"
1325 self.ui.write(_("manifest: %d:%s\n") %
1326 self.ui.write(_("manifest: %d:%s\n") %
1326 (self.repo.manifestlog._revlog.rev(mnode),
1327 (self.repo.manifestlog._revlog.rev(mnode),
1327 hex(mnode)),
1328 hex(mnode)),
1328 label='ui.debug log.manifest')
1329 label='ui.debug log.manifest')
1329 # i18n: column positioning for "hg log"
1330 # i18n: column positioning for "hg log"
1330 self.ui.write(_("user: %s\n") % ctx.user(),
1331 self.ui.write(_("user: %s\n") % ctx.user(),
1331 label='log.user')
1332 label='log.user')
1332 # i18n: column positioning for "hg log"
1333 # i18n: column positioning for "hg log"
1333 self.ui.write(_("date: %s\n") % date,
1334 self.ui.write(_("date: %s\n") % date,
1334 label='log.date')
1335 label='log.date')
1335
1336
1336 if self.ui.debugflag:
1337 if self.ui.debugflag:
1337 files = ctx.p1().status(ctx)[:3]
1338 files = ctx.p1().status(ctx)[:3]
1338 for key, value in zip([# i18n: column positioning for "hg log"
1339 for key, value in zip([# i18n: column positioning for "hg log"
1339 _("files:"),
1340 _("files:"),
1340 # i18n: column positioning for "hg log"
1341 # i18n: column positioning for "hg log"
1341 _("files+:"),
1342 _("files+:"),
1342 # i18n: column positioning for "hg log"
1343 # i18n: column positioning for "hg log"
1343 _("files-:")], files):
1344 _("files-:")], files):
1344 if value:
1345 if value:
1345 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1346 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1346 label='ui.debug log.files')
1347 label='ui.debug log.files')
1347 elif ctx.files() and self.ui.verbose:
1348 elif ctx.files() and self.ui.verbose:
1348 # i18n: column positioning for "hg log"
1349 # i18n: column positioning for "hg log"
1349 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1350 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1350 label='ui.note log.files')
1351 label='ui.note log.files')
1351 if copies and self.ui.verbose:
1352 if copies and self.ui.verbose:
1352 copies = ['%s (%s)' % c for c in copies]
1353 copies = ['%s (%s)' % c for c in copies]
1353 # i18n: column positioning for "hg log"
1354 # i18n: column positioning for "hg log"
1354 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1355 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1355 label='ui.note log.copies')
1356 label='ui.note log.copies')
1356
1357
1357 extra = ctx.extra()
1358 extra = ctx.extra()
1358 if extra and self.ui.debugflag:
1359 if extra and self.ui.debugflag:
1359 for key, value in sorted(extra.items()):
1360 for key, value in sorted(extra.items()):
1360 # i18n: column positioning for "hg log"
1361 # i18n: column positioning for "hg log"
1361 self.ui.write(_("extra: %s=%s\n")
1362 self.ui.write(_("extra: %s=%s\n")
1362 % (key, value.encode('string_escape')),
1363 % (key, value.encode('string_escape')),
1363 label='ui.debug log.extra')
1364 label='ui.debug log.extra')
1364
1365
1365 description = ctx.description().strip()
1366 description = ctx.description().strip()
1366 if description:
1367 if description:
1367 if self.ui.verbose:
1368 if self.ui.verbose:
1368 self.ui.write(_("description:\n"),
1369 self.ui.write(_("description:\n"),
1369 label='ui.note log.description')
1370 label='ui.note log.description')
1370 self.ui.write(description,
1371 self.ui.write(description,
1371 label='ui.note log.description')
1372 label='ui.note log.description')
1372 self.ui.write("\n\n")
1373 self.ui.write("\n\n")
1373 else:
1374 else:
1374 # i18n: column positioning for "hg log"
1375 # i18n: column positioning for "hg log"
1375 self.ui.write(_("summary: %s\n") %
1376 self.ui.write(_("summary: %s\n") %
1376 description.splitlines()[0],
1377 description.splitlines()[0],
1377 label='log.summary')
1378 label='log.summary')
1378 self.ui.write("\n")
1379 self.ui.write("\n")
1379
1380
1380 self.showpatch(ctx, matchfn)
1381 self.showpatch(ctx, matchfn)
1381
1382
1382 def showpatch(self, ctx, matchfn):
1383 def showpatch(self, ctx, matchfn):
1383 if not matchfn:
1384 if not matchfn:
1384 matchfn = self.matchfn
1385 matchfn = self.matchfn
1385 if matchfn:
1386 if matchfn:
1386 stat = self.diffopts.get('stat')
1387 stat = self.diffopts.get('stat')
1387 diff = self.diffopts.get('patch')
1388 diff = self.diffopts.get('patch')
1388 diffopts = patch.diffallopts(self.ui, self.diffopts)
1389 diffopts = patch.diffallopts(self.ui, self.diffopts)
1389 node = ctx.node()
1390 node = ctx.node()
1390 prev = ctx.p1().node()
1391 prev = ctx.p1().node()
1391 if stat:
1392 if stat:
1392 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1393 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1393 match=matchfn, stat=True)
1394 match=matchfn, stat=True)
1394 if diff:
1395 if diff:
1395 if stat:
1396 if stat:
1396 self.ui.write("\n")
1397 self.ui.write("\n")
1397 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1398 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1398 match=matchfn, stat=False)
1399 match=matchfn, stat=False)
1399 self.ui.write("\n")
1400 self.ui.write("\n")
1400
1401
1401 class jsonchangeset(changeset_printer):
1402 class jsonchangeset(changeset_printer):
1402 '''format changeset information.'''
1403 '''format changeset information.'''
1403
1404
1404 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1405 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1405 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1406 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1406 self.cache = {}
1407 self.cache = {}
1407 self._first = True
1408 self._first = True
1408
1409
1409 def close(self):
1410 def close(self):
1410 if not self._first:
1411 if not self._first:
1411 self.ui.write("\n]\n")
1412 self.ui.write("\n]\n")
1412 else:
1413 else:
1413 self.ui.write("[]\n")
1414 self.ui.write("[]\n")
1414
1415
1415 def _show(self, ctx, copies, matchfn, props):
1416 def _show(self, ctx, copies, matchfn, props):
1416 '''show a single changeset or file revision'''
1417 '''show a single changeset or file revision'''
1417 rev = ctx.rev()
1418 rev = ctx.rev()
1418 if rev is None:
1419 if rev is None:
1419 jrev = jnode = 'null'
1420 jrev = jnode = 'null'
1420 else:
1421 else:
1421 jrev = str(rev)
1422 jrev = str(rev)
1422 jnode = '"%s"' % hex(ctx.node())
1423 jnode = '"%s"' % hex(ctx.node())
1423 j = encoding.jsonescape
1424 j = encoding.jsonescape
1424
1425
1425 if self._first:
1426 if self._first:
1426 self.ui.write("[\n {")
1427 self.ui.write("[\n {")
1427 self._first = False
1428 self._first = False
1428 else:
1429 else:
1429 self.ui.write(",\n {")
1430 self.ui.write(",\n {")
1430
1431
1431 if self.ui.quiet:
1432 if self.ui.quiet:
1432 self.ui.write(('\n "rev": %s') % jrev)
1433 self.ui.write(('\n "rev": %s') % jrev)
1433 self.ui.write((',\n "node": %s') % jnode)
1434 self.ui.write((',\n "node": %s') % jnode)
1434 self.ui.write('\n }')
1435 self.ui.write('\n }')
1435 return
1436 return
1436
1437
1437 self.ui.write(('\n "rev": %s') % jrev)
1438 self.ui.write(('\n "rev": %s') % jrev)
1438 self.ui.write((',\n "node": %s') % jnode)
1439 self.ui.write((',\n "node": %s') % jnode)
1439 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1440 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1440 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1441 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1441 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1442 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1442 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1443 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1443 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1444 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1444
1445
1445 self.ui.write((',\n "bookmarks": [%s]') %
1446 self.ui.write((',\n "bookmarks": [%s]') %
1446 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1447 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1447 self.ui.write((',\n "tags": [%s]') %
1448 self.ui.write((',\n "tags": [%s]') %
1448 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1449 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1449 self.ui.write((',\n "parents": [%s]') %
1450 self.ui.write((',\n "parents": [%s]') %
1450 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1451 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1451
1452
1452 if self.ui.debugflag:
1453 if self.ui.debugflag:
1453 if rev is None:
1454 if rev is None:
1454 jmanifestnode = 'null'
1455 jmanifestnode = 'null'
1455 else:
1456 else:
1456 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1457 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1457 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1458 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1458
1459
1459 self.ui.write((',\n "extra": {%s}') %
1460 self.ui.write((',\n "extra": {%s}') %
1460 ", ".join('"%s": "%s"' % (j(k), j(v))
1461 ", ".join('"%s": "%s"' % (j(k), j(v))
1461 for k, v in ctx.extra().items()))
1462 for k, v in ctx.extra().items()))
1462
1463
1463 files = ctx.p1().status(ctx)
1464 files = ctx.p1().status(ctx)
1464 self.ui.write((',\n "modified": [%s]') %
1465 self.ui.write((',\n "modified": [%s]') %
1465 ", ".join('"%s"' % j(f) for f in files[0]))
1466 ", ".join('"%s"' % j(f) for f in files[0]))
1466 self.ui.write((',\n "added": [%s]') %
1467 self.ui.write((',\n "added": [%s]') %
1467 ", ".join('"%s"' % j(f) for f in files[1]))
1468 ", ".join('"%s"' % j(f) for f in files[1]))
1468 self.ui.write((',\n "removed": [%s]') %
1469 self.ui.write((',\n "removed": [%s]') %
1469 ", ".join('"%s"' % j(f) for f in files[2]))
1470 ", ".join('"%s"' % j(f) for f in files[2]))
1470
1471
1471 elif self.ui.verbose:
1472 elif self.ui.verbose:
1472 self.ui.write((',\n "files": [%s]') %
1473 self.ui.write((',\n "files": [%s]') %
1473 ", ".join('"%s"' % j(f) for f in ctx.files()))
1474 ", ".join('"%s"' % j(f) for f in ctx.files()))
1474
1475
1475 if copies:
1476 if copies:
1476 self.ui.write((',\n "copies": {%s}') %
1477 self.ui.write((',\n "copies": {%s}') %
1477 ", ".join('"%s": "%s"' % (j(k), j(v))
1478 ", ".join('"%s": "%s"' % (j(k), j(v))
1478 for k, v in copies))
1479 for k, v in copies))
1479
1480
1480 matchfn = self.matchfn
1481 matchfn = self.matchfn
1481 if matchfn:
1482 if matchfn:
1482 stat = self.diffopts.get('stat')
1483 stat = self.diffopts.get('stat')
1483 diff = self.diffopts.get('patch')
1484 diff = self.diffopts.get('patch')
1484 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1485 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1485 node, prev = ctx.node(), ctx.p1().node()
1486 node, prev = ctx.node(), ctx.p1().node()
1486 if stat:
1487 if stat:
1487 self.ui.pushbuffer()
1488 self.ui.pushbuffer()
1488 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1489 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1489 match=matchfn, stat=True)
1490 match=matchfn, stat=True)
1490 self.ui.write((',\n "diffstat": "%s"')
1491 self.ui.write((',\n "diffstat": "%s"')
1491 % j(self.ui.popbuffer()))
1492 % j(self.ui.popbuffer()))
1492 if diff:
1493 if diff:
1493 self.ui.pushbuffer()
1494 self.ui.pushbuffer()
1494 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1495 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1495 match=matchfn, stat=False)
1496 match=matchfn, stat=False)
1496 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1497 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1497
1498
1498 self.ui.write("\n }")
1499 self.ui.write("\n }")
1499
1500
1500 class changeset_templater(changeset_printer):
1501 class changeset_templater(changeset_printer):
1501 '''format changeset information.'''
1502 '''format changeset information.'''
1502
1503
1503 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1504 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1504 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1505 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1505 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1506 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1506 filters = {'formatnode': formatnode}
1507 filters = {'formatnode': formatnode}
1507 defaulttempl = {
1508 defaulttempl = {
1508 'parent': '{rev}:{node|formatnode} ',
1509 'parent': '{rev}:{node|formatnode} ',
1509 'manifest': '{rev}:{node|formatnode}',
1510 'manifest': '{rev}:{node|formatnode}',
1510 'file_copy': '{name} ({source})',
1511 'file_copy': '{name} ({source})',
1511 'extra': '{key}={value|stringescape}'
1512 'extra': '{key}={value|stringescape}'
1512 }
1513 }
1513 # filecopy is preserved for compatibility reasons
1514 # filecopy is preserved for compatibility reasons
1514 defaulttempl['filecopy'] = defaulttempl['file_copy']
1515 defaulttempl['filecopy'] = defaulttempl['file_copy']
1515 assert not (tmpl and mapfile)
1516 assert not (tmpl and mapfile)
1516 if mapfile:
1517 if mapfile:
1517 self.t = templater.templater.frommapfile(mapfile, filters=filters,
1518 self.t = templater.templater.frommapfile(mapfile, filters=filters,
1518 cache=defaulttempl)
1519 cache=defaulttempl)
1519 else:
1520 else:
1520 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1521 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1521 filters=filters,
1522 filters=filters,
1522 cache=defaulttempl)
1523 cache=defaulttempl)
1523
1524
1524 self.cache = {}
1525 self.cache = {}
1525
1526
1526 # find correct templates for current mode
1527 # find correct templates for current mode
1527 tmplmodes = [
1528 tmplmodes = [
1528 (True, None),
1529 (True, None),
1529 (self.ui.verbose, 'verbose'),
1530 (self.ui.verbose, 'verbose'),
1530 (self.ui.quiet, 'quiet'),
1531 (self.ui.quiet, 'quiet'),
1531 (self.ui.debugflag, 'debug'),
1532 (self.ui.debugflag, 'debug'),
1532 ]
1533 ]
1533
1534
1534 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1535 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1535 'docheader': '', 'docfooter': ''}
1536 'docheader': '', 'docfooter': ''}
1536 for mode, postfix in tmplmodes:
1537 for mode, postfix in tmplmodes:
1537 for t in self._parts:
1538 for t in self._parts:
1538 cur = t
1539 cur = t
1539 if postfix:
1540 if postfix:
1540 cur += "_" + postfix
1541 cur += "_" + postfix
1541 if mode and cur in self.t:
1542 if mode and cur in self.t:
1542 self._parts[t] = cur
1543 self._parts[t] = cur
1543
1544
1544 if self._parts['docheader']:
1545 if self._parts['docheader']:
1545 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1546 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1546
1547
1547 def close(self):
1548 def close(self):
1548 if self._parts['docfooter']:
1549 if self._parts['docfooter']:
1549 if not self.footer:
1550 if not self.footer:
1550 self.footer = ""
1551 self.footer = ""
1551 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1552 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1552 return super(changeset_templater, self).close()
1553 return super(changeset_templater, self).close()
1553
1554
1554 def _show(self, ctx, copies, matchfn, props):
1555 def _show(self, ctx, copies, matchfn, props):
1555 '''show a single changeset or file revision'''
1556 '''show a single changeset or file revision'''
1556 props = props.copy()
1557 props = props.copy()
1557 props.update(templatekw.keywords)
1558 props.update(templatekw.keywords)
1558 props['templ'] = self.t
1559 props['templ'] = self.t
1559 props['ctx'] = ctx
1560 props['ctx'] = ctx
1560 props['repo'] = self.repo
1561 props['repo'] = self.repo
1561 props['ui'] = self.repo.ui
1562 props['ui'] = self.repo.ui
1562 props['revcache'] = {'copies': copies}
1563 props['revcache'] = {'copies': copies}
1563 props['cache'] = self.cache
1564 props['cache'] = self.cache
1564
1565
1565 # write header
1566 # write header
1566 if self._parts['header']:
1567 if self._parts['header']:
1567 h = templater.stringify(self.t(self._parts['header'], **props))
1568 h = templater.stringify(self.t(self._parts['header'], **props))
1568 if self.buffered:
1569 if self.buffered:
1569 self.header[ctx.rev()] = h
1570 self.header[ctx.rev()] = h
1570 else:
1571 else:
1571 if self.lastheader != h:
1572 if self.lastheader != h:
1572 self.lastheader = h
1573 self.lastheader = h
1573 self.ui.write(h)
1574 self.ui.write(h)
1574
1575
1575 # write changeset metadata, then patch if requested
1576 # write changeset metadata, then patch if requested
1576 key = self._parts['changeset']
1577 key = self._parts['changeset']
1577 self.ui.write(templater.stringify(self.t(key, **props)))
1578 self.ui.write(templater.stringify(self.t(key, **props)))
1578 self.showpatch(ctx, matchfn)
1579 self.showpatch(ctx, matchfn)
1579
1580
1580 if self._parts['footer']:
1581 if self._parts['footer']:
1581 if not self.footer:
1582 if not self.footer:
1582 self.footer = templater.stringify(
1583 self.footer = templater.stringify(
1583 self.t(self._parts['footer'], **props))
1584 self.t(self._parts['footer'], **props))
1584
1585
1585 def gettemplate(ui, tmpl, style):
1586 def gettemplate(ui, tmpl, style):
1586 """
1587 """
1587 Find the template matching the given template spec or style.
1588 Find the template matching the given template spec or style.
1588 """
1589 """
1589
1590
1590 # ui settings
1591 # ui settings
1591 if not tmpl and not style: # templates are stronger than styles
1592 if not tmpl and not style: # templates are stronger than styles
1592 tmpl = ui.config('ui', 'logtemplate')
1593 tmpl = ui.config('ui', 'logtemplate')
1593 if tmpl:
1594 if tmpl:
1594 return templater.unquotestring(tmpl), None
1595 return templater.unquotestring(tmpl), None
1595 else:
1596 else:
1596 style = util.expandpath(ui.config('ui', 'style', ''))
1597 style = util.expandpath(ui.config('ui', 'style', ''))
1597
1598
1598 if not tmpl and style:
1599 if not tmpl and style:
1599 mapfile = style
1600 mapfile = style
1600 if not os.path.split(mapfile)[0]:
1601 if not os.path.split(mapfile)[0]:
1601 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1602 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1602 or templater.templatepath(mapfile))
1603 or templater.templatepath(mapfile))
1603 if mapname:
1604 if mapname:
1604 mapfile = mapname
1605 mapfile = mapname
1605 return None, mapfile
1606 return None, mapfile
1606
1607
1607 if not tmpl:
1608 if not tmpl:
1608 return None, None
1609 return None, None
1609
1610
1610 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1611 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1611
1612
1612 def show_changeset(ui, repo, opts, buffered=False):
1613 def show_changeset(ui, repo, opts, buffered=False):
1613 """show one changeset using template or regular display.
1614 """show one changeset using template or regular display.
1614
1615
1615 Display format will be the first non-empty hit of:
1616 Display format will be the first non-empty hit of:
1616 1. option 'template'
1617 1. option 'template'
1617 2. option 'style'
1618 2. option 'style'
1618 3. [ui] setting 'logtemplate'
1619 3. [ui] setting 'logtemplate'
1619 4. [ui] setting 'style'
1620 4. [ui] setting 'style'
1620 If all of these values are either unset or the empty string,
1621 If all of these values are either unset or the empty string,
1621 regular display via changeset_printer() is done.
1622 regular display via changeset_printer() is done.
1622 """
1623 """
1623 # options
1624 # options
1624 matchfn = None
1625 matchfn = None
1625 if opts.get('patch') or opts.get('stat'):
1626 if opts.get('patch') or opts.get('stat'):
1626 matchfn = scmutil.matchall(repo)
1627 matchfn = scmutil.matchall(repo)
1627
1628
1628 if opts.get('template') == 'json':
1629 if opts.get('template') == 'json':
1629 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1630 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1630
1631
1631 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1632 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1632
1633
1633 if not tmpl and not mapfile:
1634 if not tmpl and not mapfile:
1634 return changeset_printer(ui, repo, matchfn, opts, buffered)
1635 return changeset_printer(ui, repo, matchfn, opts, buffered)
1635
1636
1636 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
1637 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
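
The resolution order documented above can be exercised directly; a sketch
(assuming 'ui' and 'repo' exist) that forces a one-line log template and prints
the tip changeset:

    from mercurial import cmdutil

    ui.setconfig('ui', 'logtemplate', '{rev}:{node|short} {desc|firstline}\n')
    displayer = cmdutil.show_changeset(ui, repo, {})
    displayer.show(repo['tip'])
    displayer.close()
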
1637
1638
1638 def showmarker(fm, marker, index=None):
1639 def showmarker(fm, marker, index=None):
1639 """utility function to display obsolescence marker in a readable way
1640 """utility function to display obsolescence marker in a readable way
1640
1641
1641 To be used by debug functions."""
1642 To be used by debug functions."""
1642 if index is not None:
1643 if index is not None:
1643 fm.write('index', '%i ', index)
1644 fm.write('index', '%i ', index)
1644 fm.write('precnode', '%s ', hex(marker.precnode()))
1645 fm.write('precnode', '%s ', hex(marker.precnode()))
1645 succs = marker.succnodes()
1646 succs = marker.succnodes()
1646 fm.condwrite(succs, 'succnodes', '%s ',
1647 fm.condwrite(succs, 'succnodes', '%s ',
1647 fm.formatlist(map(hex, succs), name='node'))
1648 fm.formatlist(map(hex, succs), name='node'))
1648 fm.write('flag', '%X ', marker.flags())
1649 fm.write('flag', '%X ', marker.flags())
1649 parents = marker.parentnodes()
1650 parents = marker.parentnodes()
1650 if parents is not None:
1651 if parents is not None:
1651 fm.write('parentnodes', '{%s} ',
1652 fm.write('parentnodes', '{%s} ',
1652 fm.formatlist(map(hex, parents), name='node', sep=', '))
1653 fm.formatlist(map(hex, parents), name='node', sep=', '))
1653 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1654 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1654 meta = marker.metadata().copy()
1655 meta = marker.metadata().copy()
1655 meta.pop('date', None)
1656 meta.pop('date', None)
1656 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1657 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1657 fm.plain('\n')
1658 fm.plain('\n')
1658
1659
1659 def finddate(ui, repo, date):
1660 def finddate(ui, repo, date):
1660 """Find the tipmost changeset that matches the given date spec"""
1661 """Find the tipmost changeset that matches the given date spec"""
1661
1662
1662 df = util.matchdate(date)
1663 df = util.matchdate(date)
1663 m = scmutil.matchall(repo)
1664 m = scmutil.matchall(repo)
1664 results = {}
1665 results = {}
1665
1666
1666 def prep(ctx, fns):
1667 def prep(ctx, fns):
1667 d = ctx.date()
1668 d = ctx.date()
1668 if df(d[0]):
1669 if df(d[0]):
1669 results[ctx.rev()] = d
1670 results[ctx.rev()] = d
1670
1671
1671 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1672 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1672 rev = ctx.rev()
1673 rev = ctx.rev()
1673 if rev in results:
1674 if rev in results:
1674 ui.status(_("found revision %s from %s\n") %
1675 ui.status(_("found revision %s from %s\n") %
1675 (rev, util.datestr(results[rev])))
1676 (rev, util.datestr(results[rev])))
1676 return str(rev)
1677 return str(rev)
1677
1678
1678 raise error.Abort(_("revision matching date not found"))
1679 raise error.Abort(_("revision matching date not found"))
1679
1680
1680 def increasingwindows(windowsize=8, sizelimit=512):
1681 def increasingwindows(windowsize=8, sizelimit=512):
1681 while True:
1682 while True:
1682 yield windowsize
1683 yield windowsize
1683 if windowsize < sizelimit:
1684 if windowsize < sizelimit:
1684 windowsize *= 2
1685 windowsize *= 2
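
The window size doubles until it reaches sizelimit and is then repeated
forever, e.g.:

    from itertools import islice
    from mercurial import cmdutil

    list(islice(cmdutil.increasingwindows(), 8))
    # -> [8, 16, 32, 64, 128, 256, 512, 512]
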
1685
1686
1686 class FileWalkError(Exception):
1687 class FileWalkError(Exception):
1687 pass
1688 pass
1688
1689
1689 def walkfilerevs(repo, match, follow, revs, fncache):
1690 def walkfilerevs(repo, match, follow, revs, fncache):
1690 '''Walks the file history for the matched files.
1691 '''Walks the file history for the matched files.
1691
1692
1692 Returns the changeset revs that are involved in the file history.
1693 Returns the changeset revs that are involved in the file history.
1693
1694
1694 Throws FileWalkError if the file history can't be walked using
1695 Throws FileWalkError if the file history can't be walked using
1695 filelogs alone.
1696 filelogs alone.
1696 '''
1697 '''
1697 wanted = set()
1698 wanted = set()
1698 copies = []
1699 copies = []
1699 minrev, maxrev = min(revs), max(revs)
1700 minrev, maxrev = min(revs), max(revs)
1700 def filerevgen(filelog, last):
1701 def filerevgen(filelog, last):
1701 """
1702 """
1702 Only files, no patterns. Check the history of each file.
1703 Only files, no patterns. Check the history of each file.
1703
1704
1704 Examines filelog entries within the minrev, maxrev linkrev range.
1705 Examines filelog entries within the minrev, maxrev linkrev range.
1705 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1706 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1706 tuples in backwards order.
1707 tuples in backwards order.
1707 """
1708 """
1708 cl_count = len(repo)
1709 cl_count = len(repo)
1709 revs = []
1710 revs = []
1710 for j in xrange(0, last + 1):
1711 for j in xrange(0, last + 1):
1711 linkrev = filelog.linkrev(j)
1712 linkrev = filelog.linkrev(j)
1712 if linkrev < minrev:
1713 if linkrev < minrev:
1713 continue
1714 continue
1714 # only yield rev for which we have the changelog, it can
1715 # only yield rev for which we have the changelog, it can
1715 # happen while doing "hg log" during a pull or commit
1716 # happen while doing "hg log" during a pull or commit
1716 if linkrev >= cl_count:
1717 if linkrev >= cl_count:
1717 break
1718 break
1718
1719
1719 parentlinkrevs = []
1720 parentlinkrevs = []
1720 for p in filelog.parentrevs(j):
1721 for p in filelog.parentrevs(j):
1721 if p != nullrev:
1722 if p != nullrev:
1722 parentlinkrevs.append(filelog.linkrev(p))
1723 parentlinkrevs.append(filelog.linkrev(p))
1723 n = filelog.node(j)
1724 n = filelog.node(j)
1724 revs.append((linkrev, parentlinkrevs,
1725 revs.append((linkrev, parentlinkrevs,
1725 follow and filelog.renamed(n)))
1726 follow and filelog.renamed(n)))
1726
1727
1727 return reversed(revs)
1728 return reversed(revs)
1728 def iterfiles():
1729 def iterfiles():
1729 pctx = repo['.']
1730 pctx = repo['.']
1730 for filename in match.files():
1731 for filename in match.files():
1731 if follow:
1732 if follow:
1732 if filename not in pctx:
1733 if filename not in pctx:
1733 raise error.Abort(_('cannot follow file not in parent '
1734 raise error.Abort(_('cannot follow file not in parent '
1734 'revision: "%s"') % filename)
1735 'revision: "%s"') % filename)
1735 yield filename, pctx[filename].filenode()
1736 yield filename, pctx[filename].filenode()
1736 else:
1737 else:
1737 yield filename, None
1738 yield filename, None
1738 for filename_node in copies:
1739 for filename_node in copies:
1739 yield filename_node
1740 yield filename_node
1740
1741
1741 for file_, node in iterfiles():
1742 for file_, node in iterfiles():
1742 filelog = repo.file(file_)
1743 filelog = repo.file(file_)
1743 if not len(filelog):
1744 if not len(filelog):
1744 if node is None:
1745 if node is None:
1745 # A zero count may be a directory or deleted file, so
1746 # A zero count may be a directory or deleted file, so
1746 # try to find matching entries on the slow path.
1747 # try to find matching entries on the slow path.
1747 if follow:
1748 if follow:
1748 raise error.Abort(
1749 raise error.Abort(
1749 _('cannot follow nonexistent file: "%s"') % file_)
1750 _('cannot follow nonexistent file: "%s"') % file_)
1750 raise FileWalkError("Cannot walk via filelog")
1751 raise FileWalkError("Cannot walk via filelog")
1751 else:
1752 else:
1752 continue
1753 continue
1753
1754
1754 if node is None:
1755 if node is None:
1755 last = len(filelog) - 1
1756 last = len(filelog) - 1
1756 else:
1757 else:
1757 last = filelog.rev(node)
1758 last = filelog.rev(node)
1758
1759
1759 # keep track of all ancestors of the file
1760 # keep track of all ancestors of the file
1760 ancestors = set([filelog.linkrev(last)])
1761 ancestors = set([filelog.linkrev(last)])
1761
1762
1762 # iterate from latest to oldest revision
1763 # iterate from latest to oldest revision
1763 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1764 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1764 if not follow:
1765 if not follow:
1765 if rev > maxrev:
1766 if rev > maxrev:
1766 continue
1767 continue
1767 else:
1768 else:
1768 # Note that last might not be the first interesting
1769 # Note that last might not be the first interesting
1769 # rev to us:
1770 # rev to us:
1770 # if the file has been changed after maxrev, we'll
1771 # if the file has been changed after maxrev, we'll
1771 # have linkrev(last) > maxrev, and we still need
1772 # have linkrev(last) > maxrev, and we still need
1772 # to explore the file graph
1773 # to explore the file graph
1773 if rev not in ancestors:
1774 if rev not in ancestors:
1774 continue
1775 continue
1775 # XXX insert 1327 fix here
1776 # XXX insert 1327 fix here
1776 if flparentlinkrevs:
1777 if flparentlinkrevs:
1777 ancestors.update(flparentlinkrevs)
1778 ancestors.update(flparentlinkrevs)
1778
1779
1779 fncache.setdefault(rev, []).append(file_)
1780 fncache.setdefault(rev, []).append(file_)
1780 wanted.add(rev)
1781 wanted.add(rev)
1781 if copied:
1782 if copied:
1782 copies.append(copied)
1783 copies.append(copied)
1783
1784
1784 return wanted
1785 return wanted
1785
1786
1786 class _followfilter(object):
1787 class _followfilter(object):
1787 def __init__(self, repo, onlyfirst=False):
1788 def __init__(self, repo, onlyfirst=False):
1788 self.repo = repo
1789 self.repo = repo
1789 self.startrev = nullrev
1790 self.startrev = nullrev
1790 self.roots = set()
1791 self.roots = set()
1791 self.onlyfirst = onlyfirst
1792 self.onlyfirst = onlyfirst
1792
1793
1793 def match(self, rev):
1794 def match(self, rev):
1794 def realparents(rev):
1795 def realparents(rev):
1795 if self.onlyfirst:
1796 if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False

def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()

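# Illustrative usage sketch (not part of upstream cmdutil.py): a caller of
# walkchangerevs() typically builds a matcher with scmutil.match() and passes
# a 'prepare' callback that is invoked, in forward order, for every context in
# the current window before the contexts are yielded. The names 'ui', 'repo',
# the file pattern and the option values below are hypothetical stand-ins.
#
#     def prepare(ctx, fns):
#         # called once per windowed context, before it is yielded
#         ui.note('prefetching data for %d\n' % ctx.rev())
#
#     m = scmutil.match(repo[None], ['path/to/file'], {})
#     for ctx in walkchangerevs(repo, m, {'rev': ['tip:0']}, prepare):
#         ui.write('%d:%s\n' % (ctx.rev(), ctx))
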
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating revs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    fcacheready = [False]
    pctx = repo['.']

    def populate():
        for fn in files:
            fctx = pctx[fn]
            fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
            for c in fctx.ancestors(followfirst=followfirst):
                fcache.setdefault(c.rev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher

def _makenofollowlogfilematcher(repo, pats, opts):
    '''hook for extensions to override the filematcher for non-follow cases'''
    return None

def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match object filtering
    the files to be detailed when displaying the revision.
    """
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
        }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher

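# Worked example (illustrative, not from upstream): assuming the log command
# passes its options through unchanged (so 'user' arrives as a list), options
# equivalent to "--no-merges --user alice" with no file patterns make the loop
# over opt2revset above emit 'not merge()' and "(user('alice'))", which are
# joined into the revset string
#
#     (not merge() and (user('alice')))
#
# getlogrevs()/getgraphlogrevs() then hand that string to revset.match() to
# filter the candidate revisions.
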
def _logrevs(repo, opts):
    # Default --rev value depends on --follow but --follow behavior
    # depends on revisions resolved from --rev...
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and repo.dirstate.p1() == nullid:
        revs = revset.baseset()
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        revs = revset.spanset(repo)
        revs.reverse()
    return revs

def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match object
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        if not (revs.isdescending() or revs.istopo()):
            revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = revset.baseset(limitedrevs)

    return revs, expr, filematcher

def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match object
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        limitedrevs = []
        for idx, r in enumerate(revs):
            if limit <= idx:
                break
            limitedrevs.append(r)
        revs = revset.baseset(limitedrevs)

    return revs, expr, filematcher

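# Illustrative sketch (not part of upstream): a log-style command would
# typically consume the (revs, expr, filematcher) triple returned above along
# these lines; 'ui', 'repo', 'pats' and 'opts' are hypothetical stand-ins.
#
#     revs, expr, filematcher = getlogrevs(repo, pats, opts)
#     displayer = show_changeset(ui, repo, opts, buffered=False)
#     for rev in revs:
#         ctx = repo[rev]
#         matchfn = filematcher(rev) if filematcher else None
#         displayer.show(ctx, matchfn=matchfn)
#     displayer.close()
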
def _graphnodeformatter(ui, displayer):
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode # fast path for "{graphnode}"

    templ = formatter.gettemplater(ui, 'graphnode', spec)
    cache = {}
    if isinstance(displayer, changeset_templater):
        cache = displayer.cache # reuse cache of slow templates
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache
    def formatnode(repo, ctx):
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templater.stringify(templ('graphnode', **props))
    return formatnode

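# Illustrative configuration sketch (an assumption, not upstream
# documentation): the template consulted above lives in the [ui] section, so
# an hgrc entry such as
#
#     [ui]
#     graphnodetemplate = {if(bookmarks, "@", "o")}
#
# would draw bookmarked revisions with "@" instead of the default node symbol.
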
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

        # experimental config: experimental.graphshorten
        state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, state, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()

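# Illustrative configuration sketch (derived from the config keys read above,
# not from upstream documentation): the edge styles and graph shortening are
# controlled from the [experimental] section, e.g.
#
#     [experimental]
#     graphstyle.parent = |
#     graphstyle.grandparent = :
#     graphshorten = true
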
def graphlog(ui, repo, *pats, **opts):
    # Parameters are identical to log command ones
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)

def checkunsupportedgraphflags(pats, opts):
    for op in ["newest_first"]:
        if op in opts and opts[op]:
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % op.replace("_", "-"))

def graphrevs(repo, nodes, opts):
    limit = loglimit(opts)
    nodes.reverse()
    if limit is not None:
        nodes = nodes[:limit]
    return graphmod.nodes(repo, nodes)

def add(ui, repo, match, prefix, explicitonly, **opts):
    join = lambda f: os.path.join(prefix, f)
    bad = []

    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get('subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad

def forget(ui, repo, match, prefix, explicitonly):
    join = lambda f: os.path.join(prefix, f)
    bad = []
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot

def files(ui, ctx, m, fm, fmt, subrepos):
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret

def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True
    else:
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                                % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0
    for f in files:
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath + '/'):
                    return True
            return False

        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo() or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
            # missing files will generate a warning elsewhere
            ret = 1
    ui.progress(_('deleting'), None)

    if force:
        list = modified + deleted + clean + added
    elif after:
        list = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        list = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    list = sorted(list)
    total = len(list)
    count = 0
    for f in list:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in list:
                if f in added:
                    continue # we never unlink added files on remove
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(list)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret

def cat(ui, repo, ctx, matcher, prefix, **opts):
    err = 1

    def write(path):
        fp = makefileobj(repo, opts.get('output'), ctx.node(),
                         pathname=os.path.join(prefix, path))
        data = ctx[path].data()
        if opts.get('decode'):
            data = repo.wwritedata(path, data)
        fp.write(data)
        fp.close()

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                write(file)
                return 0
        except KeyError:
            pass

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, os.path.join(prefix, sub._path),
                           **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err

def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove'):
        if scmutil.addremove(repo, matcher, "", opts) != 0:
            raise error.Abort(
                _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)

def samefile(f, ctx1, ctx2):
    if f in ctx1.manifest():
        a = ctx1.filectx(f)
        if f in ctx2.manifest():
            b = ctx2.filectx(f)
            return (not a.cmp(b)
                    and a.flags() == b.flags())
        else:
            return False
    else:
        return f not in ctx2.manifest()

def amend(ui, repo, commitfunc, old, extra, pats, opts):
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                if old.p2:
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())
                files = [f for f in files if not samefile(f, ctx, base)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

            user = opts.get('user') or old.user()
            date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
2744 new = context.memctx(repo,
2744 parents=[base.node(), old.p2().node()],
2745 parents=[base.node(), old.p2().node()],
2745 text=message,
2746 text=message,
2746 files=files,
2747 files=files,
2747 filectxfn=filectxfn,
2748 filectxfn=filectxfn,
2748 user=user,
2749 user=user,
2749 date=date,
2750 date=date,
2750 extra=extra,
2751 extra=extra,
2751 editor=editor)
2752 editor=editor)
2752
2753
2753 newdesc = changelog.stripdesc(new.description())
2754 newdesc = changelog.stripdesc(new.description())
2754 if ((not node)
2755 if ((not node)
2755 and newdesc == old.description()
2756 and newdesc == old.description()
2756 and user == old.user()
2757 and user == old.user()
2757 and date == old.date()
2758 and date == old.date()
2758 and pureextra == old.extra()):
2759 and pureextra == old.extra()):
2759 # nothing changed. continuing here would create a new node
2760 # nothing changed. continuing here would create a new node
2760 # anyway because of the amend_source noise.
2761 # anyway because of the amend_source noise.
2761 #
2762 #
2762 # This is not what we expect from amend.
2763 # This is not what we expect from amend.
2763 return old.node()
2764 return old.node()
2764
2765
2765 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2766 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2766 try:
2767 try:
2767 if opts.get('secret'):
2768 if opts.get('secret'):
2768 commitphase = 'secret'
2769 commitphase = 'secret'
2769 else:
2770 else:
2770 commitphase = old.phase()
2771 commitphase = old.phase()
2771 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2772 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2772 newid = repo.commitctx(new)
2773 newid = repo.commitctx(new)
2773 finally:
2774 finally:
2774 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2775 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2775 if newid != old.node():
2776 if newid != old.node():
2776 # Reroute the working copy parent to the new changeset
2777 # Reroute the working copy parent to the new changeset
2777 repo.setparents(newid, nullid)
2778 repo.setparents(newid, nullid)
2778
2779
2779 # Move bookmarks from old parent to amend commit
2780 # Move bookmarks from old parent to amend commit
2780 bms = repo.nodebookmarks(old.node())
2781 bms = repo.nodebookmarks(old.node())
2781 if bms:
2782 if bms:
2782 marks = repo._bookmarks
2783 marks = repo._bookmarks
2783 for bm in bms:
2784 for bm in bms:
2784 ui.debug('moving bookmarks %r from %s to %s\n' %
2785 ui.debug('moving bookmarks %r from %s to %s\n' %
2785 (marks, old.hex(), hex(newid)))
2786 (marks, old.hex(), hex(newid)))
2786 marks[bm] = newid
2787 marks[bm] = newid
2787 marks.recordchange(tr)
2788 marks.recordchange(tr)
2788 # commit the whole amend process
2789 # commit the whole amend process
2789 if createmarkers:
2790 if createmarkers:
2790 # mark the new changeset as successor of the rewritten one
2791 # mark the new changeset as successor of the rewritten one
2791 new = repo[newid]
2792 new = repo[newid]
2792 obs = [(old, (new,))]
2793 obs = [(old, (new,))]
2793 if node:
2794 if node:
2794 obs.append((ctx, ()))
2795 obs.append((ctx, ()))
2795
2796
2796 obsolete.createmarkers(repo, obs)
2797 obsolete.createmarkers(repo, obs)
2797 if not createmarkers and newid != old.node():
2798 if not createmarkers and newid != old.node():
2798 # Strip the intermediate commit (if there was one) and the amended
2799 # Strip the intermediate commit (if there was one) and the amended
2799 # commit
2800 # commit
2800 if node:
2801 if node:
2801 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2802 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2802 ui.note(_('stripping amended changeset %s\n') % old)
2803 ui.note(_('stripping amended changeset %s\n') % old)
2803 repair.strip(ui, repo, old.node(), topic='amend-backup')
2804 repair.strip(ui, repo, old.node(), topic='amend-backup')
2804 finally:
2805 finally:
2805 lockmod.release(lock, wlock)
2806 lockmod.release(lock, wlock)
2806 return newid
2807 return newid
2807
2808
2808 def commiteditor(repo, ctx, subs, editform=''):
2809 def commiteditor(repo, ctx, subs, editform=''):
2809 if ctx.description():
2810 if ctx.description():
2810 return ctx.description()
2811 return ctx.description()
2811 return commitforceeditor(repo, ctx, subs, editform=editform,
2812 return commitforceeditor(repo, ctx, subs, editform=editform,
2812 unchangedmessagedetection=True)
2813 unchangedmessagedetection=True)
2813
2814
2814 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2815 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2815 editform='', unchangedmessagedetection=False):
2816 editform='', unchangedmessagedetection=False):
2816 if not extramsg:
2817 if not extramsg:
2817 extramsg = _("Leave message empty to abort commit.")
2818 extramsg = _("Leave message empty to abort commit.")
2818
2819
2819 forms = [e for e in editform.split('.') if e]
2820 forms = [e for e in editform.split('.') if e]
2820 forms.insert(0, 'changeset')
2821 forms.insert(0, 'changeset')
2821 templatetext = None
2822 templatetext = None
2822 while forms:
2823 while forms:
2823 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2824 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2824 if tmpl:
2825 if tmpl:
2825 templatetext = committext = buildcommittemplate(
2826 templatetext = committext = buildcommittemplate(
2826 repo, ctx, subs, extramsg, tmpl)
2827 repo, ctx, subs, extramsg, tmpl)
2827 break
2828 break
2828 forms.pop()
2829 forms.pop()
2829 else:
2830 else:
2830 committext = buildcommittext(repo, ctx, subs, extramsg)
2831 committext = buildcommittext(repo, ctx, subs, extramsg)
2831
2832
2832 # run editor in the repository root
2833 # run editor in the repository root
2833 olddir = os.getcwd()
2834 olddir = os.getcwd()
2834 os.chdir(repo.root)
2835 os.chdir(repo.root)
2835
2836
2836 # make in-memory changes visible to external process
2837 # make in-memory changes visible to external process
2837 tr = repo.currenttransaction()
2838 tr = repo.currenttransaction()
2838 repo.dirstate.write(tr)
2839 repo.dirstate.write(tr)
2839 pending = tr and tr.writepending() and repo.root
2840 pending = tr and tr.writepending() and repo.root
2840
2841
2841 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2842 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2842 editform=editform, pending=pending)
2843 editform=editform, pending=pending)
2843 text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
2844 text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
2844 os.chdir(olddir)
2845 os.chdir(olddir)
2845
2846
2846 if finishdesc:
2847 if finishdesc:
2847 text = finishdesc(text)
2848 text = finishdesc(text)
2848 if not text.strip():
2849 if not text.strip():
2849 raise error.Abort(_("empty commit message"))
2850 raise error.Abort(_("empty commit message"))
2850 if unchangedmessagedetection and editortext == templatetext:
2851 if unchangedmessagedetection and editortext == templatetext:
2851 raise error.Abort(_("commit message unchanged"))
2852 raise error.Abort(_("commit message unchanged"))
2852
2853
2853 return text
2854 return text
2854
2855
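As an aside, the `[committemplate]` lookup performed by the `while forms:` loop in `commitforceeditor` above tries increasingly generic keys. A minimal sketch (not part of cmdutil.py) of the resulting search order:

def committemplatekeys(editform):
    # mirrors the forms handling above: most specific key first
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    keys = []
    while forms:
        keys.append('.'.join(forms))
        forms.pop()
    return keys

# committemplatekeys('commit.amend')
# -> ['changeset.commit.amend', 'changeset.commit', 'changeset']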
2855 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2856 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2856 ui = repo.ui
2857 ui = repo.ui
2857 tmpl, mapfile = gettemplate(ui, tmpl, None)
2858 tmpl, mapfile = gettemplate(ui, tmpl, None)
2858
2859
2859 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2860 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2860
2861
2861 for k, v in repo.ui.configitems('committemplate'):
2862 for k, v in repo.ui.configitems('committemplate'):
2862 if k != 'changeset':
2863 if k != 'changeset':
2863 t.t.cache[k] = v
2864 t.t.cache[k] = v
2864
2865
2865 if not extramsg:
2866 if not extramsg:
2866 extramsg = '' # ensure that extramsg is string
2867 extramsg = '' # ensure that extramsg is string
2867
2868
2868 ui.pushbuffer()
2869 ui.pushbuffer()
2869 t.show(ctx, extramsg=extramsg)
2870 t.show(ctx, extramsg=extramsg)
2870 return ui.popbuffer()
2871 return ui.popbuffer()
2871
2872
2872 def hgprefix(msg):
2873 def hgprefix(msg):
2873 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2874 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2874
2875
2875 def buildcommittext(repo, ctx, subs, extramsg):
2876 def buildcommittext(repo, ctx, subs, extramsg):
2876 edittext = []
2877 edittext = []
2877 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2878 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2878 if ctx.description():
2879 if ctx.description():
2879 edittext.append(ctx.description())
2880 edittext.append(ctx.description())
2880 edittext.append("")
2881 edittext.append("")
2881 edittext.append("") # Empty line between message and comments.
2882 edittext.append("") # Empty line between message and comments.
2882 edittext.append(hgprefix(_("Enter commit message."
2883 edittext.append(hgprefix(_("Enter commit message."
2883 " Lines beginning with 'HG:' are removed.")))
2884 " Lines beginning with 'HG:' are removed.")))
2884 edittext.append(hgprefix(extramsg))
2885 edittext.append(hgprefix(extramsg))
2885 edittext.append("HG: --")
2886 edittext.append("HG: --")
2886 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2887 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2887 if ctx.p2():
2888 if ctx.p2():
2888 edittext.append(hgprefix(_("branch merge")))
2889 edittext.append(hgprefix(_("branch merge")))
2889 if ctx.branch():
2890 if ctx.branch():
2890 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2891 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2891 if bookmarks.isactivewdirparent(repo):
2892 if bookmarks.isactivewdirparent(repo):
2892 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2893 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2893 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2894 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2894 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2895 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2895 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2896 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2896 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2897 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2897 if not added and not modified and not removed:
2898 if not added and not modified and not removed:
2898 edittext.append(hgprefix(_("no files changed")))
2899 edittext.append(hgprefix(_("no files changed")))
2899 edittext.append("")
2900 edittext.append("")
2900
2901
2901 return "\n".join(edittext)
2902 return "\n".join(edittext)
2902
2903
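For illustration only (the sample message below is invented): the 'HG:' comment block built by `buildcommittext` above is later stripped by the regular expression in `commitforceeditor`:

import re

editortext = ("fix parser crash\n"
              "\n"
              "HG: Enter commit message. Lines beginning with 'HG:' are removed.\n"
              "HG: --\n"
              "HG: user: alice\n"
              "HG: changed parser.py\n")
# same substitution as in commitforceeditor
text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
assert text == "fix parser crash\n\n"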
2903 def commitstatus(repo, node, branch, bheads=None, opts=None):
2904 def commitstatus(repo, node, branch, bheads=None, opts=None):
2904 if opts is None:
2905 if opts is None:
2905 opts = {}
2906 opts = {}
2906 ctx = repo[node]
2907 ctx = repo[node]
2907 parents = ctx.parents()
2908 parents = ctx.parents()
2908
2909
2909 if (not opts.get('amend') and bheads and node not in bheads and not
2910 if (not opts.get('amend') and bheads and node not in bheads and not
2910 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2911 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2911 repo.ui.status(_('created new head\n'))
2912 repo.ui.status(_('created new head\n'))
2912 # The message is not printed for initial roots. For the other
2913 # The message is not printed for initial roots. For the other
2913 # changesets, it is printed in the following situations:
2914 # changesets, it is printed in the following situations:
2914 #
2915 #
2915 # Par column: for the 2 parents with ...
2916 # Par column: for the 2 parents with ...
2916 # N: null or no parent
2917 # N: null or no parent
2917 # B: parent is on another named branch
2918 # B: parent is on another named branch
2918 # C: parent is a regular non head changeset
2919 # C: parent is a regular non head changeset
2919 # H: parent was a branch head of the current branch
2920 # H: parent was a branch head of the current branch
2920 # Msg column: whether we print "created new head" message
2921 # Msg column: whether we print "created new head" message
2921 # In the following, it is assumed that there already exists some
2922 # In the following, it is assumed that there already exists some
2922 # initial branch heads of the current branch, otherwise nothing is
2923 # initial branch heads of the current branch, otherwise nothing is
2923 # printed anyway.
2924 # printed anyway.
2924 #
2925 #
2925 # Par Msg Comment
2926 # Par Msg Comment
2926 # N N y additional topo root
2927 # N N y additional topo root
2927 #
2928 #
2928 # B N y additional branch root
2929 # B N y additional branch root
2929 # C N y additional topo head
2930 # C N y additional topo head
2930 # H N n usual case
2931 # H N n usual case
2931 #
2932 #
2932 # B B y weird additional branch root
2933 # B B y weird additional branch root
2933 # C B y branch merge
2934 # C B y branch merge
2934 # H B n merge with named branch
2935 # H B n merge with named branch
2935 #
2936 #
2936 # C C y additional head from merge
2937 # C C y additional head from merge
2937 # C H n merge with a head
2938 # C H n merge with a head
2938 #
2939 #
2939 # H H n head merge: head count decreases
2940 # H H n head merge: head count decreases
2940
2941
2941 if not opts.get('close_branch'):
2942 if not opts.get('close_branch'):
2942 for r in parents:
2943 for r in parents:
2943 if r.closesbranch() and r.branch() == branch:
2944 if r.closesbranch() and r.branch() == branch:
2944 repo.ui.status(_('reopening closed branch head %d\n') % r)
2945 repo.ui.status(_('reopening closed branch head %d\n') % r)
2945
2946
2946 if repo.ui.debugflag:
2947 if repo.ui.debugflag:
2947 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2948 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2948 elif repo.ui.verbose:
2949 elif repo.ui.verbose:
2949 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2950 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2950
2951
2951 def postcommitstatus(repo, pats, opts):
2952 def postcommitstatus(repo, pats, opts):
2952 return repo.status(match=scmutil.match(repo[None], pats, opts))
2953 return repo.status(match=scmutil.match(repo[None], pats, opts))
2953
2954
2954 def revert(ui, repo, ctx, parents, *pats, **opts):
2955 def revert(ui, repo, ctx, parents, *pats, **opts):
2955 parent, p2 = parents
2956 parent, p2 = parents
2956 node = ctx.node()
2957 node = ctx.node()
2957
2958
2958 mf = ctx.manifest()
2959 mf = ctx.manifest()
2959 if node == p2:
2960 if node == p2:
2960 parent = p2
2961 parent = p2
2961
2962
2962 # need all matching names in dirstate and manifest of target rev,
2963 # need all matching names in dirstate and manifest of target rev,
2963 # so have to walk both. do not print errors if files exist in one
2964 # so have to walk both. do not print errors if files exist in one
2964 # but not other. in both cases, filesets should be evaluated against
2965 # but not other. in both cases, filesets should be evaluated against
2965 # workingctx to get consistent result (issue4497). this means 'set:**'
2966 # workingctx to get consistent result (issue4497). this means 'set:**'
2966 # cannot be used to select missing files from target rev.
2967 # cannot be used to select missing files from target rev.
2967
2968
2968 # `names` is a mapping for all elements in working copy and target revision
2969 # `names` is a mapping for all elements in working copy and target revision
2969 # The mapping is in the form:
2970 # The mapping is in the form:
2970 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2971 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2971 names = {}
2972 names = {}
2972
2973
2973 with repo.wlock():
2974 with repo.wlock():
2974 ## filling of the `names` mapping
2975 ## filling of the `names` mapping
2975 # walk dirstate to fill `names`
2976 # walk dirstate to fill `names`
2976
2977
2977 interactive = opts.get('interactive', False)
2978 interactive = opts.get('interactive', False)
2978 wctx = repo[None]
2979 wctx = repo[None]
2979 m = scmutil.match(wctx, pats, opts)
2980 m = scmutil.match(wctx, pats, opts)
2980
2981
2981 # we'll need this later
2982 # we'll need this later
2982 targetsubs = sorted(s for s in wctx.substate if m(s))
2983 targetsubs = sorted(s for s in wctx.substate if m(s))
2983
2984
2984 if not m.always():
2985 if not m.always():
2985 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2986 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2986 names[abs] = m.rel(abs), m.exact(abs)
2987 names[abs] = m.rel(abs), m.exact(abs)
2987
2988
2988 # walk target manifest to fill `names`
2989 # walk target manifest to fill `names`
2989
2990
2990 def badfn(path, msg):
2991 def badfn(path, msg):
2991 if path in names:
2992 if path in names:
2992 return
2993 return
2993 if path in ctx.substate:
2994 if path in ctx.substate:
2994 return
2995 return
2995 path_ = path + '/'
2996 path_ = path + '/'
2996 for f in names:
2997 for f in names:
2997 if f.startswith(path_):
2998 if f.startswith(path_):
2998 return
2999 return
2999 ui.warn("%s: %s\n" % (m.rel(path), msg))
3000 ui.warn("%s: %s\n" % (m.rel(path), msg))
3000
3001
3001 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3002 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3002 if abs not in names:
3003 if abs not in names:
3003 names[abs] = m.rel(abs), m.exact(abs)
3004 names[abs] = m.rel(abs), m.exact(abs)
3004
3005
3005 # Find status of all files in `names`.
3006 # Find status of all files in `names`.
3006 m = scmutil.matchfiles(repo, names)
3007 m = scmutil.matchfiles(repo, names)
3007
3008
3008 changes = repo.status(node1=node, match=m,
3009 changes = repo.status(node1=node, match=m,
3009 unknown=True, ignored=True, clean=True)
3010 unknown=True, ignored=True, clean=True)
3010 else:
3011 else:
3011 changes = repo.status(node1=node, match=m)
3012 changes = repo.status(node1=node, match=m)
3012 for kind in changes:
3013 for kind in changes:
3013 for abs in kind:
3014 for abs in kind:
3014 names[abs] = m.rel(abs), m.exact(abs)
3015 names[abs] = m.rel(abs), m.exact(abs)
3015
3016
3016 m = scmutil.matchfiles(repo, names)
3017 m = scmutil.matchfiles(repo, names)
3017
3018
3018 modified = set(changes.modified)
3019 modified = set(changes.modified)
3019 added = set(changes.added)
3020 added = set(changes.added)
3020 removed = set(changes.removed)
3021 removed = set(changes.removed)
3021 _deleted = set(changes.deleted)
3022 _deleted = set(changes.deleted)
3022 unknown = set(changes.unknown)
3023 unknown = set(changes.unknown)
3023 unknown.update(changes.ignored)
3024 unknown.update(changes.ignored)
3024 clean = set(changes.clean)
3025 clean = set(changes.clean)
3025 modadded = set()
3026 modadded = set()
3026
3027
3027 # split between files known in target manifest and the others
3028 # split between files known in target manifest and the others
3028 smf = set(mf)
3029 smf = set(mf)
3029
3030
3030 # determine the exact nature of the deleted changesets
3031 # determine the exact nature of the deleted changesets
3031 deladded = _deleted - smf
3032 deladded = _deleted - smf
3032 deleted = _deleted - deladded
3033 deleted = _deleted - deladded
3033
3034
3034 # We need to account for the state of the file in the dirstate,
3035 # We need to account for the state of the file in the dirstate,
3035 # even when we revert against something else than parent. This will
3036 # even when we revert against something else than parent. This will
3036 # slightly alter the behavior of revert (doing back up or not, delete
3037 # slightly alter the behavior of revert (doing back up or not, delete
3037 # or just forget etc).
3038 # or just forget etc).
3038 if parent == node:
3039 if parent == node:
3039 dsmodified = modified
3040 dsmodified = modified
3040 dsadded = added
3041 dsadded = added
3041 dsremoved = removed
3042 dsremoved = removed
3042 # store all local modifications, useful later for rename detection
3043 # store all local modifications, useful later for rename detection
3043 localchanges = dsmodified | dsadded
3044 localchanges = dsmodified | dsadded
3044 modified, added, removed = set(), set(), set()
3045 modified, added, removed = set(), set(), set()
3045 else:
3046 else:
3046 changes = repo.status(node1=parent, match=m)
3047 changes = repo.status(node1=parent, match=m)
3047 dsmodified = set(changes.modified)
3048 dsmodified = set(changes.modified)
3048 dsadded = set(changes.added)
3049 dsadded = set(changes.added)
3049 dsremoved = set(changes.removed)
3050 dsremoved = set(changes.removed)
3050 # store all local modifications, useful later for rename detection
3051 # store all local modifications, useful later for rename detection
3051 localchanges = dsmodified | dsadded
3052 localchanges = dsmodified | dsadded
3052
3053
3053 # only take into account for removes between wc and target
3054 # only take into account for removes between wc and target
3054 clean |= dsremoved - removed
3055 clean |= dsremoved - removed
3055 dsremoved &= removed
3056 dsremoved &= removed
3056 # distinguish between dirstate removes and the others
3057 # distinguish between dirstate removes and the others
3057 removed -= dsremoved
3058 removed -= dsremoved
3058
3059
3059 modadded = added & dsmodified
3060 modadded = added & dsmodified
3060 added -= modadded
3061 added -= modadded
3061
3062
3062 # tell newly modified files apart.
3063 # tell newly modified files apart.
3063 dsmodified &= modified
3064 dsmodified &= modified
3064 dsmodified |= modified & dsadded # dirstate added may need backup
3065 dsmodified |= modified & dsadded # dirstate added may need backup
3065 modified -= dsmodified
3066 modified -= dsmodified
3066
3067
3067 # We need to wait for some post-processing to update this set
3068 # We need to wait for some post-processing to update this set
3068 # before making the distinction. The dirstate will be used for
3069 # before making the distinction. The dirstate will be used for
3069 # that purpose.
3070 # that purpose.
3070 dsadded = added
3071 dsadded = added
3071
3072
3072 # in case of merge, files that are actually added can be reported as
3073 # in case of merge, files that are actually added can be reported as
3073 # modified, we need to post process the result
3074 # modified, we need to post process the result
3074 if p2 != nullid:
3075 if p2 != nullid:
3075 mergeadd = dsmodified - smf
3076 mergeadd = dsmodified - smf
3076 dsadded |= mergeadd
3077 dsadded |= mergeadd
3077 dsmodified -= mergeadd
3078 dsmodified -= mergeadd
3078
3079
3079 # if f is a rename, update `names` to also revert the source
3080 # if f is a rename, update `names` to also revert the source
3080 cwd = repo.getcwd()
3081 cwd = repo.getcwd()
3081 for f in localchanges:
3082 for f in localchanges:
3082 src = repo.dirstate.copied(f)
3083 src = repo.dirstate.copied(f)
3083 # XXX should we check for rename down to target node?
3084 # XXX should we check for rename down to target node?
3084 if src and src not in names and repo.dirstate[src] == 'r':
3085 if src and src not in names and repo.dirstate[src] == 'r':
3085 dsremoved.add(src)
3086 dsremoved.add(src)
3086 names[src] = (repo.pathto(src, cwd), True)
3087 names[src] = (repo.pathto(src, cwd), True)
3087
3088
3088 # distinguish between file to forget and the other
3089 # distinguish between file to forget and the other
3089 added = set()
3090 added = set()
3090 for abs in dsadded:
3091 for abs in dsadded:
3091 if repo.dirstate[abs] != 'a':
3092 if repo.dirstate[abs] != 'a':
3092 added.add(abs)
3093 added.add(abs)
3093 dsadded -= added
3094 dsadded -= added
3094
3095
3095 for abs in deladded:
3096 for abs in deladded:
3096 if repo.dirstate[abs] == 'a':
3097 if repo.dirstate[abs] == 'a':
3097 dsadded.add(abs)
3098 dsadded.add(abs)
3098 deladded -= dsadded
3099 deladded -= dsadded
3099
3100
3100 # For files marked as removed, we check if an unknown file is present at
3101 # For files marked as removed, we check if an unknown file is present at
3101 # the same path. If such a file exists it may need to be backed up.
3102 # the same path. If such a file exists it may need to be backed up.
3102 # Making the distinction at this stage helps have simpler backup
3103 # Making the distinction at this stage helps have simpler backup
3103 # logic.
3104 # logic.
3104 removunk = set()
3105 removunk = set()
3105 for abs in removed:
3106 for abs in removed:
3106 target = repo.wjoin(abs)
3107 target = repo.wjoin(abs)
3107 if os.path.lexists(target):
3108 if os.path.lexists(target):
3108 removunk.add(abs)
3109 removunk.add(abs)
3109 removed -= removunk
3110 removed -= removunk
3110
3111
3111 dsremovunk = set()
3112 dsremovunk = set()
3112 for abs in dsremoved:
3113 for abs in dsremoved:
3113 target = repo.wjoin(abs)
3114 target = repo.wjoin(abs)
3114 if os.path.lexists(target):
3115 if os.path.lexists(target):
3115 dsremovunk.add(abs)
3116 dsremovunk.add(abs)
3116 dsremoved -= dsremovunk
3117 dsremoved -= dsremovunk
3117
3118
3118 # action to be actually performed by revert
3119 # action to be actually performed by revert
3119 # (<list of files>, <message>) tuple
3120 # (<list of files>, <message>) tuple
3120 actions = {'revert': ([], _('reverting %s\n')),
3121 actions = {'revert': ([], _('reverting %s\n')),
3121 'add': ([], _('adding %s\n')),
3122 'add': ([], _('adding %s\n')),
3122 'remove': ([], _('removing %s\n')),
3123 'remove': ([], _('removing %s\n')),
3123 'drop': ([], _('removing %s\n')),
3124 'drop': ([], _('removing %s\n')),
3124 'forget': ([], _('forgetting %s\n')),
3125 'forget': ([], _('forgetting %s\n')),
3125 'undelete': ([], _('undeleting %s\n')),
3126 'undelete': ([], _('undeleting %s\n')),
3126 'noop': (None, _('no changes needed to %s\n')),
3127 'noop': (None, _('no changes needed to %s\n')),
3127 'unknown': (None, _('file not managed: %s\n')),
3128 'unknown': (None, _('file not managed: %s\n')),
3128 }
3129 }
3129
3130
3130 # "constant" that convey the backup strategy.
3131 # "constant" that convey the backup strategy.
3131 # All set to `discard` if `no-backup` is set do avoid checking
3132 # All set to `discard` if `no-backup` is set do avoid checking
3132 # no_backup lower in the code.
3133 # no_backup lower in the code.
3133 # These values are ordered for comparison purposes
3134 # These values are ordered for comparison purposes
3134 backupinteractive = 3 # do backup if interactively modified
3135 backupinteractive = 3 # do backup if interactively modified
3135 backup = 2 # unconditionally do backup
3136 backup = 2 # unconditionally do backup
3136 check = 1 # check if the existing file differs from target
3137 check = 1 # check if the existing file differs from target
3137 discard = 0 # never do backup
3138 discard = 0 # never do backup
3138 if opts.get('no_backup'):
3139 if opts.get('no_backup'):
3139 backupinteractive = backup = check = discard
3140 backupinteractive = backup = check = discard
3140 if interactive:
3141 if interactive:
3141 dsmodifiedbackup = backupinteractive
3142 dsmodifiedbackup = backupinteractive
3142 else:
3143 else:
3143 dsmodifiedbackup = backup
3144 dsmodifiedbackup = backup
3144 tobackup = set()
3145 tobackup = set()
3145
3146
3146 backupanddel = actions['remove']
3147 backupanddel = actions['remove']
3147 if not opts.get('no_backup'):
3148 if not opts.get('no_backup'):
3148 backupanddel = actions['drop']
3149 backupanddel = actions['drop']
3149
3150
3150 disptable = (
3151 disptable = (
3151 # dispatch table:
3152 # dispatch table:
3152 # file state
3153 # file state
3153 # action
3154 # action
3154 # make backup
3155 # make backup
3155
3156
3156 ## Sets whose results will change files on disk
3157 ## Sets whose results will change files on disk
3157 # Modified compared to target, no local change
3158 # Modified compared to target, no local change
3158 (modified, actions['revert'], discard),
3159 (modified, actions['revert'], discard),
3159 # Modified compared to target, but local file is deleted
3160 # Modified compared to target, but local file is deleted
3160 (deleted, actions['revert'], discard),
3161 (deleted, actions['revert'], discard),
3161 # Modified compared to target, local change
3162 # Modified compared to target, local change
3162 (dsmodified, actions['revert'], dsmodifiedbackup),
3163 (dsmodified, actions['revert'], dsmodifiedbackup),
3163 # Added since target
3164 # Added since target
3164 (added, actions['remove'], discard),
3165 (added, actions['remove'], discard),
3165 # Added in working directory
3166 # Added in working directory
3166 (dsadded, actions['forget'], discard),
3167 (dsadded, actions['forget'], discard),
3167 # Added since target, have local modification
3168 # Added since target, have local modification
3168 (modadded, backupanddel, backup),
3169 (modadded, backupanddel, backup),
3169 # Added since target but file is missing in working directory
3170 # Added since target but file is missing in working directory
3170 (deladded, actions['drop'], discard),
3171 (deladded, actions['drop'], discard),
3171 # Removed since target, before working copy parent
3172 # Removed since target, before working copy parent
3172 (removed, actions['add'], discard),
3173 (removed, actions['add'], discard),
3173 # Same as `removed` but an unknown file exists at the same path
3174 # Same as `removed` but an unknown file exists at the same path
3174 (removunk, actions['add'], check),
3175 (removunk, actions['add'], check),
3175 # Removed since target, marked as such in working copy parent
3176 # Removed since target, marked as such in working copy parent
3176 (dsremoved, actions['undelete'], discard),
3177 (dsremoved, actions['undelete'], discard),
3177 # Same as `dsremoved` but an unknown file exists at the same path
3178 # Same as `dsremoved` but an unknown file exists at the same path
3178 (dsremovunk, actions['undelete'], check),
3179 (dsremovunk, actions['undelete'], check),
3179 ## the following sets do not result in any file changes
3180 ## the following sets do not result in any file changes
3180 # File with no modification
3181 # File with no modification
3181 (clean, actions['noop'], discard),
3182 (clean, actions['noop'], discard),
3182 # Existing file, not tracked anywhere
3183 # Existing file, not tracked anywhere
3183 (unknown, actions['unknown'], discard),
3184 (unknown, actions['unknown'], discard),
3184 )
3185 )
3185
3186
3186 for abs, (rel, exact) in sorted(names.items()):
3187 for abs, (rel, exact) in sorted(names.items()):
3187 # target file to be touched on disk (relative to cwd)
3188 # target file to be touched on disk (relative to cwd)
3188 target = repo.wjoin(abs)
3189 target = repo.wjoin(abs)
3189 # search the entry in the dispatch table.
3190 # search the entry in the dispatch table.
3190 # if the file is in any of these sets, it was touched in the working
3191 # if the file is in any of these sets, it was touched in the working
3191 # directory parent and we are sure it needs to be reverted.
3192 # directory parent and we are sure it needs to be reverted.
3192 for table, (xlist, msg), dobackup in disptable:
3193 for table, (xlist, msg), dobackup in disptable:
3193 if abs not in table:
3194 if abs not in table:
3194 continue
3195 continue
3195 if xlist is not None:
3196 if xlist is not None:
3196 xlist.append(abs)
3197 xlist.append(abs)
3197 if dobackup:
3198 if dobackup:
3198 # If in interactive mode, don't automatically create
3199 # If in interactive mode, don't automatically create
3199 # .orig files (issue4793)
3200 # .orig files (issue4793)
3200 if dobackup == backupinteractive:
3201 if dobackup == backupinteractive:
3201 tobackup.add(abs)
3202 tobackup.add(abs)
3202 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3203 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3203 bakname = scmutil.origpath(ui, repo, rel)
3204 bakname = scmutil.origpath(ui, repo, rel)
3204 ui.note(_('saving current version of %s as %s\n') %
3205 ui.note(_('saving current version of %s as %s\n') %
3205 (rel, bakname))
3206 (rel, bakname))
3206 if not opts.get('dry_run'):
3207 if not opts.get('dry_run'):
3207 if interactive:
3208 if interactive:
3208 util.copyfile(target, bakname)
3209 util.copyfile(target, bakname)
3209 else:
3210 else:
3210 util.rename(target, bakname)
3211 util.rename(target, bakname)
3211 if ui.verbose or not exact:
3212 if ui.verbose or not exact:
3212 if not isinstance(msg, basestring):
3213 if not isinstance(msg, basestring):
3213 msg = msg(abs)
3214 msg = msg(abs)
3214 ui.status(msg % rel)
3215 ui.status(msg % rel)
3215 elif exact:
3216 elif exact:
3216 ui.warn(msg % rel)
3217 ui.warn(msg % rel)
3217 break
3218 break
3218
3219
3219 if not opts.get('dry_run'):
3220 if not opts.get('dry_run'):
3220 needdata = ('revert', 'add', 'undelete')
3221 needdata = ('revert', 'add', 'undelete')
3221 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3222 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3222 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3223 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3223
3224
3224 if targetsubs:
3225 if targetsubs:
3225 # Revert the subrepos on the revert list
3226 # Revert the subrepos on the revert list
3226 for sub in targetsubs:
3227 for sub in targetsubs:
3227 try:
3228 try:
3228 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3229 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3229 except KeyError:
3230 except KeyError:
3230 raise error.Abort("subrepository '%s' does not exist in %s!"
3231 raise error.Abort("subrepository '%s' does not exist in %s!"
3231 % (sub, short(ctx.node())))
3232 % (sub, short(ctx.node())))
3232
3233
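A hedged sketch of how a caller (such as the revert command in commands.py) typically invokes the helper above; the wrapper name and options here are made up:

from mercurial import cmdutil, scmutil

def revert_to_rev(ui, repo, *pats, **opts):
    # revert the named patterns to the requested revision ('.' by default)
    ctx = scmutil.revsingle(repo, opts.get('rev'), default='.')
    parent, p2 = repo.dirstate.parents()
    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)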
3233 def _revertprefetch(repo, ctx, *files):
3234 def _revertprefetch(repo, ctx, *files):
3234 """Let extension changing the storage layer prefetch content"""
3235 """Let extension changing the storage layer prefetch content"""
3235 pass
3236 pass
3236
3237
3237 def _performrevert(repo, parents, ctx, actions, interactive=False,
3238 def _performrevert(repo, parents, ctx, actions, interactive=False,
3238 tobackup=None):
3239 tobackup=None):
3239 """function that actually perform all the actions computed for revert
3240 """function that actually perform all the actions computed for revert
3240
3241
3241 This is an independent function to let extension to plug in and react to
3242 This is an independent function to let extension to plug in and react to
3242 the imminent revert.
3243 the imminent revert.
3243
3244
3244 Make sure you have the working directory locked when calling this function.
3245 Make sure you have the working directory locked when calling this function.
3245 """
3246 """
3246 parent, p2 = parents
3247 parent, p2 = parents
3247 node = ctx.node()
3248 node = ctx.node()
3248 excluded_files = []
3249 excluded_files = []
3249 matcher_opts = {"exclude": excluded_files}
3250 matcher_opts = {"exclude": excluded_files}
3250
3251
3251 def checkout(f):
3252 def checkout(f):
3252 fc = ctx[f]
3253 fc = ctx[f]
3253 repo.wwrite(f, fc.data(), fc.flags())
3254 repo.wwrite(f, fc.data(), fc.flags())
3254
3255
3255 audit_path = pathutil.pathauditor(repo.root)
3256 audit_path = pathutil.pathauditor(repo.root)
3256 for f in actions['forget'][0]:
3257 for f in actions['forget'][0]:
3257 if interactive:
3258 if interactive:
3258 choice = \
3259 choice = \
3259 repo.ui.promptchoice(
3260 repo.ui.promptchoice(
3260 _("forget added file %s (yn)?$$ &Yes $$ &No")
3261 _("forget added file %s (yn)?$$ &Yes $$ &No")
3261 % f)
3262 % f)
3262 if choice == 0:
3263 if choice == 0:
3263 repo.dirstate.drop(f)
3264 repo.dirstate.drop(f)
3264 else:
3265 else:
3265 excluded_files.append(repo.wjoin(f))
3266 excluded_files.append(repo.wjoin(f))
3266 else:
3267 else:
3267 repo.dirstate.drop(f)
3268 repo.dirstate.drop(f)
3268 for f in actions['remove'][0]:
3269 for f in actions['remove'][0]:
3269 audit_path(f)
3270 audit_path(f)
3270 try:
3271 try:
3271 util.unlinkpath(repo.wjoin(f))
3272 util.unlinkpath(repo.wjoin(f))
3272 except OSError:
3273 except OSError:
3273 pass
3274 pass
3274 repo.dirstate.remove(f)
3275 repo.dirstate.remove(f)
3275 for f in actions['drop'][0]:
3276 for f in actions['drop'][0]:
3276 audit_path(f)
3277 audit_path(f)
3277 repo.dirstate.remove(f)
3278 repo.dirstate.remove(f)
3278
3279
3279 normal = None
3280 normal = None
3280 if node == parent:
3281 if node == parent:
3281 # We're reverting to our parent. If possible, we'd like status
3282 # We're reverting to our parent. If possible, we'd like status
3282 # to report the file as clean. We have to use normallookup for
3283 # to report the file as clean. We have to use normallookup for
3283 # merges to avoid losing information about merged/dirty files.
3284 # merges to avoid losing information about merged/dirty files.
3284 if p2 != nullid:
3285 if p2 != nullid:
3285 normal = repo.dirstate.normallookup
3286 normal = repo.dirstate.normallookup
3286 else:
3287 else:
3287 normal = repo.dirstate.normal
3288 normal = repo.dirstate.normal
3288
3289
3289 newlyaddedandmodifiedfiles = set()
3290 newlyaddedandmodifiedfiles = set()
3290 if interactive:
3291 if interactive:
3291 # Prompt the user for changes to revert
3292 # Prompt the user for changes to revert
3292 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3293 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3293 m = scmutil.match(ctx, torevert, matcher_opts)
3294 m = scmutil.match(ctx, torevert, matcher_opts)
3294 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3295 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3295 diffopts.nodates = True
3296 diffopts.nodates = True
3296 diffopts.git = True
3297 diffopts.git = True
3297 reversehunks = repo.ui.configbool('experimental',
3298 reversehunks = repo.ui.configbool('experimental',
3298 'revertalternateinteractivemode',
3299 'revertalternateinteractivemode',
3299 True)
3300 True)
3300 if reversehunks:
3301 if reversehunks:
3301 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3302 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3302 else:
3303 else:
3303 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3304 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3304 originalchunks = patch.parsepatch(diff)
3305 originalchunks = patch.parsepatch(diff)
3305 operation = 'discard' if node == parent else 'revert'
3306 operation = 'discard' if node == parent else 'revert'
3306
3307
3307 try:
3308 try:
3308
3309
3309 chunks, opts = recordfilter(repo.ui, originalchunks,
3310 chunks, opts = recordfilter(repo.ui, originalchunks,
3310 operation=operation)
3311 operation=operation)
3311 if reversehunks:
3312 if reversehunks:
3312 chunks = patch.reversehunks(chunks)
3313 chunks = patch.reversehunks(chunks)
3313
3314
3314 except patch.PatchError as err:
3315 except patch.PatchError as err:
3315 raise error.Abort(_('error parsing patch: %s') % err)
3316 raise error.Abort(_('error parsing patch: %s') % err)
3316
3317
3317 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3318 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3318 if tobackup is None:
3319 if tobackup is None:
3319 tobackup = set()
3320 tobackup = set()
3320 # Apply changes
3321 # Apply changes
3321 fp = stringio()
3322 fp = stringio()
3322 for c in chunks:
3323 for c in chunks:
3323 # Create a backup file only if this hunk should be backed up
3324 # Create a backup file only if this hunk should be backed up
3324 if ishunk(c) and c.header.filename() in tobackup:
3325 if ishunk(c) and c.header.filename() in tobackup:
3325 abs = c.header.filename()
3326 abs = c.header.filename()
3326 target = repo.wjoin(abs)
3327 target = repo.wjoin(abs)
3327 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3328 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3328 util.copyfile(target, bakname)
3329 util.copyfile(target, bakname)
3329 tobackup.remove(abs)
3330 tobackup.remove(abs)
3330 c.write(fp)
3331 c.write(fp)
3331 dopatch = fp.tell()
3332 dopatch = fp.tell()
3332 fp.seek(0)
3333 fp.seek(0)
3333 if dopatch:
3334 if dopatch:
3334 try:
3335 try:
3335 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3336 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3336 except patch.PatchError as err:
3337 except patch.PatchError as err:
3337 raise error.Abort(str(err))
3338 raise error.Abort(str(err))
3338 del fp
3339 del fp
3339 else:
3340 else:
3340 for f in actions['revert'][0]:
3341 for f in actions['revert'][0]:
3341 checkout(f)
3342 checkout(f)
3342 if normal:
3343 if normal:
3343 normal(f)
3344 normal(f)
3344
3345
3345 for f in actions['add'][0]:
3346 for f in actions['add'][0]:
3346 # Don't checkout modified files, they are already created by the diff
3347 # Don't checkout modified files, they are already created by the diff
3347 if f not in newlyaddedandmodifiedfiles:
3348 if f not in newlyaddedandmodifiedfiles:
3348 checkout(f)
3349 checkout(f)
3349 repo.dirstate.add(f)
3350 repo.dirstate.add(f)
3350
3351
3351 normal = repo.dirstate.normallookup
3352 normal = repo.dirstate.normallookup
3352 if node == parent and p2 == nullid:
3353 if node == parent and p2 == nullid:
3353 normal = repo.dirstate.normal
3354 normal = repo.dirstate.normal
3354 for f in actions['undelete'][0]:
3355 for f in actions['undelete'][0]:
3355 checkout(f)
3356 checkout(f)
3356 normal(f)
3357 normal(f)
3357
3358
3358 copied = copies.pathcopies(repo[parent], ctx)
3359 copied = copies.pathcopies(repo[parent], ctx)
3359
3360
3360 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3361 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3361 if f in copied:
3362 if f in copied:
3362 repo.dirstate.copy(copied[f], f)
3363 repo.dirstate.copy(copied[f], f)
3363
3364
3364 def command(table):
3365 def command(table):
3365 """Returns a function object to be used as a decorator for making commands.
3366 """Returns a function object to be used as a decorator for making commands.
3366
3367
3367 This function receives a command table as its argument. The table should
3368 This function receives a command table as its argument. The table should
3368 be a dict.
3369 be a dict.
3369
3370
3370 The returned function can be used as a decorator for adding commands
3371 The returned function can be used as a decorator for adding commands
3371 to that command table. This function accepts multiple arguments to define
3372 to that command table. This function accepts multiple arguments to define
3372 a command.
3373 a command.
3373
3374
3374 The first argument is the command name.
3375 The first argument is the command name.
3375
3376
3376 The options argument is an iterable of tuples defining command arguments.
3377 The options argument is an iterable of tuples defining command arguments.
3377 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3378 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3378
3379
3379 The synopsis argument defines a short, one line summary of how to use the
3380 The synopsis argument defines a short, one line summary of how to use the
3380 command. This shows up in the help output.
3381 command. This shows up in the help output.
3381
3382
3382 The norepo argument defines whether the command does not require a
3383 The norepo argument defines whether the command does not require a
3383 local repository. Most commands operate against a repository, thus the
3384 local repository. Most commands operate against a repository, thus the
3384 default is False.
3385 default is False.
3385
3386
3386 The optionalrepo argument defines whether the command optionally requires
3387 The optionalrepo argument defines whether the command optionally requires
3387 a local repository.
3388 a local repository.
3388
3389
3389 The inferrepo argument defines whether to try to find a repository from the
3390 The inferrepo argument defines whether to try to find a repository from the
3390 command line arguments. If True, arguments will be examined for potential
3391 command line arguments. If True, arguments will be examined for potential
3391 repository locations. See ``findrepo()``. If a repository is found, it
3392 repository locations. See ``findrepo()``. If a repository is found, it
3392 will be used.
3393 will be used.
3393 """
3394 """
3394 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3395 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3395 inferrepo=False):
3396 inferrepo=False):
3396 def decorator(func):
3397 def decorator(func):
3397 func.norepo = norepo
3398 func.norepo = norepo
3398 func.optionalrepo = optionalrepo
3399 func.optionalrepo = optionalrepo
3399 func.inferrepo = inferrepo
3400 func.inferrepo = inferrepo
3400 if synopsis:
3401 if synopsis:
3401 table[name] = func, list(options), synopsis
3402 table[name] = func, list(options), synopsis
3402 else:
3403 else:
3403 table[name] = func, list(options)
3404 table[name] = func, list(options)
3404 return func
3405 return func
3405 return decorator
3406 return decorator
3406
3407
3407 return cmd
3408 return cmd
3408
3409
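A usage sketch (assumed, not part of this file) of the decorator pattern that command() enables in an extension; the 'hello' command and its option are hypothetical:

from mercurial import cmdutil

cmdtable = {}
command = cmdutil.command(cmdtable)

@command('hello',
         [('g', 'greeting', 'Hello', 'greeting to use')],
         'hg hello [-g TEXT]')
def hello(ui, repo, **opts):
    """print a greeting for the repository root"""
    ui.write('%s %s\n' % (opts['greeting'], repo.root))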
3409 def checkunresolved(ms):
3410 def checkunresolved(ms):
3410 if list(ms.unresolved()):
3411 if list(ms.unresolved()):
3411 raise error.Abort(_("unresolved merge conflicts "
3412 raise error.Abort(_("unresolved merge conflicts "
3412 "(see 'hg help resolve')"))
3413 "(see 'hg help resolve')"))
3413 if ms.mdstate() != 's' or list(ms.driverresolved()):
3414 if ms.mdstate() != 's' or list(ms.driverresolved()):
3414 raise error.Abort(_('driver-resolved merge conflicts'),
3415 raise error.Abort(_('driver-resolved merge conflicts'),
3415 hint=_('run "hg resolve --all" to resolve'))
3416 hint=_('run "hg resolve --all" to resolve'))
3416
3417
3417 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3418 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3418 # commands.outgoing. "missing" is "missing" of the result of
3419 # commands.outgoing. "missing" is "missing" of the result of
3419 # "findcommonoutgoing()"
3420 # "findcommonoutgoing()"
3420 outgoinghooks = util.hooks()
3421 outgoinghooks = util.hooks()
3421
3422
3422 # a list of (ui, repo) functions called by commands.summary
3423 # a list of (ui, repo) functions called by commands.summary
3423 summaryhooks = util.hooks()
3424 summaryhooks = util.hooks()
3424
3425
3425 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3426 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3426 #
3427 #
3427 # functions should return tuple of booleans below, if 'changes' is None:
3428 # functions should return tuple of booleans below, if 'changes' is None:
3428 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3429 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3429 #
3430 #
3430 # otherwise, 'changes' is a tuple of tuples below:
3431 # otherwise, 'changes' is a tuple of tuples below:
3431 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3432 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3432 # - (desturl, destbranch, destpeer, outgoing)
3433 # - (desturl, destbranch, destpeer, outgoing)
3433 summaryremotehooks = util.hooks()
3434 summaryremotehooks = util.hooks()
3434
3435
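A sketch of how an extension might register one of these hooks (assuming util.hooks exposes add(source, hook), as other in-tree extensions use); the state file name is hypothetical:

from mercurial import cmdutil

def summaryhook(ui, repo):
    # matches the (ui, repo) signature documented above
    if repo.vfs.exists('myext-state'):
        ui.write('myext: operation in progress\n')

def uisetup(ui):
    cmdutil.summaryhooks.add('myext', summaryhook)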
3435 # A list of state files kept by multistep operations like graft.
3436 # A list of state files kept by multistep operations like graft.
3436 # Since graft cannot be aborted, it is considered 'clearable' by update.
3437 # Since graft cannot be aborted, it is considered 'clearable' by update.
3437 # note: bisect is intentionally excluded
3438 # note: bisect is intentionally excluded
3438 # (state file, clearable, allowcommit, error, hint)
3439 # (state file, clearable, allowcommit, error, hint)
3439 unfinishedstates = [
3440 unfinishedstates = [
3440 ('graftstate', True, False, _('graft in progress'),
3441 ('graftstate', True, False, _('graft in progress'),
3441 _("use 'hg graft --continue' or 'hg update' to abort")),
3442 _("use 'hg graft --continue' or 'hg update' to abort")),
3442 ('updatestate', True, False, _('last update was interrupted'),
3443 ('updatestate', True, False, _('last update was interrupted'),
3443 _("use 'hg update' to get a consistent checkout"))
3444 _("use 'hg update' to get a consistent checkout"))
3444 ]
3445 ]
3445
3446
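Extensions that implement their own multistep commands usually register a tuple in the (state file, clearable, allowcommit, error, hint) format documented above; a hedged example with a made-up state file:

from mercurial import cmdutil
from mercurial.i18n import _

cmdutil.unfinishedstates.append(
    ('mystate', False, False, _('my operation in progress'),
     _("use 'hg myop --continue' or 'hg myop --abort'")))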
3446 def checkunfinished(repo, commit=False):
3447 def checkunfinished(repo, commit=False):
3447 '''Look for an unfinished multistep operation, like graft, and abort
3448 '''Look for an unfinished multistep operation, like graft, and abort
3448 if found. It's probably good to check this right before
3449 if found. It's probably good to check this right before
3449 bailifchanged().
3450 bailifchanged().
3450 '''
3451 '''
3451 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3452 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3452 if commit and allowcommit:
3453 if commit and allowcommit:
3453 continue
3454 continue
3454 if repo.vfs.exists(f):
3455 if repo.vfs.exists(f):
3455 raise error.Abort(msg, hint=hint)
3456 raise error.Abort(msg, hint=hint)
3456
3457
3457 def clearunfinished(repo):
3458 def clearunfinished(repo):
3458 '''Check for unfinished operations (as above), and clear the ones
3459 '''Check for unfinished operations (as above), and clear the ones
3459 that are clearable.
3460 that are clearable.
3460 '''
3461 '''
3461 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3462 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3462 if not clearable and repo.vfs.exists(f):
3463 if not clearable and repo.vfs.exists(f):
3463 raise error.Abort(msg, hint=hint)
3464 raise error.Abort(msg, hint=hint)
3464 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3465 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3465 if clearable and repo.vfs.exists(f):
3466 if clearable and repo.vfs.exists(f):
3466 util.unlink(repo.join(f))
3467 util.unlink(repo.join(f))
3467
3468
3468 afterresolvedstates = [
3469 afterresolvedstates = [
3469 ('graftstate',
3470 ('graftstate',
3470 _('hg graft --continue')),
3471 _('hg graft --continue')),
3471 ]
3472 ]
3472
3473
3473 def howtocontinue(repo):
3474 def howtocontinue(repo):
3474 '''Check for an unfinished operation and return the command to finish
3475 '''Check for an unfinished operation and return the command to finish
3475 it.
3476 it.
3476
3477
3477 afterresolvedstates tuples define a .hg/{file} and the corresponding
3478 afterresolvedstates tuples define a .hg/{file} and the corresponding
3478 command needed to finish it.
3479 command needed to finish it.
3479
3480
3480 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3481 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3481 a boolean.
3482 a boolean.
3482 '''
3483 '''
3483 contmsg = _("continue: %s")
3484 contmsg = _("continue: %s")
3484 for f, msg in afterresolvedstates:
3485 for f, msg in afterresolvedstates:
3485 if repo.vfs.exists(f):
3486 if repo.vfs.exists(f):
3486 return contmsg % msg, True
3487 return contmsg % msg, True
3487 workingctx = repo[None]
3488 workingctx = repo[None]
3488 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3489 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3489 for s in workingctx.substate)
3490 for s in workingctx.substate)
3490 if dirty:
3491 if dirty:
3491 return contmsg % _("hg commit"), False
3492 return contmsg % _("hg commit"), False
3492 return None, None
3493 return None, None
3493
3494
3494 def checkafterresolved(repo):
3495 def checkafterresolved(repo):
3495 '''Inform the user about the next action after completing hg resolve
3496 '''Inform the user about the next action after completing hg resolve
3496
3497
3497 If there's a matching afterresolvedstates, howtocontinue will yield
3498 If there's a matching afterresolvedstates, howtocontinue will yield
3498 repo.ui.warn as the reporter.
3499 repo.ui.warn as the reporter.
3499
3500
3500 Otherwise, it will yield repo.ui.note.
3501 Otherwise, it will yield repo.ui.note.
3501 '''
3502 '''
3502 msg, warning = howtocontinue(repo)
3503 msg, warning = howtocontinue(repo)
3503 if msg is not None:
3504 if msg is not None:
3504 if warning:
3505 if warning:
3505 repo.ui.warn("%s\n" % msg)
3506 repo.ui.warn("%s\n" % msg)
3506 else:
3507 else:
3507 repo.ui.note("%s\n" % msg)
3508 repo.ui.note("%s\n" % msg)
3508
3509
3509 def wrongtooltocontinue(repo, task):
3510 def wrongtooltocontinue(repo, task):
3510 '''Raise an abort suggesting how to properly continue if there is an
3511 '''Raise an abort suggesting how to properly continue if there is an
3511 active task.
3512 active task.
3512
3513
3513 Uses howtocontinue() to find the active task.
3514 Uses howtocontinue() to find the active task.
3514
3515
3515 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3516 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3516 a hint.
3517 a hint.
3517 '''
3518 '''
3518 after = howtocontinue(repo)
3519 after = howtocontinue(repo)
3519 hint = None
3520 hint = None
3520 if after[1]:
3521 if after[1]:
3521 hint = after[0]
3522 hint = after[0]
3522 raise error.Abort(_('no %s in progress') % task, hint=hint)
3523 raise error.Abort(_('no %s in progress') % task, hint=hint)
3523
3524
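# A minimal sketch (an illustration, not part of this change) of how a command
# body might use the helpers above; 'finishgraft' is a hypothetical stand-in
# for the code that resumes an interrupted operation.
def continueorhint(ui, repo, **opts):
    if opts.get('continue'):
        if not repo.vfs.exists('graftstate'):
            # nothing to resume: abort with a hint taken from howtocontinue()
            wrongtooltocontinue(repo, _('graft'))
        finishgraft(ui, repo)
    else:
        # after 'hg resolve', tell the user what to run next
        checkafterresolved(repo)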
3524 class dirstateguard(object):
3525 dirstateguard = dirstateguardmod.dirstateguard
3525 '''Restore dirstate at unexpected failure.
3526
3527 At the construction, this class does:
3528
3529 - write current ``repo.dirstate`` out, and
3530 - save ``.hg/dirstate`` into the backup file
3531
3532 This restores ``.hg/dirstate`` from backup file, if ``release()``
3533 is invoked before ``close()``.
3534
3535 This just removes the backup file at ``close()`` before ``release()``.
3536 '''
3537
3538 def __init__(self, repo, name):
3539 self._repo = repo
3540 self._active = False
3541 self._closed = False
3542 self._suffix = '.backup.%s.%d' % (name, id(self))
3543 repo.dirstate.savebackup(repo.currenttransaction(), self._suffix)
3544 self._active = True
3545
3546 def __del__(self):
3547 if self._active: # still active
3548 # this may occur, even if this class is used correctly:
3549 # for example, releasing other resources like transaction
3550 # may raise exception before ``dirstateguard.release`` in
3551 # ``release(tr, ....)``.
3552 self._abort()
3553
3554 def close(self):
3555 if not self._active: # already inactivated
3556 msg = (_("can't close already inactivated backup: dirstate%s")
3557 % self._suffix)
3558 raise error.Abort(msg)
3559
3560 self._repo.dirstate.clearbackup(self._repo.currenttransaction(),
3561 self._suffix)
3562 self._active = False
3563 self._closed = True
3564
3565 def _abort(self):
3566 self._repo.dirstate.restorebackup(self._repo.currenttransaction(),
3567 self._suffix)
3568 self._active = False
3569
3570 def release(self):
3571 if not self._closed:
3572 if not self._active: # already inactivated
3573 msg = (_("can't release already inactivated backup:"
3574 " dirstate%s")
3575 % self._suffix)
3576 raise error.Abort(msg)
3577 self._abort()
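# A minimal usage sketch of the class above (after this change it lives in the
# new dirstateguard module; cmdutil.dirstateguard stays as an alias). The
# 'rewritefiles' call is a hypothetical dirstate-mutating operation.
dsguard = dirstateguard(repo, 'rewritefiles')
try:
    rewritefiles(repo)    # anything that touches the dirstate
    dsguard.close()       # success: just drop the .hg/dirstate backup
finally:
    dsguard.release()     # if close() was never reached, restore the backup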
This diff has been collapsed as it changes many lines (3510 lines changed).
@@ -1,3577 +1,69 b''
1 # cmdutil.py - help for command processing in mercurial
1 # dirstateguard.py - class to allow restoring dirstate after failure
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
11 import os
12 import re
13 import sys
14 import tempfile
15
16 from .i18n import _
10 from .i18n import _
17 from .node import (
18 bin,
19 hex,
20 nullid,
21 nullrev,
22 short,
23 )
24
11
25 from . import (
12 from . import (
26 bookmarks,
27 changelog,
28 copies,
29 crecord as crecordmod,
30 encoding,
31 error,
13 error,
32 formatter,
33 graphmod,
34 lock as lockmod,
35 match as matchmod,
36 obsolete,
37 patch,
38 pathutil,
39 phases,
40 repair,
41 revlog,
42 revset,
43 scmutil,
44 templatekw,
45 templater,
46 util,
47 )
14 )
48 stringio = util.stringio
49
50 def ishunk(x):
51 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
52 return isinstance(x, hunkclasses)
53
54 def newandmodified(chunks, originalchunks):
55 newlyaddedandmodifiedfiles = set()
56 for chunk in chunks:
57 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
58 originalchunks:
59 newlyaddedandmodifiedfiles.add(chunk.header.filename())
60 return newlyaddedandmodifiedfiles
61
62 def parsealiases(cmd):
63 return cmd.lstrip("^").split("|")
64
65 def setupwrapcolorwrite(ui):
66 # wrap ui.write so diff output can be labeled/colorized
67 def wrapwrite(orig, *args, **kw):
68 label = kw.pop('label', '')
69 for chunk, l in patch.difflabel(lambda: args):
70 orig(chunk, label=label + l)
71
72 oldwrite = ui.write
73 def wrap(*args, **kwargs):
74 return wrapwrite(oldwrite, *args, **kwargs)
75 setattr(ui, 'write', wrap)
76 return oldwrite
77
78 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
79 if usecurses:
80 if testfile:
81 recordfn = crecordmod.testdecorator(testfile,
82 crecordmod.testchunkselector)
83 else:
84 recordfn = crecordmod.chunkselector
85
86 return crecordmod.filterpatch(ui, originalhunks, recordfn)
87
88 else:
89 return patch.filterpatch(ui, originalhunks, operation)
90
91 def recordfilter(ui, originalhunks, operation=None):
92 """ Prompts the user to filter the originalhunks and return a list of
93 selected hunks.
94 *operation* is used to build ui messages to indicate to the user what
95 kind of filtering they are doing: reverting, committing, shelving, etc.
96 (see patch.filterpatch).
97 """
98 usecurses = crecordmod.checkcurses(ui)
99 testfile = ui.config('experimental', 'crecordtest', None)
100 oldwrite = setupwrapcolorwrite(ui)
101 try:
102 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
103 testfile, operation)
104 finally:
105 ui.write = oldwrite
106 return newchunks, newopts
107
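# A rough sketch of feeding recordfilter() by hand; the diff options and the
# 'commit' operation label are assumptions, mirroring how dorecord() below
# wires it up.
diffopts = patch.difffeatureopts(ui, whitespace=True)
diffopts.git = True
hunks = patch.parsepatch(patch.diff(repo, changes=repo.status(), opts=diffopts))
try:
    chunks, newopts = recordfilter(ui, hunks, operation='commit')
except patch.PatchError as err:
    raise error.Abort(_('error parsing patch: %s') % err)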
108 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
109 filterfn, *pats, **opts):
110 from . import merge as mergemod
111 if not ui.interactive():
112 if cmdsuggest:
113 msg = _('running non-interactively, use %s instead') % cmdsuggest
114 else:
115 msg = _('running non-interactively')
116 raise error.Abort(msg)
117
118 # make sure username is set before going interactive
119 if not opts.get('user'):
120 ui.username() # raise exception, username not provided
121
122 def recordfunc(ui, repo, message, match, opts):
123 """This is generic record driver.
124
125 Its job is to interactively filter local changes, and
126 accordingly prepare the working directory into a state in which the
127 job can be delegated to a non-interactive commit command such as
128 'commit' or 'qrefresh'.
129
130 After the actual job is done by non-interactive command, the
131 working directory is restored to its original state.
132
133 In the end we'll record interesting changes, and everything else
134 will be left in place, so the user can continue working.
135 """
136
137 checkunfinished(repo, commit=True)
138 wctx = repo[None]
139 merge = len(wctx.parents()) > 1
140 if merge:
141 raise error.Abort(_('cannot partially commit a merge '
142 '(use "hg commit" instead)'))
143
144 def fail(f, msg):
145 raise error.Abort('%s: %s' % (f, msg))
146
147 force = opts.get('force')
148 if not force:
149 vdirs = []
150 match.explicitdir = vdirs.append
151 match.bad = fail
152
153 status = repo.status(match=match)
154 if not force:
155 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
156 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
157 diffopts.nodates = True
158 diffopts.git = True
159 diffopts.showfunc = True
160 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
161 originalchunks = patch.parsepatch(originaldiff)
162
163 # 1. filter patch, since we are intending to apply subset of it
164 try:
165 chunks, newopts = filterfn(ui, originalchunks)
166 except patch.PatchError as err:
167 raise error.Abort(_('error parsing patch: %s') % err)
168 opts.update(newopts)
169
170 # We need to keep a backup of files that have been newly added and
171 # modified during the recording process because there is a previous
172 # version without the edit in the workdir
173 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
174 contenders = set()
175 for h in chunks:
176 try:
177 contenders.update(set(h.files()))
178 except AttributeError:
179 pass
180
181 changed = status.modified + status.added + status.removed
182 newfiles = [f for f in changed if f in contenders]
183 if not newfiles:
184 ui.status(_('no changes to record\n'))
185 return 0
186
187 modified = set(status.modified)
188
189 # 2. backup changed files, so we can restore them in the end
190
191 if backupall:
192 tobackup = changed
193 else:
194 tobackup = [f for f in newfiles if f in modified or f in \
195 newlyaddedandmodifiedfiles]
196 backups = {}
197 if tobackup:
198 backupdir = repo.join('record-backups')
199 try:
200 os.mkdir(backupdir)
201 except OSError as err:
202 if err.errno != errno.EEXIST:
203 raise
204 try:
205 # backup continues
206 for f in tobackup:
207 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
208 dir=backupdir)
209 os.close(fd)
210 ui.debug('backup %r as %r\n' % (f, tmpname))
211 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
212 backups[f] = tmpname
213
214 fp = stringio()
215 for c in chunks:
216 fname = c.filename()
217 if fname in backups:
218 c.write(fp)
219 dopatch = fp.tell()
220 fp.seek(0)
221
222 # 2.5 optionally review / modify patch in text editor
223 if opts.get('review', False):
224 patchtext = (crecordmod.diffhelptext
225 + crecordmod.patchhelptext
226 + fp.read())
227 reviewedpatch = ui.edit(patchtext, "",
228 extra={"suffix": ".diff"})
229 fp.truncate(0)
230 fp.write(reviewedpatch)
231 fp.seek(0)
232
233 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
234 # 3a. apply filtered patch to clean repo (clean)
235 if backups:
236 # Equivalent to hg.revert
237 m = scmutil.matchfiles(repo, backups.keys())
238 mergemod.update(repo, repo.dirstate.p1(),
239 False, True, matcher=m)
240
241 # 3b. (apply)
242 if dopatch:
243 try:
244 ui.debug('applying patch\n')
245 ui.debug(fp.getvalue())
246 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
247 except patch.PatchError as err:
248 raise error.Abort(str(err))
249 del fp
250
251 # 4. We prepared working directory according to filtered
252 # patch. Now is the time to delegate the job to
253 # commit/qrefresh or the like!
254
255 # Make all of the pathnames absolute.
256 newfiles = [repo.wjoin(nf) for nf in newfiles]
257 return commitfunc(ui, repo, *newfiles, **opts)
258 finally:
259 # 5. finally restore backed-up files
260 try:
261 dirstate = repo.dirstate
262 for realname, tmpname in backups.iteritems():
263 ui.debug('restoring %r to %r\n' % (tmpname, realname))
264
265 if dirstate[realname] == 'n':
266 # without normallookup, restoring timestamp
267 # may cause partially committed files
268 # to be treated as unmodified
269 dirstate.normallookup(realname)
270
271 # copystat=True here and above are a hack to trick any
272 # editors that have f open into thinking we haven't modified them.
273 #
274 # Also note that this is racy, as an editor could notice the
275 # file's mtime before we've finished writing it.
276 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
277 os.unlink(tmpname)
278 if tobackup:
279 os.rmdir(backupdir)
280 except OSError:
281 pass
282
283 def recordinwlock(ui, repo, message, match, opts):
284 with repo.wlock():
285 return recordfunc(ui, repo, message, match, opts)
286
287 return commit(ui, repo, recordinwlock, pats, opts)
288
289 def findpossible(cmd, table, strict=False):
290 """
291 Return cmd -> (aliases, command table entry)
292 for each matching command.
293 Return debug commands (or their aliases) only if no normal command matches.
294 """
295 choice = {}
296 debugchoice = {}
297
298 if cmd in table:
299 # short-circuit exact matches, "log" alias beats "^log|history"
300 keys = [cmd]
301 else:
302 keys = table.keys()
303
304 allcmds = []
305 for e in keys:
306 aliases = parsealiases(e)
307 allcmds.extend(aliases)
308 found = None
309 if cmd in aliases:
310 found = cmd
311 elif not strict:
312 for a in aliases:
313 if a.startswith(cmd):
314 found = a
315 break
316 if found is not None:
317 if aliases[0].startswith("debug") or found.startswith("debug"):
318 debugchoice[found] = (aliases, table[e])
319 else:
320 choice[found] = (aliases, table[e])
321
322 if not choice and debugchoice:
323 choice = debugchoice
324
325 return choice, allcmds
326
327 def findcmd(cmd, table, strict=True):
328 """Return (aliases, command table entry) for command string."""
329 choice, allcmds = findpossible(cmd, table, strict)
330
331 if cmd in choice:
332 return choice[cmd]
333
334 if len(choice) > 1:
335 clist = choice.keys()
336 clist.sort()
337 raise error.AmbiguousCommand(cmd, clist)
338
339 if choice:
340 return choice.values()[0]
341
342 raise error.UnknownCommand(cmd, allcmds)
343
344 def findrepo(p):
345 while not os.path.isdir(os.path.join(p, ".hg")):
346 oldp, p = p, os.path.dirname(p)
347 if p == oldp:
348 return None
349
350 return p
351
352 def bailifchanged(repo, merge=True):
353 if merge and repo.dirstate.p2() != nullid:
354 raise error.Abort(_('outstanding uncommitted merge'))
355 modified, added, removed, deleted = repo.status()[:4]
356 if modified or added or removed or deleted:
357 raise error.Abort(_('uncommitted changes'))
358 ctx = repo[None]
359 for s in sorted(ctx.substate):
360 ctx.sub(s).bailifchanged()
361
362 def logmessage(ui, opts):
363 """ get the log message according to -m and -l option """
364 message = opts.get('message')
365 logfile = opts.get('logfile')
366
367 if message and logfile:
368 raise error.Abort(_('options --message and --logfile are mutually '
369 'exclusive'))
370 if not message and logfile:
371 try:
372 if logfile == '-':
373 message = ui.fin.read()
374 else:
375 message = '\n'.join(util.readfile(logfile).splitlines())
376 except IOError as inst:
377 raise error.Abort(_("can't read commit message '%s': %s") %
378 (logfile, inst.strerror))
379 return message
380
381 def mergeeditform(ctxorbool, baseformname):
382 """return appropriate editform name (referencing a committemplate)
383
384 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
385 merging is committed.
386
387 This returns baseformname with '.merge' appended if it is a merge,
388 otherwise '.normal' is appended.
389 """
390 if isinstance(ctxorbool, bool):
391 if ctxorbool:
392 return baseformname + ".merge"
393 elif 1 < len(ctxorbool.parents()):
394 return baseformname + ".merge"
395
396 return baseformname + ".normal"
397
398 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
399 editform='', **opts):
400 """get appropriate commit message editor according to '--edit' option
401
402 'finishdesc' is a function to be called with edited commit message
403 (= 'description' of the new changeset) just after editing, but
404 before checking emptiness. It should return the actual text to be
405 stored into history. This allows changing the description before
406 storing.
407
408 'extramsg' is an extra message to be shown in the editor instead of
409 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
410 is automatically added.
411
412 'editform' is a dot-separated list of names, to distinguish
413 the purpose of commit text editing.
414
415 'getcommiteditor' returns 'commitforceeditor' regardless of
416 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
417 they are specific for usage in MQ.
418 """
419 if edit or finishdesc or extramsg:
420 return lambda r, c, s: commitforceeditor(r, c, s,
421 finishdesc=finishdesc,
422 extramsg=extramsg,
423 editform=editform)
424 elif editform:
425 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
426 else:
427 return commiteditor
428
429 def loglimit(opts):
430 """get the log limit according to option -l/--limit"""
431 limit = opts.get('limit')
432 if limit:
433 try:
434 limit = int(limit)
435 except ValueError:
436 raise error.Abort(_('limit must be a positive integer'))
437 if limit <= 0:
438 raise error.Abort(_('limit must be positive'))
439 else:
440 limit = None
441 return limit
442
443 def makefilename(repo, pat, node, desc=None,
444 total=None, seqno=None, revwidth=None, pathname=None):
445 node_expander = {
446 'H': lambda: hex(node),
447 'R': lambda: str(repo.changelog.rev(node)),
448 'h': lambda: short(node),
449 'm': lambda: re.sub('[^\w]', '_', str(desc))
450 }
451 expander = {
452 '%': lambda: '%',
453 'b': lambda: os.path.basename(repo.root),
454 }
455
456 try:
457 if node:
458 expander.update(node_expander)
459 if node:
460 expander['r'] = (lambda:
461 str(repo.changelog.rev(node)).zfill(revwidth or 0))
462 if total is not None:
463 expander['N'] = lambda: str(total)
464 if seqno is not None:
465 expander['n'] = lambda: str(seqno)
466 if total is not None and seqno is not None:
467 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
468 if pathname is not None:
469 expander['s'] = lambda: os.path.basename(pathname)
470 expander['d'] = lambda: os.path.dirname(pathname) or '.'
471 expander['p'] = lambda: pathname
472
473 newname = []
474 patlen = len(pat)
475 i = 0
476 while i < patlen:
477 c = pat[i]
478 if c == '%':
479 i += 1
480 c = pat[i]
481 c = expander[c]()
482 newname.append(c)
483 i += 1
484 return ''.join(newname)
485 except KeyError as inst:
486 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
487 inst.args[0])
488
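# Quick reference for the format specs expanded above (a sketch, not exhaustive):
#   %H full hex node   %h short hex node   %R rev   %r zero-padded rev
#   %m description with non-word characters replaced by '_'
#   %b basename of the repo root   %N total patches   %n zero-padded sequence no.
#   %s/%d/%p basename / directory / full path of the given pathname
# For example, writing revision 'node' out as patch 3 of 12 might look like:
fn = makefilename(repo, 'hg-%h-%n-of-%N.patch', node, total=12, seqno=3)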
489 class _unclosablefile(object):
490 def __init__(self, fp):
491 self._fp = fp
492
493 def close(self):
494 pass
495
496 def __iter__(self):
497 return iter(self._fp)
498
499 def __getattr__(self, attr):
500 return getattr(self._fp, attr)
501
502 def __enter__(self):
503 return self
504
505 def __exit__(self, exc_type, exc_value, exc_tb):
506 pass
507
508 def makefileobj(repo, pat, node=None, desc=None, total=None,
509 seqno=None, revwidth=None, mode='wb', modemap=None,
510 pathname=None):
511
512 writable = mode not in ('r', 'rb')
513
514 if not pat or pat == '-':
515 if writable:
516 fp = repo.ui.fout
517 else:
518 fp = repo.ui.fin
519 return _unclosablefile(fp)
520 if util.safehasattr(pat, 'write') and writable:
521 return pat
522 if util.safehasattr(pat, 'read') and 'r' in mode:
523 return pat
524 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
525 if modemap is not None:
526 mode = modemap.get(fn, mode)
527 if mode == 'wb':
528 modemap[fn] = 'ab'
529 return open(fn, mode)
530
531 def openrevlog(repo, cmd, file_, opts):
532 """opens the changelog, manifest, a filelog or a given revlog"""
533 cl = opts['changelog']
534 mf = opts['manifest']
535 dir = opts['dir']
536 msg = None
537 if cl and mf:
538 msg = _('cannot specify --changelog and --manifest at the same time')
539 elif cl and dir:
540 msg = _('cannot specify --changelog and --dir at the same time')
541 elif cl or mf or dir:
542 if file_:
543 msg = _('cannot specify filename with --changelog or --manifest')
544 elif not repo:
545 msg = _('cannot specify --changelog or --manifest or --dir '
546 'without a repository')
547 if msg:
548 raise error.Abort(msg)
549
550 r = None
551 if repo:
552 if cl:
553 r = repo.unfiltered().changelog
554 elif dir:
555 if 'treemanifest' not in repo.requirements:
556 raise error.Abort(_("--dir can only be used on repos with "
557 "treemanifest enabled"))
558 dirlog = repo.manifestlog._revlog.dirlog(dir)
559 if len(dirlog):
560 r = dirlog
561 elif mf:
562 r = repo.manifestlog._revlog
563 elif file_:
564 filelog = repo.file(file_)
565 if len(filelog):
566 r = filelog
567 if not r:
568 if not file_:
569 raise error.CommandError(cmd, _('invalid arguments'))
570 if not os.path.isfile(file_):
571 raise error.Abort(_("revlog '%s' not found") % file_)
572 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
573 file_[:-2] + ".i")
574 return r
575
576 def copy(ui, repo, pats, opts, rename=False):
577 # called with the repo lock held
578 #
579 # hgsep => pathname that uses "/" to separate directories
580 # ossep => pathname that uses os.sep to separate directories
581 cwd = repo.getcwd()
582 targets = {}
583 after = opts.get("after")
584 dryrun = opts.get("dry_run")
585 wctx = repo[None]
586
587 def walkpat(pat):
588 srcs = []
589 if after:
590 badstates = '?'
591 else:
592 badstates = '?r'
593 m = scmutil.match(repo[None], [pat], opts, globbed=True)
594 for abs in repo.walk(m):
595 state = repo.dirstate[abs]
596 rel = m.rel(abs)
597 exact = m.exact(abs)
598 if state in badstates:
599 if exact and state == '?':
600 ui.warn(_('%s: not copying - file is not managed\n') % rel)
601 if exact and state == 'r':
602 ui.warn(_('%s: not copying - file has been marked for'
603 ' remove\n') % rel)
604 continue
605 # abs: hgsep
606 # rel: ossep
607 srcs.append((abs, rel, exact))
608 return srcs
609
610 # abssrc: hgsep
611 # relsrc: ossep
612 # otarget: ossep
613 def copyfile(abssrc, relsrc, otarget, exact):
614 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
615 if '/' in abstarget:
616 # We cannot normalize abstarget itself, this would prevent
617 # case only renames, like a => A.
618 abspath, absname = abstarget.rsplit('/', 1)
619 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
620 reltarget = repo.pathto(abstarget, cwd)
621 target = repo.wjoin(abstarget)
622 src = repo.wjoin(abssrc)
623 state = repo.dirstate[abstarget]
624
625 scmutil.checkportable(ui, abstarget)
626
627 # check for collisions
628 prevsrc = targets.get(abstarget)
629 if prevsrc is not None:
630 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
631 (reltarget, repo.pathto(abssrc, cwd),
632 repo.pathto(prevsrc, cwd)))
633 return
634
635 # check for overwrites
636 exists = os.path.lexists(target)
637 samefile = False
638 if exists and abssrc != abstarget:
639 if (repo.dirstate.normalize(abssrc) ==
640 repo.dirstate.normalize(abstarget)):
641 if not rename:
642 ui.warn(_("%s: can't copy - same file\n") % reltarget)
643 return
644 exists = False
645 samefile = True
646
647 if not after and exists or after and state in 'mn':
648 if not opts['force']:
649 if state in 'mn':
650 msg = _('%s: not overwriting - file already committed\n')
651 if after:
652 flags = '--after --force'
653 else:
654 flags = '--force'
655 if rename:
656 hint = _('(hg rename %s to replace the file by '
657 'recording a rename)\n') % flags
658 else:
659 hint = _('(hg copy %s to replace the file by '
660 'recording a copy)\n') % flags
661 else:
662 msg = _('%s: not overwriting - file exists\n')
663 if rename:
664 hint = _('(hg rename --after to record the rename)\n')
665 else:
666 hint = _('(hg copy --after to record the copy)\n')
667 ui.warn(msg % reltarget)
668 ui.warn(hint)
669 return
670
671 if after:
672 if not exists:
673 if rename:
674 ui.warn(_('%s: not recording move - %s does not exist\n') %
675 (relsrc, reltarget))
676 else:
677 ui.warn(_('%s: not recording copy - %s does not exist\n') %
678 (relsrc, reltarget))
679 return
680 elif not dryrun:
681 try:
682 if exists:
683 os.unlink(target)
684 targetdir = os.path.dirname(target) or '.'
685 if not os.path.isdir(targetdir):
686 os.makedirs(targetdir)
687 if samefile:
688 tmp = target + "~hgrename"
689 os.rename(src, tmp)
690 os.rename(tmp, target)
691 else:
692 util.copyfile(src, target)
693 srcexists = True
694 except IOError as inst:
695 if inst.errno == errno.ENOENT:
696 ui.warn(_('%s: deleted in working directory\n') % relsrc)
697 srcexists = False
698 else:
699 ui.warn(_('%s: cannot copy - %s\n') %
700 (relsrc, inst.strerror))
701 return True # report a failure
702
703 if ui.verbose or not exact:
704 if rename:
705 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
706 else:
707 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
708
709 targets[abstarget] = abssrc
710
711 # fix up dirstate
712 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
713 dryrun=dryrun, cwd=cwd)
714 if rename and not dryrun:
715 if not after and srcexists and not samefile:
716 util.unlinkpath(repo.wjoin(abssrc))
717 wctx.forget([abssrc])
718
719 # pat: ossep
720 # dest ossep
721 # srcs: list of (hgsep, hgsep, ossep, bool)
722 # return: function that takes hgsep and returns ossep
723 def targetpathfn(pat, dest, srcs):
724 if os.path.isdir(pat):
725 abspfx = pathutil.canonpath(repo.root, cwd, pat)
726 abspfx = util.localpath(abspfx)
727 if destdirexists:
728 striplen = len(os.path.split(abspfx)[0])
729 else:
730 striplen = len(abspfx)
731 if striplen:
732 striplen += len(os.sep)
733 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
734 elif destdirexists:
735 res = lambda p: os.path.join(dest,
736 os.path.basename(util.localpath(p)))
737 else:
738 res = lambda p: dest
739 return res
740
741 # pat: ossep
742 # dest ossep
743 # srcs: list of (hgsep, hgsep, ossep, bool)
744 # return: function that takes hgsep and returns ossep
745 def targetpathafterfn(pat, dest, srcs):
746 if matchmod.patkind(pat):
747 # a mercurial pattern
748 res = lambda p: os.path.join(dest,
749 os.path.basename(util.localpath(p)))
750 else:
751 abspfx = pathutil.canonpath(repo.root, cwd, pat)
752 if len(abspfx) < len(srcs[0][0]):
753 # A directory. Either the target path contains the last
754 # component of the source path or it does not.
755 def evalpath(striplen):
756 score = 0
757 for s in srcs:
758 t = os.path.join(dest, util.localpath(s[0])[striplen:])
759 if os.path.lexists(t):
760 score += 1
761 return score
762
763 abspfx = util.localpath(abspfx)
764 striplen = len(abspfx)
765 if striplen:
766 striplen += len(os.sep)
767 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
768 score = evalpath(striplen)
769 striplen1 = len(os.path.split(abspfx)[0])
770 if striplen1:
771 striplen1 += len(os.sep)
772 if evalpath(striplen1) > score:
773 striplen = striplen1
774 res = lambda p: os.path.join(dest,
775 util.localpath(p)[striplen:])
776 else:
777 # a file
778 if destdirexists:
779 res = lambda p: os.path.join(dest,
780 os.path.basename(util.localpath(p)))
781 else:
782 res = lambda p: dest
783 return res
784
785 pats = scmutil.expandpats(pats)
786 if not pats:
787 raise error.Abort(_('no source or destination specified'))
788 if len(pats) == 1:
789 raise error.Abort(_('no destination specified'))
790 dest = pats.pop()
791 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
792 if not destdirexists:
793 if len(pats) > 1 or matchmod.patkind(pats[0]):
794 raise error.Abort(_('with multiple sources, destination must be an '
795 'existing directory'))
796 if util.endswithsep(dest):
797 raise error.Abort(_('destination %s is not a directory') % dest)
798
799 tfn = targetpathfn
800 if after:
801 tfn = targetpathafterfn
802 copylist = []
803 for pat in pats:
804 srcs = walkpat(pat)
805 if not srcs:
806 continue
807 copylist.append((tfn(pat, dest, srcs), srcs))
808 if not copylist:
809 raise error.Abort(_('no files to copy'))
810
811 errors = 0
812 for targetpath, srcs in copylist:
813 for abssrc, relsrc, exact in srcs:
814 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
815 errors += 1
816
817 if errors:
818 ui.warn(_('(consider using --after)\n'))
819
820 return errors != 0
821
822 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
823 runargs=None, appendpid=False):
824 '''Run a command as a service.'''
825
826 def writepid(pid):
827 if opts['pid_file']:
828 if appendpid:
829 mode = 'a'
830 else:
831 mode = 'w'
832 fp = open(opts['pid_file'], mode)
833 fp.write(str(pid) + '\n')
834 fp.close()
835
836 if opts['daemon'] and not opts['daemon_postexec']:
837 # Signal child process startup with file removal
838 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
839 os.close(lockfd)
840 try:
841 if not runargs:
842 runargs = util.hgcmd() + sys.argv[1:]
843 runargs.append('--daemon-postexec=unlink:%s' % lockpath)
844 # Don't pass --cwd to the child process, because we've already
845 # changed directory.
846 for i in xrange(1, len(runargs)):
847 if runargs[i].startswith('--cwd='):
848 del runargs[i]
849 break
850 elif runargs[i].startswith('--cwd'):
851 del runargs[i:i + 2]
852 break
853 def condfn():
854 return not os.path.exists(lockpath)
855 pid = util.rundetached(runargs, condfn)
856 if pid < 0:
857 raise error.Abort(_('child process failed to start'))
858 writepid(pid)
859 finally:
860 try:
861 os.unlink(lockpath)
862 except OSError as e:
863 if e.errno != errno.ENOENT:
864 raise
865 if parentfn:
866 return parentfn(pid)
867 else:
868 return
869
870 if initfn:
871 initfn()
872
873 if not opts['daemon']:
874 writepid(util.getpid())
875
876 if opts['daemon_postexec']:
877 try:
878 os.setsid()
879 except AttributeError:
880 pass
881 for inst in opts['daemon_postexec']:
882 if inst.startswith('unlink:'):
883 lockpath = inst[7:]
884 os.unlink(lockpath)
885 elif inst.startswith('chdir:'):
886 os.chdir(inst[6:])
887 elif inst != 'none':
888 raise error.Abort(_('invalid value for --daemon-postexec: %s')
889 % inst)
890 util.hidewindow()
891 util.stdout.flush()
892 util.stderr.flush()
893
894 nullfd = os.open(os.devnull, os.O_RDWR)
895 logfilefd = nullfd
896 if logfile:
897 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
898 os.dup2(nullfd, 0)
899 os.dup2(logfilefd, 1)
900 os.dup2(logfilefd, 2)
901 if nullfd not in (0, 1, 2):
902 os.close(nullfd)
903 if logfile and logfilefd not in (0, 1, 2):
904 os.close(logfilefd)
905
906 if runfn:
907 return runfn()
908
909 ## facility to let extension process additional data into an import patch
910 # list of identifier to be executed in order
911 extrapreimport = [] # run before commit
912 extrapostimport = [] # run after commit
913 # mapping from identifier to actual import function
914 #
915 # 'preimport' are run before the commit is made and are provided the following
916 # arguments:
917 # - repo: the localrepository instance,
918 # - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
919 # - extra: the future extra dictionary of the changeset, please mutate it,
920 # - opts: the import options.
921 # XXX ideally, we would just pass a ctx ready to be computed, which would allow
922 # mutation of in memory commit and more. Feel free to rework the code to get
923 # there.
924 extrapreimportmap = {}
925 # 'postimport' are run after the commit is made and are provided the following
926 # argument:
927 # - ctx: the changectx created by import.
928 extrapostimportmap = {}
929
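# Sketch of how an extension could plug into the hooks above; 'myext' and the
# extra key are hypothetical. The preimport callback may mutate 'extra' before
# the changeset is committed; the postimport callback sees the new changectx.
def _preimport(repo, patchdata, extra, opts):
    extra['myext_origin'] = patchdata.get('nodeid') or ''

def _postimport(ctx):
    pass  # e.g. record or act on the freshly imported changeset here

extrapreimport.append('myext')
extrapreimportmap['myext'] = _preimport
extrapostimport.append('myext')
extrapostimportmap['myext'] = _postimport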
930 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
931 """Utility function used by commands.import to import a single patch
932
933 This function is explicitly defined here to help the evolve extension to
934 wrap this part of the import logic.
935
936 The API is currently a bit ugly because it is a simple code translation from
937 the import command. Feel free to make it better.
938
939 :hunk: a patch (as a binary string)
940 :parents: nodes that will be parent of the created commit
941 :opts: the full dict of option passed to the import command
942 :msgs: list to save commit message to.
943 (used in case we need to save it when failing)
944 :updatefunc: a function that update a repo to a given node
945 updatefunc(<repo>, <node>)
946 """
947 # avoid cycle context -> subrepo -> cmdutil
948 from . import context
949 extractdata = patch.extract(ui, hunk)
950 tmpname = extractdata.get('filename')
951 message = extractdata.get('message')
952 user = opts.get('user') or extractdata.get('user')
953 date = opts.get('date') or extractdata.get('date')
954 branch = extractdata.get('branch')
955 nodeid = extractdata.get('nodeid')
956 p1 = extractdata.get('p1')
957 p2 = extractdata.get('p2')
958
959 nocommit = opts.get('no_commit')
960 importbranch = opts.get('import_branch')
961 update = not opts.get('bypass')
962 strip = opts["strip"]
963 prefix = opts["prefix"]
964 sim = float(opts.get('similarity') or 0)
965 if not tmpname:
966 return (None, None, False)
967
968 rejects = False
969
970 try:
971 cmdline_message = logmessage(ui, opts)
972 if cmdline_message:
973 # pickup the cmdline msg
974 message = cmdline_message
975 elif message:
976 # pickup the patch msg
977 message = message.strip()
978 else:
979 # launch the editor
980 message = None
981 ui.debug('message:\n%s\n' % message)
982
983 if len(parents) == 1:
984 parents.append(repo[nullid])
985 if opts.get('exact'):
986 if not nodeid or not p1:
987 raise error.Abort(_('not a Mercurial patch'))
988 p1 = repo[p1]
989 p2 = repo[p2 or nullid]
990 elif p2:
991 try:
992 p1 = repo[p1]
993 p2 = repo[p2]
994 # Without any options, consider p2 only if the
995 # patch is being applied on top of the recorded
996 # first parent.
997 if p1 != parents[0]:
998 p1 = parents[0]
999 p2 = repo[nullid]
1000 except error.RepoError:
1001 p1, p2 = parents
1002 if p2.node() == nullid:
1003 ui.warn(_("warning: import the patch as a normal revision\n"
1004 "(use --exact to import the patch as a merge)\n"))
1005 else:
1006 p1, p2 = parents
1007
1008 n = None
1009 if update:
1010 if p1 != parents[0]:
1011 updatefunc(repo, p1.node())
1012 if p2 != parents[1]:
1013 repo.setparents(p1.node(), p2.node())
1014
1015 if opts.get('exact') or importbranch:
1016 repo.dirstate.setbranch(branch or 'default')
1017
1018 partial = opts.get('partial', False)
1019 files = set()
1020 try:
1021 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
1022 files=files, eolmode=None, similarity=sim / 100.0)
1023 except patch.PatchError as e:
1024 if not partial:
1025 raise error.Abort(str(e))
1026 if partial:
1027 rejects = True
1028
1029 files = list(files)
1030 if nocommit:
1031 if message:
1032 msgs.append(message)
1033 else:
1034 if opts.get('exact') or p2:
1035 # If you got here, you either use --force and know what
1036 # you are doing or used --exact or a merge patch while
1037 # being updated to its first parent.
1038 m = None
1039 else:
1040 m = scmutil.matchfiles(repo, files or [])
1041 editform = mergeeditform(repo[None], 'import.normal')
1042 if opts.get('exact'):
1043 editor = None
1044 else:
1045 editor = getcommiteditor(editform=editform, **opts)
1046 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
1047 extra = {}
1048 for idfunc in extrapreimport:
1049 extrapreimportmap[idfunc](repo, extractdata, extra, opts)
1050 try:
1051 if partial:
1052 repo.ui.setconfig('ui', 'allowemptycommit', True)
1053 n = repo.commit(message, user,
1054 date, match=m,
1055 editor=editor, extra=extra)
1056 for idfunc in extrapostimport:
1057 extrapostimportmap[idfunc](repo[n])
1058 finally:
1059 repo.ui.restoreconfig(allowemptyback)
1060 else:
1061 if opts.get('exact') or importbranch:
1062 branch = branch or 'default'
1063 else:
1064 branch = p1.branch()
1065 store = patch.filestore()
1066 try:
1067 files = set()
1068 try:
1069 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
1070 files, eolmode=None)
1071 except patch.PatchError as e:
1072 raise error.Abort(str(e))
1073 if opts.get('exact'):
1074 editor = None
1075 else:
1076 editor = getcommiteditor(editform='import.bypass')
1077 memctx = context.makememctx(repo, (p1.node(), p2.node()),
1078 message,
1079 user,
1080 date,
1081 branch, files, store,
1082 editor=editor)
1083 n = memctx.commit()
1084 finally:
1085 store.close()
1086 if opts.get('exact') and nocommit:
1087 # --exact with --no-commit is still useful in that it does merge
1088 # and branch bits
1089 ui.warn(_("warning: can't check exact import with --no-commit\n"))
1090 elif opts.get('exact') and hex(n) != nodeid:
1091 raise error.Abort(_('patch is damaged or loses information'))
1092 msg = _('applied to working directory')
1093 if n:
1094 # i18n: refers to a short changeset id
1095 msg = _('created %s') % short(n)
1096 return (msg, n, rejects)
1097 finally:
1098 os.unlink(tmpname)
1099
1100 # facility to let extensions include additional data in an exported patch
1101 # list of identifiers to be executed in order
1102 extraexport = []
1103 # mapping from identifier to actual export function
1104 # function has to return a string to be added to the header or None
1105 # it is given two arguments (sequencenumber, changectx)
1106 extraexportmap = {}
1107
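# Sketch of an export header hook; 'myext' is a hypothetical identifier. The
# callback receives (seqno, ctx) and returns a header line without the leading
# '# ', or None to add nothing for this changeset.
def _exportheader(seqno, ctx):
    books = ctx.bookmarks()
    if books:
        return 'Bookmark %s' % ' '.join(books)
    return None

extraexport.append('myext')
extraexportmap['myext'] = _exportheader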
1108 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1109 opts=None, match=None):
1110 '''export changesets as hg patches.'''
1111
1112 total = len(revs)
1113 revwidth = max([len(str(rev)) for rev in revs])
1114 filemode = {}
1115
1116 def single(rev, seqno, fp):
1117 ctx = repo[rev]
1118 node = ctx.node()
1119 parents = [p.node() for p in ctx.parents() if p]
1120 branch = ctx.branch()
1121 if switch_parent:
1122 parents.reverse()
1123
1124 if parents:
1125 prev = parents[0]
1126 else:
1127 prev = nullid
1128
1129 shouldclose = False
1130 if not fp and len(template) > 0:
1131 desc_lines = ctx.description().rstrip().split('\n')
1132 desc = desc_lines[0] #Commit always has a first line.
1133 fp = makefileobj(repo, template, node, desc=desc, total=total,
1134 seqno=seqno, revwidth=revwidth, mode='wb',
1135 modemap=filemode)
1136 shouldclose = True
1137 if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
1138 repo.ui.note("%s\n" % fp.name)
1139
1140 if not fp:
1141 write = repo.ui.write
1142 else:
1143 def write(s, **kw):
1144 fp.write(s)
1145
1146 write("# HG changeset patch\n")
1147 write("# User %s\n" % ctx.user())
1148 write("# Date %d %d\n" % ctx.date())
1149 write("# %s\n" % util.datestr(ctx.date()))
1150 if branch and branch != 'default':
1151 write("# Branch %s\n" % branch)
1152 write("# Node ID %s\n" % hex(node))
1153 write("# Parent %s\n" % hex(prev))
1154 if len(parents) > 1:
1155 write("# Parent %s\n" % hex(parents[1]))
1156
1157 for headerid in extraexport:
1158 header = extraexportmap[headerid](seqno, ctx)
1159 if header is not None:
1160 write('# %s\n' % header)
1161 write(ctx.description().rstrip())
1162 write("\n\n")
1163
1164 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1165 write(chunk, label=label)
1166
1167 if shouldclose:
1168 fp.close()
1169
1170 for seqno, rev in enumerate(revs):
1171 single(rev, seqno + 1, fp)
1172
1173 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1174 changes=None, stat=False, fp=None, prefix='',
1175 root='', listsubrepos=False):
1176 '''show diff or diffstat.'''
1177 if fp is None:
1178 write = ui.write
1179 else:
1180 def write(s, **kw):
1181 fp.write(s)
1182
1183 if root:
1184 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1185 else:
1186 relroot = ''
1187 if relroot != '':
1188 # XXX relative roots currently don't work if the root is within a
1189 # subrepo
1190 uirelroot = match.uipath(relroot)
1191 relroot += '/'
1192 for matchroot in match.files():
1193 if not matchroot.startswith(relroot):
1194 ui.warn(_('warning: %s not inside relative root %s\n') % (
1195 match.uipath(matchroot), uirelroot))
1196
1197 if stat:
1198 diffopts = diffopts.copy(context=0)
1199 width = 80
1200 if not ui.plain():
1201 width = ui.termwidth()
1202 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1203 prefix=prefix, relroot=relroot)
1204 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1205 width=width):
1206 write(chunk, label=label)
1207 else:
1208 for chunk, label in patch.diffui(repo, node1, node2, match,
1209 changes, diffopts, prefix=prefix,
1210 relroot=relroot):
1211 write(chunk, label=label)
1212
1213 if listsubrepos:
1214 ctx1 = repo[node1]
1215 ctx2 = repo[node2]
1216 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1217 tempnode2 = node2
1218 try:
1219 if node2 is not None:
1220 tempnode2 = ctx2.substate[subpath][1]
1221 except KeyError:
1222 # A subrepo that existed in node1 was deleted between node1 and
1223 # node2 (inclusive). Thus, ctx2's substate won't contain that
1224 # subpath. The best we can do is to ignore it.
1225 tempnode2 = None
1226 submatch = matchmod.subdirmatcher(subpath, match)
1227 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1228 stat=stat, fp=fp, prefix=prefix)
1229
1230 class changeset_printer(object):
1231 '''show changeset information when templating not requested.'''
1232
1233 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1234 self.ui = ui
1235 self.repo = repo
1236 self.buffered = buffered
1237 self.matchfn = matchfn
1238 self.diffopts = diffopts
1239 self.header = {}
1240 self.hunk = {}
1241 self.lastheader = None
1242 self.footer = None
1243
1244 def flush(self, ctx):
1245 rev = ctx.rev()
1246 if rev in self.header:
1247 h = self.header[rev]
1248 if h != self.lastheader:
1249 self.lastheader = h
1250 self.ui.write(h)
1251 del self.header[rev]
1252 if rev in self.hunk:
1253 self.ui.write(self.hunk[rev])
1254 del self.hunk[rev]
1255 return 1
1256 return 0
1257
1258 def close(self):
1259 if self.footer:
1260 self.ui.write(self.footer)
1261
1262 def show(self, ctx, copies=None, matchfn=None, **props):
1263 if self.buffered:
1264 self.ui.pushbuffer(labeled=True)
1265 self._show(ctx, copies, matchfn, props)
1266 self.hunk[ctx.rev()] = self.ui.popbuffer()
1267 else:
1268 self._show(ctx, copies, matchfn, props)
1269
1270 def _show(self, ctx, copies, matchfn, props):
1271 '''show a single changeset or file revision'''
1272 changenode = ctx.node()
1273 rev = ctx.rev()
1274 if self.ui.debugflag:
1275 hexfunc = hex
1276 else:
1277 hexfunc = short
1278 # as of now, wctx.node() and wctx.rev() return None, but we want to
1279 # show the same values as {node} and {rev} templatekw
1280 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1281
1282 if self.ui.quiet:
1283 self.ui.write("%d:%s\n" % revnode, label='log.node')
1284 return
1285
1286 date = util.datestr(ctx.date())
1287
1288 # i18n: column positioning for "hg log"
1289 self.ui.write(_("changeset: %d:%s\n") % revnode,
1290 label='log.changeset changeset.%s' % ctx.phasestr())
1291
1292 # branches are shown first before any other names due to backwards
1293 # compatibility
1294 branch = ctx.branch()
1295 # don't show the default branch name
1296 if branch != 'default':
1297 # i18n: column positioning for "hg log"
1298 self.ui.write(_("branch: %s\n") % branch,
1299 label='log.branch')
1300
1301 for nsname, ns in self.repo.names.iteritems():
1302 # branches has special logic already handled above, so here we just
1303 # skip it
1304 if nsname == 'branches':
1305 continue
1306 # we will use the templatename as the color name since those two
1307 # should be the same
1308 for name in ns.names(self.repo, changenode):
1309 self.ui.write(ns.logfmt % name,
1310 label='log.%s' % ns.colorname)
1311 if self.ui.debugflag:
1312 # i18n: column positioning for "hg log"
1313 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1314 label='log.phase')
1315 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1316 label = 'log.parent changeset.%s' % pctx.phasestr()
1317 # i18n: column positioning for "hg log"
1318 self.ui.write(_("parent: %d:%s\n")
1319 % (pctx.rev(), hexfunc(pctx.node())),
1320 label=label)
1321
1322 if self.ui.debugflag and rev is not None:
1323 mnode = ctx.manifestnode()
1324 # i18n: column positioning for "hg log"
1325 self.ui.write(_("manifest: %d:%s\n") %
1326 (self.repo.manifestlog._revlog.rev(mnode),
1327 hex(mnode)),
1328 label='ui.debug log.manifest')
1329 # i18n: column positioning for "hg log"
1330 self.ui.write(_("user: %s\n") % ctx.user(),
1331 label='log.user')
1332 # i18n: column positioning for "hg log"
1333 self.ui.write(_("date: %s\n") % date,
1334 label='log.date')
1335
1336 if self.ui.debugflag:
1337 files = ctx.p1().status(ctx)[:3]
1338 for key, value in zip([# i18n: column positioning for "hg log"
1339 _("files:"),
1340 # i18n: column positioning for "hg log"
1341 _("files+:"),
1342 # i18n: column positioning for "hg log"
1343 _("files-:")], files):
1344 if value:
1345 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1346 label='ui.debug log.files')
1347 elif ctx.files() and self.ui.verbose:
1348 # i18n: column positioning for "hg log"
1349 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1350 label='ui.note log.files')
1351 if copies and self.ui.verbose:
1352 copies = ['%s (%s)' % c for c in copies]
1353 # i18n: column positioning for "hg log"
1354 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1355 label='ui.note log.copies')
1356
1357 extra = ctx.extra()
1358 if extra and self.ui.debugflag:
1359 for key, value in sorted(extra.items()):
1360 # i18n: column positioning for "hg log"
1361 self.ui.write(_("extra: %s=%s\n")
1362 % (key, value.encode('string_escape')),
1363 label='ui.debug log.extra')
1364
1365 description = ctx.description().strip()
1366 if description:
1367 if self.ui.verbose:
1368 self.ui.write(_("description:\n"),
1369 label='ui.note log.description')
1370 self.ui.write(description,
1371 label='ui.note log.description')
1372 self.ui.write("\n\n")
1373 else:
1374 # i18n: column positioning for "hg log"
1375 self.ui.write(_("summary: %s\n") %
1376 description.splitlines()[0],
1377 label='log.summary')
1378 self.ui.write("\n")
1379
1380 self.showpatch(ctx, matchfn)
1381
1382 def showpatch(self, ctx, matchfn):
1383 if not matchfn:
1384 matchfn = self.matchfn
1385 if matchfn:
1386 stat = self.diffopts.get('stat')
1387 diff = self.diffopts.get('patch')
1388 diffopts = patch.diffallopts(self.ui, self.diffopts)
1389 node = ctx.node()
1390 prev = ctx.p1().node()
1391 if stat:
1392 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1393 match=matchfn, stat=True)
1394 if diff:
1395 if stat:
1396 self.ui.write("\n")
1397 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1398 match=matchfn, stat=False)
1399 self.ui.write("\n")
1400
1401 class jsonchangeset(changeset_printer):
1402 '''format changeset information.'''
1403
1404 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1405 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1406 self.cache = {}
1407 self._first = True
1408
1409 def close(self):
1410 if not self._first:
1411 self.ui.write("\n]\n")
1412 else:
1413 self.ui.write("[]\n")
1414
1415 def _show(self, ctx, copies, matchfn, props):
1416 '''show a single changeset or file revision'''
1417 rev = ctx.rev()
1418 if rev is None:
1419 jrev = jnode = 'null'
1420 else:
1421 jrev = str(rev)
1422 jnode = '"%s"' % hex(ctx.node())
1423 j = encoding.jsonescape
1424
1425 if self._first:
1426 self.ui.write("[\n {")
1427 self._first = False
1428 else:
1429 self.ui.write(",\n {")
1430
1431 if self.ui.quiet:
1432 self.ui.write(('\n "rev": %s') % jrev)
1433 self.ui.write((',\n "node": %s') % jnode)
1434 self.ui.write('\n }')
1435 return
1436
1437 self.ui.write(('\n "rev": %s') % jrev)
1438 self.ui.write((',\n "node": %s') % jnode)
1439 self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
1440 self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
1441 self.ui.write((',\n "user": "%s"') % j(ctx.user()))
1442 self.ui.write((',\n "date": [%d, %d]') % ctx.date())
1443 self.ui.write((',\n "desc": "%s"') % j(ctx.description()))
1444
1445 self.ui.write((',\n "bookmarks": [%s]') %
1446 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1447 self.ui.write((',\n "tags": [%s]') %
1448 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1449 self.ui.write((',\n "parents": [%s]') %
1450 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1451
1452 if self.ui.debugflag:
1453 if rev is None:
1454 jmanifestnode = 'null'
1455 else:
1456 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1457 self.ui.write((',\n "manifest": %s') % jmanifestnode)
1458
1459 self.ui.write((',\n "extra": {%s}') %
1460 ", ".join('"%s": "%s"' % (j(k), j(v))
1461 for k, v in ctx.extra().items()))
1462
1463 files = ctx.p1().status(ctx)
1464 self.ui.write((',\n "modified": [%s]') %
1465 ", ".join('"%s"' % j(f) for f in files[0]))
1466 self.ui.write((',\n "added": [%s]') %
1467 ", ".join('"%s"' % j(f) for f in files[1]))
1468 self.ui.write((',\n "removed": [%s]') %
1469 ", ".join('"%s"' % j(f) for f in files[2]))
1470
1471 elif self.ui.verbose:
1472 self.ui.write((',\n "files": [%s]') %
1473 ", ".join('"%s"' % j(f) for f in ctx.files()))
1474
1475 if copies:
1476 self.ui.write((',\n "copies": {%s}') %
1477 ", ".join('"%s": "%s"' % (j(k), j(v))
1478 for k, v in copies))
1479
1480 matchfn = self.matchfn
1481 if matchfn:
1482 stat = self.diffopts.get('stat')
1483 diff = self.diffopts.get('patch')
1484 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1485 node, prev = ctx.node(), ctx.p1().node()
1486 if stat:
1487 self.ui.pushbuffer()
1488 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1489 match=matchfn, stat=True)
1490 self.ui.write((',\n "diffstat": "%s"')
1491 % j(self.ui.popbuffer()))
1492 if diff:
1493 self.ui.pushbuffer()
1494 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1495 match=matchfn, stat=False)
1496 self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))
1497
1498 self.ui.write("\n }")
1499
1500 class changeset_templater(changeset_printer):
1501 '''format changeset information.'''
1502
1503 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1504 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1505 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1506 filters = {'formatnode': formatnode}
1507 defaulttempl = {
1508 'parent': '{rev}:{node|formatnode} ',
1509 'manifest': '{rev}:{node|formatnode}',
1510 'file_copy': '{name} ({source})',
1511 'extra': '{key}={value|stringescape}'
1512 }
1513 # filecopy is preserved for compatibility reasons
1514 defaulttempl['filecopy'] = defaulttempl['file_copy']
1515 assert not (tmpl and mapfile)
1516 if mapfile:
1517 self.t = templater.templater.frommapfile(mapfile, filters=filters,
1518 cache=defaulttempl)
1519 else:
1520 self.t = formatter.maketemplater(ui, 'changeset', tmpl,
1521 filters=filters,
1522 cache=defaulttempl)
1523
1524 self.cache = {}
1525
1526 # find correct templates for current mode
1527 tmplmodes = [
1528 (True, None),
1529 (self.ui.verbose, 'verbose'),
1530 (self.ui.quiet, 'quiet'),
1531 (self.ui.debugflag, 'debug'),
1532 ]
1533
1534 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1535 'docheader': '', 'docfooter': ''}
1536 for mode, postfix in tmplmodes:
1537 for t in self._parts:
1538 cur = t
1539 if postfix:
1540 cur += "_" + postfix
1541 if mode and cur in self.t:
1542 self._parts[t] = cur
1543
1544 if self._parts['docheader']:
1545 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1546
1547 def close(self):
1548 if self._parts['docfooter']:
1549 if not self.footer:
1550 self.footer = ""
1551 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1552 return super(changeset_templater, self).close()
1553
1554 def _show(self, ctx, copies, matchfn, props):
1555 '''show a single changeset or file revision'''
1556 props = props.copy()
1557 props.update(templatekw.keywords)
1558 props['templ'] = self.t
1559 props['ctx'] = ctx
1560 props['repo'] = self.repo
1561 props['ui'] = self.repo.ui
1562 props['revcache'] = {'copies': copies}
1563 props['cache'] = self.cache
1564
1565 # write header
1566 if self._parts['header']:
1567 h = templater.stringify(self.t(self._parts['header'], **props))
1568 if self.buffered:
1569 self.header[ctx.rev()] = h
1570 else:
1571 if self.lastheader != h:
1572 self.lastheader = h
1573 self.ui.write(h)
1574
1575 # write changeset metadata, then patch if requested
1576 key = self._parts['changeset']
1577 self.ui.write(templater.stringify(self.t(key, **props)))
1578 self.showpatch(ctx, matchfn)
1579
1580 if self._parts['footer']:
1581 if not self.footer:
1582 self.footer = templater.stringify(
1583 self.t(self._parts['footer'], **props))
1584
1585 def gettemplate(ui, tmpl, style):
1586 """
1587 Find the template matching the given template spec or style.
1588 """
1589
1590 # ui settings
1591 if not tmpl and not style: # template are stronger than style
1592 tmpl = ui.config('ui', 'logtemplate')
1593 if tmpl:
1594 return templater.unquotestring(tmpl), None
1595 else:
1596 style = util.expandpath(ui.config('ui', 'style', ''))
1597
1598 if not tmpl and style:
1599 mapfile = style
1600 if not os.path.split(mapfile)[0]:
1601 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1602 or templater.templatepath(mapfile))
1603 if mapname:
1604 mapfile = mapname
1605 return None, mapfile
1606
1607 if not tmpl:
1608 return None, None
1609
1610 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1611
1612 def show_changeset(ui, repo, opts, buffered=False):
1613 """show one changeset using template or regular display.
1614
1615 Display format will be the first non-empty hit of:
1616 1. option 'template'
1617 2. option 'style'
1618 3. [ui] setting 'logtemplate'
1619 4. [ui] setting 'style'
1620 If all of these values are either the unset or the empty string,
1621 regular display via changeset_printer() is done.
1622 """
1623 # options
1624 matchfn = None
1625 if opts.get('patch') or opts.get('stat'):
1626 matchfn = scmutil.matchall(repo)
1627
1628 if opts.get('template') == 'json':
1629 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1630
1631 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1632
1633 if not tmpl and not mapfile:
1634 return changeset_printer(ui, repo, matchfn, opts, buffered)
1635
1636 return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered)
1637
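# A small sketch of driving the display machinery above: an explicit template
# takes precedence over --style and the [ui] logtemplate/style settings, and
# with neither set the plain changeset_printer is used. The template and the
# revset below are only examples.
displayer = show_changeset(ui, repo, {'template': '{rev}:{node|short} {desc|firstline}\n'})
for ctx in repo.set('last(all(), 3)'):
    displayer.show(ctx)
displayer.close()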
1638 def showmarker(fm, marker, index=None):
1639 """utility function to display obsolescence marker in a readable way
1640
1641 To be used by debug function."""
1642 if index is not None:
1643 fm.write('index', '%i ', index)
1644 fm.write('precnode', '%s ', hex(marker.precnode()))
1645 succs = marker.succnodes()
1646 fm.condwrite(succs, 'succnodes', '%s ',
1647 fm.formatlist(map(hex, succs), name='node'))
1648 fm.write('flag', '%X ', marker.flags())
1649 parents = marker.parentnodes()
1650 if parents is not None:
1651 fm.write('parentnodes', '{%s} ',
1652 fm.formatlist(map(hex, parents), name='node', sep=', '))
1653 fm.write('date', '(%s) ', fm.formatdate(marker.date()))
1654 meta = marker.metadata().copy()
1655 meta.pop('date', None)
1656 fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
1657 fm.plain('\n')
1658
1659 def finddate(ui, repo, date):
1660 """Find the tipmost changeset that matches the given date spec"""
1661
1662 df = util.matchdate(date)
1663 m = scmutil.matchall(repo)
1664 results = {}
1665
1666 def prep(ctx, fns):
1667 d = ctx.date()
1668 if df(d[0]):
1669 results[ctx.rev()] = d
1670
1671 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1672 rev = ctx.rev()
1673 if rev in results:
1674 ui.status(_("found revision %s from %s\n") %
1675 (rev, util.datestr(results[rev])))
1676 return str(rev)
1677
1678 raise error.Abort(_("revision matching date not found"))
1679
1680 def increasingwindows(windowsize=8, sizelimit=512):
1681 while True:
1682 yield windowsize
1683 if windowsize < sizelimit:
1684 windowsize *= 2
1685
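# For reference, the window sizes produced above double until they reach the
# cap and then stay there:
#
#   >>> import itertools
#   >>> list(itertools.islice(increasingwindows(), 9))
#   [8, 16, 32, 64, 128, 256, 512, 512, 512]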
1686 class FileWalkError(Exception):
1687 pass
1688
1689 def walkfilerevs(repo, match, follow, revs, fncache):
1690 '''Walks the file history for the matched files.
1691
1692 Returns the changeset revs that are involved in the file history.
1693
1694 Throws FileWalkError if the file history can't be walked using
1695 filelogs alone.
1696 '''
1697 wanted = set()
1698 copies = []
1699 minrev, maxrev = min(revs), max(revs)
1700 def filerevgen(filelog, last):
1701 """
1702 Only files, no patterns. Check the history of each file.
1703
1704 Examines filelog entries within minrev, maxrev linkrev range
1705 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1706 tuples in backwards order
1707 """
1708 cl_count = len(repo)
1709 revs = []
1710 for j in xrange(0, last + 1):
1711 linkrev = filelog.linkrev(j)
1712 if linkrev < minrev:
1713 continue
1714 # only yield rev for which we have the changelog, it can
1715 # happen while doing "hg log" during a pull or commit
1716 if linkrev >= cl_count:
1717 break
1718
1719 parentlinkrevs = []
1720 for p in filelog.parentrevs(j):
1721 if p != nullrev:
1722 parentlinkrevs.append(filelog.linkrev(p))
1723 n = filelog.node(j)
1724 revs.append((linkrev, parentlinkrevs,
1725 follow and filelog.renamed(n)))
1726
1727 return reversed(revs)
1728 def iterfiles():
1729 pctx = repo['.']
1730 for filename in match.files():
1731 if follow:
1732 if filename not in pctx:
1733 raise error.Abort(_('cannot follow file not in parent '
1734 'revision: "%s"') % filename)
1735 yield filename, pctx[filename].filenode()
1736 else:
1737 yield filename, None
1738 for filename_node in copies:
1739 yield filename_node
1740
1741 for file_, node in iterfiles():
1742 filelog = repo.file(file_)
1743 if not len(filelog):
1744 if node is None:
1745 # A zero count may be a directory or deleted file, so
1746 # try to find matching entries on the slow path.
1747 if follow:
1748 raise error.Abort(
1749 _('cannot follow nonexistent file: "%s"') % file_)
1750 raise FileWalkError("Cannot walk via filelog")
1751 else:
1752 continue
1753
1754 if node is None:
1755 last = len(filelog) - 1
1756 else:
1757 last = filelog.rev(node)
1758
1759 # keep track of all ancestors of the file
1760 ancestors = set([filelog.linkrev(last)])
1761
1762 # iterate from latest to oldest revision
1763 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1764 if not follow:
1765 if rev > maxrev:
1766 continue
1767 else:
1768 # Note that last might not be the first interesting
1769 # rev to us:
1770 # if the file has been changed after maxrev, we'll
1771 # have linkrev(last) > maxrev, and we still need
1772 # to explore the file graph
1773 if rev not in ancestors:
1774 continue
1775 # XXX insert 1327 fix here
1776 if flparentlinkrevs:
1777 ancestors.update(flparentlinkrevs)
1778
1779 fncache.setdefault(rev, []).append(file_)
1780 wanted.add(rev)
1781 if copied:
1782 copies.append(copied)
1783
1784 return wanted
1785
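# _followfilter.match() answers "is this rev part of the followed line?":
# it returns True for the first rev it sees, for descendants of that rev
# when scanning forward, and for its ancestors when scanning backwards.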
1786 class _followfilter(object):
1787 def __init__(self, repo, onlyfirst=False):
1788 self.repo = repo
1789 self.startrev = nullrev
1790 self.roots = set()
1791 self.onlyfirst = onlyfirst
1792
1793 def match(self, rev):
1794 def realparents(rev):
1795 if self.onlyfirst:
1796 return self.repo.changelog.parentrevs(rev)[0:1]
1797 else:
1798 return filter(lambda x: x != nullrev,
1799 self.repo.changelog.parentrevs(rev))
1800
1801 if self.startrev == nullrev:
1802 self.startrev = rev
1803 return True
1804
1805 if rev > self.startrev:
1806 # forward: all descendants
1807 if not self.roots:
1808 self.roots.add(self.startrev)
1809 for parent in realparents(rev):
1810 if parent in self.roots:
1811 self.roots.add(rev)
1812 return True
1813 else:
1814 # backwards: all parents
1815 if not self.roots:
1816 self.roots.update(realparents(self.startrev))
1817 if rev in self.roots:
1818 self.roots.remove(rev)
1819 self.roots.update(realparents(rev))
1820 return True
1821
1822 return False
1823
1824 def walkchangerevs(repo, match, opts, prepare):
1825 '''Iterate over files and the revs in which they changed.
1826
1827 Callers most commonly need to iterate backwards over the history
1828 in which they are interested. Doing so has awful (quadratic-looking)
1829 performance, so we use iterators in a "windowed" way.
1830
1831 We walk a window of revisions in the desired order. Within the
1832 window, we first walk forwards to gather data, then in the desired
1833 order (usually backwards) to display it.
1834
1835 This function returns an iterator yielding contexts. Before
1836 yielding each context, the iterator will first call the prepare
1837 function on each context in the window in forward order.'''
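# Hypothetical usage sketch (names here are illustrative only):
#
#   def prep(ctx, fns):
#       pass  # gather per-changeset data here
#   for ctx in walkchangerevs(repo, match, {'rev': None}, prep):
#       ui.status("%d\n" % ctx.rev())
#
# 'prepare' is called on every context of a window in forward order before
# those contexts are yielded in the requested (usually backwards) order.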
1838
1839 follow = opts.get('follow') or opts.get('follow_first')
1840 revs = _logrevs(repo, opts)
1841 if not revs:
1842 return []
1843 wanted = set()
1844 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1845 opts.get('removed'))
1846 fncache = {}
1847 change = repo.changectx
1848
1849 # First step is to fill wanted, the set of revisions that we want to yield.
1850 # When it does not induce extra cost, we also fill fncache for revisions in
1851 # wanted: a cache of filenames that were changed (ctx.files()) and that
1852 # match the file filtering conditions.
1853
1854 if match.always():
1855 # No files, no patterns. Display all revs.
1856 wanted = revs
1857 elif not slowpath:
1858 # We only have to read through the filelog to find wanted revisions
1859
1860 try:
1861 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1862 except FileWalkError:
1863 slowpath = True
1864
1865 # We decided to fall back to the slowpath because at least one
1866 # of the paths was not a file. Check to see if at least one of them
1867 # existed in history, otherwise simply return
1868 for path in match.files():
1869 if path == '.' or path in repo.store:
1870 break
1871 else:
1872 return []
1873
1874 if slowpath:
1875 # We have to read the changelog to match filenames against
1876 # changed files
1877
1878 if follow:
1879 raise error.Abort(_('can only follow copies/renames for explicit '
1880 'filenames'))
1881
1882 # The slow path checks files modified in every changeset.
1883 # This is really slow on large repos, so compute the set lazily.
1884 class lazywantedset(object):
1885 def __init__(self):
1886 self.set = set()
1887 self.revs = set(revs)
1888
1889 # No need to worry about locality here because it will be accessed
1890 # in the same order as the increasing window below.
1891 def __contains__(self, value):
1892 if value in self.set:
1893 return True
1894 elif value not in self.revs:
1895 return False
1896 else:
1897 self.revs.discard(value)
1898 ctx = change(value)
1899 matches = filter(match, ctx.files())
1900 if matches:
1901 fncache[value] = matches
1902 self.set.add(value)
1903 return True
1904 return False
1905
1906 def discard(self, value):
1907 self.revs.discard(value)
1908 self.set.discard(value)
1909
1910 wanted = lazywantedset()
1911
1912 # it might be worthwhile to do this in the iterator if the rev range
1913 # is descending and the prune args are all within that range
1914 for rev in opts.get('prune', ()):
1915 rev = repo[rev].rev()
1916 ff = _followfilter(repo)
1917 stop = min(revs[0], revs[-1])
1918 for x in xrange(rev, stop - 1, -1):
1919 if ff.match(x):
1920 wanted = wanted - [x]
1921
1922 # Now that wanted is correctly initialized, we can iterate over the
1923 # revision range, yielding only revisions in wanted.
1924 def iterate():
1925 if follow and match.always():
1926 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1927 def want(rev):
1928 return ff.match(rev) and rev in wanted
1929 else:
1930 def want(rev):
1931 return rev in wanted
1932
1933 it = iter(revs)
1934 stopiteration = False
1935 for windowsize in increasingwindows():
1936 nrevs = []
1937 for i in xrange(windowsize):
1938 rev = next(it, None)
1939 if rev is None:
1940 stopiteration = True
1941 break
1942 elif want(rev):
1943 nrevs.append(rev)
1944 for rev in sorted(nrevs):
1945 fns = fncache.get(rev)
1946 ctx = change(rev)
1947 if not fns:
1948 def fns_generator():
1949 for f in ctx.files():
1950 if match(f):
1951 yield f
1952 fns = fns_generator()
1953 prepare(ctx, fns)
1954 for rev in nrevs:
1955 yield change(rev)
1956
1957 if stopiteration:
1958 break
1959
1960 return iterate()
1961
1962 def _makefollowlogfilematcher(repo, files, followfirst):
1963 # When displaying a revision with --patch --follow FILE, we have
1964 # to know which file of the revision must be diffed. With
1965 # --follow, we want the names of the ancestors of FILE in the
1966 # revision, stored in "fcache". "fcache" is populated by
1967 # reproducing the graph traversal already done by --follow revset
1968 # and relating revs to file names (which is not "correct" but
1969 # good enough).
1970 fcache = {}
1971 fcacheready = [False]
1972 pctx = repo['.']
1973
1974 def populate():
1975 for fn in files:
1976 fctx = pctx[fn]
1977 fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
1978 for c in fctx.ancestors(followfirst=followfirst):
1979 fcache.setdefault(c.rev(), set()).add(c.path())
1980
1981 def filematcher(rev):
1982 if not fcacheready[0]:
1983 # Lazy initialization
1984 fcacheready[0] = True
1985 populate()
1986 return scmutil.matchfiles(repo, fcache.get(rev, []))
1987
1988 return filematcher
1989
1990 def _makenofollowlogfilematcher(repo, pats, opts):
1991 '''hook for extensions to override the filematcher for non-follow cases'''
1992 return None
1993
1994 def _makelogrevset(repo, pats, opts, revs):
1995 """Return (expr, filematcher) where expr is a revset string built
1996 from log options and file patterns or None. If --stat or --patch
1997 are not passed filematcher is None. Otherwise it is a callable
1998 taking a revision number and returning a match object filtering
1999 the files to be detailed when displaying the revision.
2000 """
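# Illustrative example (hypothetical option values): with no file patterns
# and opts = {'user': ['alice'], 'keyword': ['bug']}, the opt2revset table
# below combines into roughly "((keyword('bug')) and (user('alice')))".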
2001 opt2revset = {
2002 'no_merges': ('not merge()', None),
2003 'only_merges': ('merge()', None),
2004 '_ancestors': ('ancestors(%(val)s)', None),
2005 '_fancestors': ('_firstancestors(%(val)s)', None),
2006 '_descendants': ('descendants(%(val)s)', None),
2007 '_fdescendants': ('_firstdescendants(%(val)s)', None),
2008 '_matchfiles': ('_matchfiles(%(val)s)', None),
2009 'date': ('date(%(val)r)', None),
2010 'branch': ('branch(%(val)r)', ' or '),
2011 '_patslog': ('filelog(%(val)r)', ' or '),
2012 '_patsfollow': ('follow(%(val)r)', ' or '),
2013 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
2014 'keyword': ('keyword(%(val)r)', ' or '),
2015 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
2016 'user': ('user(%(val)r)', ' or '),
2017 }
2018
2019 opts = dict(opts)
2020 # follow or not follow?
2021 follow = opts.get('follow') or opts.get('follow_first')
2022 if opts.get('follow_first'):
2023 followfirst = 1
2024 else:
2025 followfirst = 0
2026 # --follow with FILE behavior depends on revs...
2027 it = iter(revs)
2028 startrev = next(it)
2029 followdescendants = startrev < next(it, startrev)
2030
2031 # branch and only_branch are really aliases and must be handled at
2032 # the same time
2033 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2034 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2035 # pats/include/exclude are passed to match.match() directly in
2036 # _matchfiles() revset but walkchangerevs() builds its matcher with
2037 # scmutil.match(). The difference is input pats are globbed on
2038 # platforms without shell expansion (windows).
2039 wctx = repo[None]
2040 match, pats = scmutil.matchandpats(wctx, pats, opts)
2041 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2042 opts.get('removed'))
2043 if not slowpath:
2044 for f in match.files():
2045 if follow and f not in wctx:
2046 # If the file exists, it may be a directory, so let it
2047 # take the slow path.
2048 if os.path.exists(repo.wjoin(f)):
2049 slowpath = True
2050 continue
2051 else:
2052 raise error.Abort(_('cannot follow file not in parent '
2053 'revision: "%s"') % f)
2054 filelog = repo.file(f)
2055 if not filelog:
2056 # A zero count may be a directory or deleted file, so
2057 # try to find matching entries on the slow path.
2058 if follow:
2059 raise error.Abort(
2060 _('cannot follow nonexistent file: "%s"') % f)
2061 slowpath = True
2062
2063 # We decided to fall back to the slowpath because at least one
2064 # of the paths was not a file. Check to see if at least one of them
2065 # existed in history - in that case, we'll continue down the
2066 # slowpath; otherwise, we can turn off the slowpath
2067 if slowpath:
2068 for path in match.files():
2069 if path == '.' or path in repo.store:
2070 break
2071 else:
2072 slowpath = False
2073
2074 fpats = ('_patsfollow', '_patsfollowfirst')
2075 fnopats = (('_ancestors', '_fancestors'),
2076 ('_descendants', '_fdescendants'))
2077 if slowpath:
2078 # See walkchangerevs() slow path.
2079 #
2080 # pats/include/exclude cannot be represented as separate
2081 # revset expressions as their filtering logic applies at file
2082 # level. For instance "-I a -X b" matches a revision touching
2083 # "a" and "b" while "file(a) and not file(b)" does
2084 # not. Besides, filesets are evaluated against the working
2085 # directory.
2086 matchargs = ['r:', 'd:relpath']
2087 for p in pats:
2088 matchargs.append('p:' + p)
2089 for p in opts.get('include', []):
2090 matchargs.append('i:' + p)
2091 for p in opts.get('exclude', []):
2092 matchargs.append('x:' + p)
2093 matchargs = ','.join(('%r' % p) for p in matchargs)
2094 opts['_matchfiles'] = matchargs
2095 if follow:
2096 opts[fnopats[0][followfirst]] = '.'
2097 else:
2098 if follow:
2099 if pats:
2100 # follow() revset interprets its file argument as a
2101 # manifest entry, so use match.files(), not pats.
2102 opts[fpats[followfirst]] = list(match.files())
2103 else:
2104 op = fnopats[followdescendants][followfirst]
2105 opts[op] = 'rev(%d)' % startrev
2106 else:
2107 opts['_patslog'] = list(pats)
2108
2109 filematcher = None
2110 if opts.get('patch') or opts.get('stat'):
2111 # When following files, track renames via a special matcher.
2112 # If we're forced to take the slowpath it means we're following
2113 # at least one pattern/directory, so don't bother with rename tracking.
2114 if follow and not match.always() and not slowpath:
2115 # _makefollowlogfilematcher expects its files argument to be
2116 # relative to the repo root, so use match.files(), not pats.
2117 filematcher = _makefollowlogfilematcher(repo, match.files(),
2118 followfirst)
2119 else:
2120 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2121 if filematcher is None:
2122 filematcher = lambda rev: match
2123
2124 expr = []
2125 for op, val in sorted(opts.iteritems()):
2126 if not val:
2127 continue
2128 if op not in opt2revset:
2129 continue
2130 revop, andor = opt2revset[op]
2131 if '%(val)' not in revop:
2132 expr.append(revop)
2133 else:
2134 if not isinstance(val, list):
2135 e = revop % {'val': val}
2136 else:
2137 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2138 expr.append(e)
2139
2140 if expr:
2141 expr = '(' + ' and '.join(expr) + ')'
2142 else:
2143 expr = None
2144 return expr, filematcher
2145
2146 def _logrevs(repo, opts):
2147 # Default --rev value depends on --follow but --follow behavior
2148 # depends on revisions resolved from --rev...
2149 follow = opts.get('follow') or opts.get('follow_first')
2150 if opts.get('rev'):
2151 revs = scmutil.revrange(repo, opts['rev'])
2152 elif follow and repo.dirstate.p1() == nullid:
2153 revs = revset.baseset()
2154 elif follow:
2155 revs = repo.revs('reverse(:.)')
2156 else:
2157 revs = revset.spanset(repo)
2158 revs.reverse()
2159 return revs
2160
2161 def getgraphlogrevs(repo, pats, opts):
2162 """Return (revs, expr, filematcher) where revs is an iterable of
2163 revision numbers, expr is a revset string built from log options
2164 and file patterns or None, and used to filter 'revs'. If --stat or
2165 --patch are not passed filematcher is None. Otherwise it is a
2166 callable taking a revision number and returning a match object
2167 filtering the files to be detailed when displaying the revision.
2168 """
2169 limit = loglimit(opts)
2170 revs = _logrevs(repo, opts)
2171 if not revs:
2172 return revset.baseset(), None, None
2173 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2174 if opts.get('rev'):
2175 # User-specified revs might be unsorted, but don't sort before
2176 # _makelogrevset because it might depend on the order of revs
2177 if not (revs.isdescending() or revs.istopo()):
2178 revs.sort(reverse=True)
2179 if expr:
2180 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2181 revs = matcher(repo, revs)
2182 if limit is not None:
2183 limitedrevs = []
2184 for idx, rev in enumerate(revs):
2185 if idx >= limit:
2186 break
2187 limitedrevs.append(rev)
2188 revs = revset.baseset(limitedrevs)
2189
2190 return revs, expr, filematcher
2191
2192 def getlogrevs(repo, pats, opts):
2193 """Return (revs, expr, filematcher) where revs is an iterable of
2194 revision numbers, expr is a revset string built from log options
2195 and file patterns or None, and used to filter 'revs'. If --stat or
2196 --patch are not passed filematcher is None. Otherwise it is a
2197 callable taking a revision number and returning a match object
2198 filtering the files to be detailed when displaying the revision.
2199 """
2200 limit = loglimit(opts)
2201 revs = _logrevs(repo, opts)
2202 if not revs:
2203 return revset.baseset([]), None, None
2204 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2205 if expr:
2206 matcher = revset.match(repo.ui, expr, order=revset.followorder)
2207 revs = matcher(repo, revs)
2208 if limit is not None:
2209 limitedrevs = []
2210 for idx, r in enumerate(revs):
2211 if limit <= idx:
2212 break
2213 limitedrevs.append(r)
2214 revs = revset.baseset(limitedrevs)
2215
2216 return revs, expr, filematcher
2217
2218 def _graphnodeformatter(ui, displayer):
2219 spec = ui.config('ui', 'graphnodetemplate')
2220 if not spec:
2221 return templatekw.showgraphnode # fast path for "{graphnode}"
2222
2223 templ = formatter.gettemplater(ui, 'graphnode', spec)
2224 cache = {}
2225 if isinstance(displayer, changeset_templater):
2226 cache = displayer.cache # reuse cache of slow templates
2227 props = templatekw.keywords.copy()
2228 props['templ'] = templ
2229 props['cache'] = cache
2230 def formatnode(repo, ctx):
2231 props['ctx'] = ctx
2232 props['repo'] = repo
2233 props['ui'] = repo.ui
2234 props['revcache'] = {}
2235 return templater.stringify(templ('graphnode', **props))
2236 return formatnode
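# Hypothetical hgrc example: setting
#   [ui]
#   graphnodetemplate = {if(bookmarks, "*", "o")}
# draws bookmarked revisions as '*' and everything else as 'o' in the graph.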
2237
2238 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2239 filematcher=None):
2240 formatnode = _graphnodeformatter(ui, displayer)
2241 state = graphmod.asciistate()
2242 styles = state['styles']
2243
2244 # only set graph styling if HGPLAIN is not set.
2245 if ui.plain('graph'):
2246 # set all edge styles to |, the default pre-3.8 behaviour
2247 styles.update(dict.fromkeys(styles, '|'))
2248 else:
2249 edgetypes = {
2250 'parent': graphmod.PARENT,
2251 'grandparent': graphmod.GRANDPARENT,
2252 'missing': graphmod.MISSINGPARENT
2253 }
2254 for name, key in edgetypes.items():
2255 # experimental config: experimental.graphstyle.*
2256 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2257 styles[key])
2258 if not styles[key]:
2259 styles[key] = None
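# Hypothetical hgrc example for the settings read above:
#   [experimental]
#   graphstyle.parent = |
#   graphstyle.grandparent = :
#   graphstyle.missing =
# An empty value is stored as None above.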
2260
2261 # experimental config: experimental.graphshorten
2262 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2263
2264 for rev, type, ctx, parents in dag:
2265 char = formatnode(repo, ctx)
2266 copies = None
2267 if getrenamed and ctx.rev():
2268 copies = []
2269 for fn in ctx.files():
2270 rename = getrenamed(fn, ctx.rev())
2271 if rename:
2272 copies.append((fn, rename[0]))
2273 revmatchfn = None
2274 if filematcher is not None:
2275 revmatchfn = filematcher(ctx.rev())
2276 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2277 lines = displayer.hunk.pop(rev).split('\n')
2278 if not lines[-1]:
2279 del lines[-1]
2280 displayer.flush(ctx)
2281 edges = edgefn(type, char, lines, state, rev, parents)
2282 for type, char, lines, coldata in edges:
2283 graphmod.ascii(ui, state, type, char, lines, coldata)
2284 displayer.close()
2285
2286 def graphlog(ui, repo, *pats, **opts):
2287 # Parameters are identical to log command ones
2288 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2289 revdag = graphmod.dagwalker(repo, revs)
2290
2291 getrenamed = None
2292 if opts.get('copies'):
2293 endrev = None
2294 if opts.get('rev'):
2295 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2296 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2297 displayer = show_changeset(ui, repo, opts, buffered=True)
2298 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2299 filematcher)
2300
2301 def checkunsupportedgraphflags(pats, opts):
2302 for op in ["newest_first"]:
2303 if op in opts and opts[op]:
2304 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2305 % op.replace("_", "-"))
2306
2307 def graphrevs(repo, nodes, opts):
2308 limit = loglimit(opts)
2309 nodes.reverse()
2310 if limit is not None:
2311 nodes = nodes[:limit]
2312 return graphmod.nodes(repo, nodes)
2313
2314 def add(ui, repo, match, prefix, explicitonly, **opts):
2315 join = lambda f: os.path.join(prefix, f)
2316 bad = []
2317
2318 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2319 names = []
2320 wctx = repo[None]
2321 cca = None
2322 abort, warn = scmutil.checkportabilityalert(ui)
2323 if abort or warn:
2324 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2325
2326 badmatch = matchmod.badmatch(match, badfn)
2327 dirstate = repo.dirstate
2328 # We don't want to just call wctx.walk here, since it would return a lot of
2329 # clean files, which we aren't interested in, and doing so takes time.
2330 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2331 True, False, full=False)):
2332 exact = match.exact(f)
2333 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2334 if cca:
2335 cca(f)
2336 names.append(f)
2337 if ui.verbose or not exact:
2338 ui.status(_('adding %s\n') % match.rel(f))
2339
2340 for subpath in sorted(wctx.substate):
2341 sub = wctx.sub(subpath)
2342 try:
2343 submatch = matchmod.subdirmatcher(subpath, match)
2344 if opts.get('subrepos'):
2345 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2346 else:
2347 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2348 except error.LookupError:
2349 ui.status(_("skipping missing subrepository: %s\n")
2350 % join(subpath))
2351
2352 if not opts.get('dry_run'):
2353 rejected = wctx.add(names, prefix)
2354 bad.extend(f for f in rejected if f in match.files())
2355 return bad
2356
2357 def forget(ui, repo, match, prefix, explicitonly):
2358 join = lambda f: os.path.join(prefix, f)
2359 bad = []
2360 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2361 wctx = repo[None]
2362 forgot = []
2363
2364 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2365 forget = sorted(s[0] + s[1] + s[3] + s[6])
2366 if explicitonly:
2367 forget = [f for f in forget if match.exact(f)]
2368
2369 for subpath in sorted(wctx.substate):
2370 sub = wctx.sub(subpath)
2371 try:
2372 submatch = matchmod.subdirmatcher(subpath, match)
2373 subbad, subforgot = sub.forget(submatch, prefix)
2374 bad.extend([subpath + '/' + f for f in subbad])
2375 forgot.extend([subpath + '/' + f for f in subforgot])
2376 except error.LookupError:
2377 ui.status(_("skipping missing subrepository: %s\n")
2378 % join(subpath))
2379
2380 if not explicitonly:
2381 for f in match.files():
2382 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2383 if f not in forgot:
2384 if repo.wvfs.exists(f):
2385 # Don't complain if the exact case match wasn't given.
2386 # But don't do this until after checking 'forgot', so
2387 # that subrepo files aren't normalized, and this op is
2388 # purely from data cached by the status walk above.
2389 if repo.dirstate.normalize(f) in repo.dirstate:
2390 continue
2391 ui.warn(_('not removing %s: '
2392 'file is already untracked\n')
2393 % match.rel(f))
2394 bad.append(f)
2395
2396 for f in forget:
2397 if ui.verbose or not match.exact(f):
2398 ui.status(_('removing %s\n') % match.rel(f))
2399
2400 rejected = wctx.forget(forget, prefix)
2401 bad.extend(f for f in rejected if f in match.files())
2402 forgot.extend(f for f in forget if f not in rejected)
2403 return bad, forgot
2404
2405 def files(ui, ctx, m, fm, fmt, subrepos):
2406 rev = ctx.rev()
2407 ret = 1
2408 ds = ctx.repo().dirstate
2409
2410 for f in ctx.matches(m):
2411 if rev is None and ds[f] == 'r':
2412 continue
2413 fm.startitem()
2414 if ui.verbose:
2415 fc = ctx[f]
2416 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2417 fm.data(abspath=f)
2418 fm.write('path', fmt, m.rel(f))
2419 ret = 0
2420
2421 for subpath in sorted(ctx.substate):
2422 submatch = matchmod.subdirmatcher(subpath, m)
2423 if (subrepos or m.exact(subpath) or any(submatch.files())):
2424 sub = ctx.sub(subpath)
2425 try:
2426 recurse = m.exact(subpath) or subrepos
2427 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2428 ret = 0
2429 except error.LookupError:
2430 ui.status(_("skipping missing subrepository: %s\n")
2431 % m.abs(subpath))
2432
2433 return ret
2434
2435 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2436 join = lambda f: os.path.join(prefix, f)
2437 ret = 0
2438 s = repo.status(match=m, clean=True)
2439 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2440
2441 wctx = repo[None]
2442
2443 if warnings is None:
2444 warnings = []
2445 warn = True
2446 else:
2447 warn = False
2448
2449 subs = sorted(wctx.substate)
2450 total = len(subs)
2451 count = 0
2452 for subpath in subs:
2453 count += 1
2454 submatch = matchmod.subdirmatcher(subpath, m)
2455 if subrepos or m.exact(subpath) or any(submatch.files()):
2456 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2457 sub = wctx.sub(subpath)
2458 try:
2459 if sub.removefiles(submatch, prefix, after, force, subrepos,
2460 warnings):
2461 ret = 1
2462 except error.LookupError:
2463 warnings.append(_("skipping missing subrepository: %s\n")
2464 % join(subpath))
2465 ui.progress(_('searching'), None)
2466
2467 # warn about failure to delete explicit files/dirs
2468 deleteddirs = util.dirs(deleted)
2469 files = m.files()
2470 total = len(files)
2471 count = 0
2472 for f in files:
2473 def insubrepo():
2474 for subpath in wctx.substate:
2475 if f.startswith(subpath + '/'):
2476 return True
2477 return False
2478
2479 count += 1
2480 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2481 isdir = f in deleteddirs or wctx.hasdir(f)
2482 if (f in repo.dirstate or isdir or f == '.'
2483 or insubrepo() or f in subs):
2484 continue
2485
2486 if repo.wvfs.exists(f):
2487 if repo.wvfs.isdir(f):
2488 warnings.append(_('not removing %s: no tracked files\n')
2489 % m.rel(f))
2490 else:
2491 warnings.append(_('not removing %s: file is untracked\n')
2492 % m.rel(f))
2493 # missing files will generate a warning elsewhere
2494 ret = 1
2495 ui.progress(_('deleting'), None)
2496
2497 if force:
2498 list = modified + deleted + clean + added
2499 elif after:
2500 list = deleted
2501 remaining = modified + added + clean
2502 total = len(remaining)
2503 count = 0
2504 for f in remaining:
2505 count += 1
2506 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2507 warnings.append(_('not removing %s: file still exists\n')
2508 % m.rel(f))
2509 ret = 1
2510 ui.progress(_('skipping'), None)
2511 else:
2512 list = deleted + clean
2513 total = len(modified) + len(added)
2514 count = 0
2515 for f in modified:
2516 count += 1
2517 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2518 warnings.append(_('not removing %s: file is modified (use -f'
2519 ' to force removal)\n') % m.rel(f))
2520 ret = 1
2521 for f in added:
2522 count += 1
2523 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2524 warnings.append(_("not removing %s: file has been marked for add"
2525 " (use 'hg forget' to undo add)\n") % m.rel(f))
2526 ret = 1
2527 ui.progress(_('skipping'), None)
2528
2529 list = sorted(list)
2530 total = len(list)
2531 count = 0
2532 for f in list:
2533 count += 1
2534 if ui.verbose or not m.exact(f):
2535 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2536 ui.status(_('removing %s\n') % m.rel(f))
2537 ui.progress(_('deleting'), None)
2538
2539 with repo.wlock():
2540 if not after:
2541 for f in list:
2542 if f in added:
2543 continue # we never unlink added files on remove
2544 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2545 repo[None].forget(list)
2546
2547 if warn:
2548 for warning in warnings:
2549 ui.warn(warning)
2550
2551 return ret
2552
2553 def cat(ui, repo, ctx, matcher, prefix, **opts):
2554 err = 1
2555
2556 def write(path):
2557 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2558 pathname=os.path.join(prefix, path))
2559 data = ctx[path].data()
2560 if opts.get('decode'):
2561 data = repo.wwritedata(path, data)
2562 fp.write(data)
2563 fp.close()
2564
2565 # Automation often uses hg cat on single files, so special case it
2566 # for performance to avoid the cost of parsing the manifest.
2567 if len(matcher.files()) == 1 and not matcher.anypats():
2568 file = matcher.files()[0]
2569 mfl = repo.manifestlog
2570 mfnode = ctx.manifestnode()
2571 try:
2572 if mfnode and mfl[mfnode].find(file)[0]:
2573 write(file)
2574 return 0
2575 except KeyError:
2576 pass
2577
2578 for abs in ctx.walk(matcher):
2579 write(abs)
2580 err = 0
2581
2582 for subpath in sorted(ctx.substate):
2583 sub = ctx.sub(subpath)
2584 try:
2585 submatch = matchmod.subdirmatcher(subpath, matcher)
2586
2587 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2588 **opts):
2589 err = 0
2590 except error.RepoLookupError:
2591 ui.status(_("skipping missing subrepository: %s\n")
2592 % os.path.join(prefix, subpath))
2593
2594 return err
2595
2596 def commit(ui, repo, commitfunc, pats, opts):
2597 '''commit the specified files or all outstanding changes'''
2598 date = opts.get('date')
2599 if date:
2600 opts['date'] = util.parsedate(date)
2601 message = logmessage(ui, opts)
2602 matcher = scmutil.match(repo[None], pats, opts)
2603
2604 # extract addremove carefully -- this function can be called from a command
2605 # that doesn't support addremove
2606 if opts.get('addremove'):
2607 if scmutil.addremove(repo, matcher, "", opts) != 0:
2608 raise error.Abort(
2609 _("failed to mark all new/missing files as added/removed"))
2610
2611 return commitfunc(ui, repo, message, matcher, opts)
2612
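# samefile() reports whether 'f' is identical (content and flags) in ctx1
# and ctx2, treating a file absent from both manifests as identical too.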
2613 def samefile(f, ctx1, ctx2):
2614 if f in ctx1.manifest():
2615 a = ctx1.filectx(f)
2616 if f in ctx2.manifest():
2617 b = ctx2.filectx(f)
2618 return (not a.cmp(b)
2619 and a.flags() == b.flags())
2620 else:
2621 return False
2622 else:
2623 return f not in ctx2.manifest()
2624
2625 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2626 # avoid cycle context -> subrepo -> cmdutil
2627 from . import context
2628
2629 # amend will reuse the existing user if not specified, but the obsolete
2630 # marker creation requires that the current user's name is specified.
2631 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2632 ui.username() # raise exception if username not set
2633
2634 ui.note(_('amending changeset %s\n') % old)
2635 base = old.p1()
2636 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2637
2638 wlock = lock = newid = None
2639 try:
2640 wlock = repo.wlock()
2641 lock = repo.lock()
2642 with repo.transaction('amend') as tr:
2643 # See if we got a message from -m or -l, if not, open the editor
2644 # with the message of the changeset to amend
2645 message = logmessage(ui, opts)
2646 # ensure logfile does not conflict with later enforcement of the
2647 # message. potential logfile content has been processed by
2648 # `logmessage` anyway.
2649 opts.pop('logfile')
2650 # First, do a regular commit to record all changes in the working
2651 # directory (if there are any)
2652 ui.callhooks = False
2653 activebookmark = repo._bookmarks.active
2654 try:
2655 repo._bookmarks.active = None
2656 opts['message'] = 'temporary amend commit for %s' % old
2657 node = commit(ui, repo, commitfunc, pats, opts)
2658 finally:
2659 repo._bookmarks.active = activebookmark
2660 repo._bookmarks.recordchange(tr)
2661 ui.callhooks = True
2662 ctx = repo[node]
2663
2664 # Participating changesets:
2665 #
2666 # node/ctx o - new (intermediate) commit that contains changes
2667 # | from working dir to go into amending commit
2668 # | (or a workingctx if there were no changes)
2669 # |
2670 # old o - changeset to amend
2671 # |
2672 # base o - parent of amending changeset
2673
2674 # Update extra dict from amended commit (e.g. to preserve graft
2675 # source)
2676 extra.update(old.extra())
2677
2678 # Also update it from the intermediate commit or from the wctx
2679 extra.update(ctx.extra())
2680
2681 if len(old.parents()) > 1:
2682 # ctx.files() isn't reliable for merges, so fall back to the
2683 # slower repo.status() method
2684 files = set([fn for st in repo.status(base, old)[:3]
2685 for fn in st])
2686 else:
2687 files = set(old.files())
2688
2689 # Second, we use either the commit we just did or, if there were no
2690 # changes, the parent of the working directory as the version of the
2691 # files in the final amend commit
2692 if node:
2693 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2694
2695 user = ctx.user()
2696 date = ctx.date()
2697 # Recompute copies (avoid recording a -> b -> a)
2698 copied = copies.pathcopies(base, ctx)
2699 if old.p2().node() != nullid: # p2 is a method; test the actual second parent
2700 copied.update(copies.pathcopies(old.p2(), ctx))
2701
2702 # Prune files which were reverted by the updates: if old
2703 # introduced file X and our intermediate commit, node,
2704 # renamed that file, then those two files are the same and
2705 # we can discard X from our list of files. Likewise if X
2706 # was deleted, it's no longer relevant
2707 files.update(ctx.files())
2708 files = [f for f in files if not samefile(f, ctx, base)]
2709
2710 def filectxfn(repo, ctx_, path):
2711 try:
2712 fctx = ctx[path]
2713 flags = fctx.flags()
2714 mctx = context.memfilectx(repo,
2715 fctx.path(), fctx.data(),
2716 islink='l' in flags,
2717 isexec='x' in flags,
2718 copied=copied.get(path))
2719 return mctx
2720 except KeyError:
2721 return None
2722 else:
2723 ui.note(_('copying changeset %s to %s\n') % (old, base))
2724
2725 # Use version of files as in the old cset
2726 def filectxfn(repo, ctx_, path):
2727 try:
2728 return old.filectx(path)
2729 except KeyError:
2730 return None
2731
2732 user = opts.get('user') or old.user()
2733 date = opts.get('date') or old.date()
2734 editform = mergeeditform(old, 'commit.amend')
2735 editor = getcommiteditor(editform=editform, **opts)
2736 if not message:
2737 editor = getcommiteditor(edit=True, editform=editform)
2738 message = old.description()
2739
2740 pureextra = extra.copy()
2741 extra['amend_source'] = old.hex()
2742
2743 new = context.memctx(repo,
2744 parents=[base.node(), old.p2().node()],
2745 text=message,
2746 files=files,
2747 filectxfn=filectxfn,
2748 user=user,
2749 date=date,
2750 extra=extra,
2751 editor=editor)
2752
2753 newdesc = changelog.stripdesc(new.description())
2754 if ((not node)
2755 and newdesc == old.description()
2756 and user == old.user()
2757 and date == old.date()
2758 and pureextra == old.extra()):
2759 # nothing changed. continuing here would create a new node
2760 # anyway because of the amend_source noise.
2761 #
2762 # This is not what we expect from amend.
2763 return old.node()
2764
2765 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2766 try:
2767 if opts.get('secret'):
2768 commitphase = 'secret'
2769 else:
2770 commitphase = old.phase()
2771 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2772 newid = repo.commitctx(new)
2773 finally:
2774 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2775 if newid != old.node():
2776 # Reroute the working copy parent to the new changeset
2777 repo.setparents(newid, nullid)
2778
2779 # Move bookmarks from old parent to amend commit
2780 bms = repo.nodebookmarks(old.node())
2781 if bms:
2782 marks = repo._bookmarks
2783 for bm in bms:
2784 ui.debug('moving bookmarks %r from %s to %s\n' %
2785 (marks, old.hex(), hex(newid)))
2786 marks[bm] = newid
2787 marks.recordchange(tr)
2788 # commit the whole amend process
2789 if createmarkers:
2790 # mark the new changeset as successor of the rewritten one
2791 new = repo[newid]
2792 obs = [(old, (new,))]
2793 if node:
2794 obs.append((ctx, ()))
2795
2796 obsolete.createmarkers(repo, obs)
2797 if not createmarkers and newid != old.node():
2798 # Strip the intermediate commit (if there was one) and the amended
2799 # commit
2800 if node:
2801 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2802 ui.note(_('stripping amended changeset %s\n') % old)
2803 repair.strip(ui, repo, old.node(), topic='amend-backup')
2804 finally:
2805 lockmod.release(lock, wlock)
2806 return newid
2807
2808 def commiteditor(repo, ctx, subs, editform=''):
2809 if ctx.description():
2810 return ctx.description()
2811 return commitforceeditor(repo, ctx, subs, editform=editform,
2812 unchangedmessagedetection=True)
2813
2814 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2815 editform='', unchangedmessagedetection=False):
2816 if not extramsg:
2817 extramsg = _("Leave message empty to abort commit.")
2818
2819 forms = [e for e in editform.split('.') if e]
2820 forms.insert(0, 'changeset')
2821 templatetext = None
2822 while forms:
2823 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2824 if tmpl:
2825 templatetext = committext = buildcommittemplate(
2826 repo, ctx, subs, extramsg, tmpl)
2827 break
2828 forms.pop()
2829 else:
2830 committext = buildcommittext(repo, ctx, subs, extramsg)
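# For example, with editform 'commit.amend.normal' the configuration keys
# tried above are, in order: committemplate.changeset.commit.amend.normal,
# committemplate.changeset.commit.amend, committemplate.changeset.commit
# and finally committemplate.changeset.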
2831
2832 # run editor in the repository root
2833 olddir = os.getcwd()
2834 os.chdir(repo.root)
2835
2836 # make in-memory changes visible to external process
2837 tr = repo.currenttransaction()
2838 repo.dirstate.write(tr)
2839 pending = tr and tr.writepending() and repo.root
2840
2841 editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
2842 editform=editform, pending=pending)
2843 text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
2844 os.chdir(olddir)
2845
2846 if finishdesc:
2847 text = finishdesc(text)
2848 if not text.strip():
2849 raise error.Abort(_("empty commit message"))
2850 if unchangedmessagedetection and editortext == templatetext:
2851 raise error.Abort(_("commit message unchanged"))
2852
2853 return text
2854
2855 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2856 ui = repo.ui
2857 tmpl, mapfile = gettemplate(ui, tmpl, None)
2858
2859 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2860
2861 for k, v in repo.ui.configitems('committemplate'):
2862 if k != 'changeset':
2863 t.t.cache[k] = v
2864
2865 if not extramsg:
2866 extramsg = '' # ensure that extramsg is string
2867
2868 ui.pushbuffer()
2869 t.show(ctx, extramsg=extramsg)
2870 return ui.popbuffer()
2871
2872 def hgprefix(msg):
2873 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
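# e.g. hgprefix("user: alice\nbranch merge") -> "HG: user: alice\nHG: branch merge"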
2874
2875 def buildcommittext(repo, ctx, subs, extramsg):
2876 edittext = []
2877 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2878 if ctx.description():
2879 edittext.append(ctx.description())
2880 edittext.append("")
2881 edittext.append("") # Empty line between message and comments.
2882 edittext.append(hgprefix(_("Enter commit message."
2883 " Lines beginning with 'HG:' are removed.")))
2884 edittext.append(hgprefix(extramsg))
2885 edittext.append("HG: --")
2886 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2887 if ctx.p2():
2888 edittext.append(hgprefix(_("branch merge")))
2889 if ctx.branch():
2890 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2891 if bookmarks.isactivewdirparent(repo):
2892 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2893 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2894 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2895 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2896 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2897 if not added and not modified and not removed:
2898 edittext.append(hgprefix(_("no files changed")))
2899 edittext.append("")
2900
2901 return "\n".join(edittext)
2902
2903 def commitstatus(repo, node, branch, bheads=None, opts=None):
2904 if opts is None:
2905 opts = {}
2906 ctx = repo[node]
2907 parents = ctx.parents()
2908
2909 if (not opts.get('amend') and bheads and node not in bheads and not
2910 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2911 repo.ui.status(_('created new head\n'))
2912 # The message is not printed for initial roots. For the other
2913 # changesets, it is printed in the following situations:
2914 #
2915 # Par column: for the 2 parents with ...
2916 # N: null or no parent
2917 # B: parent is on another named branch
2918 # C: parent is a regular non head changeset
2919 # H: parent was a branch head of the current branch
2920 # Msg column: whether we print "created new head" message
2921 # In the following, it is assumed that there already exist some
2922 # initial branch heads of the current branch, otherwise nothing is
2923 # printed anyway.
2924 #
2925 # Par Msg Comment
2926 # N N y additional topo root
2927 #
2928 # B N y additional branch root
2929 # C N y additional topo head
2930 # H N n usual case
2931 #
2932 # B B y weird additional branch root
2933 # C B y branch merge
2934 # H B n merge with named branch
2935 #
2936 # C C y additional head from merge
2937 # C H n merge with a head
2938 #
2939 # H H n head merge: head count decreases
2940
2941 if not opts.get('close_branch'):
2942 for r in parents:
2943 if r.closesbranch() and r.branch() == branch:
2944 repo.ui.status(_('reopening closed branch head %d\n') % r)
2945
2946 if repo.ui.debugflag:
2947 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2948 elif repo.ui.verbose:
2949 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2950
2951 def postcommitstatus(repo, pats, opts):
2952 return repo.status(match=scmutil.match(repo[None], pats, opts))
2953
2954 def revert(ui, repo, ctx, parents, *pats, **opts):
2955 parent, p2 = parents
2956 node = ctx.node()
2957
2958 mf = ctx.manifest()
2959 if node == p2:
2960 parent = p2
2961
2962 # need all matching names in dirstate and manifest of target rev,
2963 # so have to walk both. do not print errors if files exist in one
2964 # but not other. in both cases, filesets should be evaluated against
2965 # workingctx to get consistent result (issue4497). this means 'set:**'
2966 # cannot be used to select missing files from target rev.
2967
2968 # `names` is a mapping for all elements in working copy and target revision
2969 # The mapping is in the form:
2970 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2971 names = {}
2972
2973 with repo.wlock():
2974 ## filling of the `names` mapping
2975 # walk dirstate to fill `names`
2976
2977 interactive = opts.get('interactive', False)
2978 wctx = repo[None]
2979 m = scmutil.match(wctx, pats, opts)
2980
2981 # we'll need this later
2982 targetsubs = sorted(s for s in wctx.substate if m(s))
2983
2984 if not m.always():
2985 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2986 names[abs] = m.rel(abs), m.exact(abs)
2987
2988 # walk target manifest to fill `names`
2989
2990 def badfn(path, msg):
2991 if path in names:
2992 return
2993 if path in ctx.substate:
2994 return
2995 path_ = path + '/'
2996 for f in names:
2997 if f.startswith(path_):
2998 return
2999 ui.warn("%s: %s\n" % (m.rel(path), msg))
3000
3001 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3002 if abs not in names:
3003 names[abs] = m.rel(abs), m.exact(abs)
3004
3005 # Find the status of all files in `names`.
3006 m = scmutil.matchfiles(repo, names)
3007
3008 changes = repo.status(node1=node, match=m,
3009 unknown=True, ignored=True, clean=True)
3010 else:
3011 changes = repo.status(node1=node, match=m)
3012 for kind in changes:
3013 for abs in kind:
3014 names[abs] = m.rel(abs), m.exact(abs)
3015
3016 m = scmutil.matchfiles(repo, names)
3017
3018 modified = set(changes.modified)
3019 added = set(changes.added)
3020 removed = set(changes.removed)
3021 _deleted = set(changes.deleted)
3022 unknown = set(changes.unknown)
3023 unknown.update(changes.ignored)
3024 clean = set(changes.clean)
3025 modadded = set()
3026
3027 # split between files known in target manifest and the others
3028 smf = set(mf)
3029
3030 # determine the exact nature of the deleted files
3031 deladded = _deleted - smf
3032 deleted = _deleted - deladded
3033
3034 # We need to account for the state of the file in the dirstate,
3035 # even when we revert against something other than the parent. This will
3036 # slightly alter the behavior of revert (doing a backup or not, delete
3037 # or just forget etc).
3038 if parent == node:
3039 dsmodified = modified
3040 dsadded = added
3041 dsremoved = removed
3042 # store all local modifications, useful later for rename detection
3043 localchanges = dsmodified | dsadded
3044 modified, added, removed = set(), set(), set()
3045 else:
3046 changes = repo.status(node1=parent, match=m)
3047 dsmodified = set(changes.modified)
3048 dsadded = set(changes.added)
3049 dsremoved = set(changes.removed)
3050 # store all local modifications, useful later for rename detection
3051 localchanges = dsmodified | dsadded
3052
3053 # only take removes between wc and target into account
3054 clean |= dsremoved - removed
3055 dsremoved &= removed
3056 # distinguish between dirstate removes and the others
3057 removed -= dsremoved
3058
3059 modadded = added & dsmodified
3060 added -= modadded
3061
3062 # tell newly modified files apart.
3063 dsmodified &= modified
3064 dsmodified |= modified & dsadded # dirstate added may need backup
3065 modified -= dsmodified
3066
3067 # We need to wait for some post-processing to update this set
3068 # before making the distinction. The dirstate will be used for
3069 # that purpose.
3070 dsadded = added
3071
3072 # in case of merge, files that are actually added can be reported as
3073 # modified, we need to post process the result
3074 if p2 != nullid:
3075 mergeadd = dsmodified - smf
3076 dsadded |= mergeadd
3077 dsmodified -= mergeadd
3078
3079 # if f is a rename, update `names` to also revert the source
3080 cwd = repo.getcwd()
3081 for f in localchanges:
3082 src = repo.dirstate.copied(f)
3083 # XXX should we check for rename down to target node?
3084 if src and src not in names and repo.dirstate[src] == 'r':
3085 dsremoved.add(src)
3086 names[src] = (repo.pathto(src, cwd), True)
3087
3088 # distinguish between files to forget and the others
3089 added = set()
3090 for abs in dsadded:
3091 if repo.dirstate[abs] != 'a':
3092 added.add(abs)
3093 dsadded -= added
3094
3095 for abs in deladded:
3096 if repo.dirstate[abs] == 'a':
3097 dsadded.add(abs)
3098 deladded -= dsadded
3099
3100 # For files marked as removed, we check if an unknown file is present at
3101 # the same path. If such a file exists, it may need to be backed up.
3102 # Making the distinction at this stage keeps the backup logic
3103 # simpler.
3104 removunk = set()
3105 for abs in removed:
3106 target = repo.wjoin(abs)
3107 if os.path.lexists(target):
3108 removunk.add(abs)
3109 removed -= removunk
3110
3111 dsremovunk = set()
3112 for abs in dsremoved:
3113 target = repo.wjoin(abs)
3114 if os.path.lexists(target):
3115 dsremovunk.add(abs)
3116 dsremoved -= dsremovunk
3117
3118 # actions to be actually performed by revert:
3119 # (<list of files>, <message>) tuples
3120 actions = {'revert': ([], _('reverting %s\n')),
3121 'add': ([], _('adding %s\n')),
3122 'remove': ([], _('removing %s\n')),
3123 'drop': ([], _('removing %s\n')),
3124 'forget': ([], _('forgetting %s\n')),
3125 'undelete': ([], _('undeleting %s\n')),
3126 'noop': (None, _('no changes needed to %s\n')),
3127 'unknown': (None, _('file not managed: %s\n')),
3128 }
3129
3130 # "constants" that convey the backup strategy.
3131 # All are set to `discard` if `no-backup` is set, to avoid checking
3132 # no_backup lower in the code.
3133 # These values are ordered for comparison purposes
3134 backupinteractive = 3 # do backup if interactively modified
3135 backup = 2 # unconditionally do backup
3136 check = 1 # check if the existing file differs from target
3137 discard = 0 # never do backup
3138 if opts.get('no_backup'):
3139 backupinteractive = backup = check = discard
3140 if interactive:
3141 dsmodifiedbackup = backupinteractive
3142 else:
3143 dsmodifiedbackup = backup
3144 tobackup = set()
3145
3146 backupanddel = actions['remove']
3147 if not opts.get('no_backup'):
3148 backupanddel = actions['drop']
3149
3150 disptable = (
3151 # dispatch table:
3152 # file state
3153 # action
3154 # make backup
3155
3156 ## Sets that will result in file changes on disk
3157 # Modified compared to target, no local change
3158 (modified, actions['revert'], discard),
3159 # Modified compared to target, but local file is deleted
3160 (deleted, actions['revert'], discard),
3161 # Modified compared to target, local change
3162 (dsmodified, actions['revert'], dsmodifiedbackup),
3163 # Added since target
3164 (added, actions['remove'], discard),
3165 # Added in working directory
3166 (dsadded, actions['forget'], discard),
3167 # Added since target, have local modification
3168 (modadded, backupanddel, backup),
3169 # Added since target but file is missing in working directory
3170 (deladded, actions['drop'], discard),
3171 # Removed since target, before working copy parent
3172 (removed, actions['add'], discard),
3173 # Same as `removed` but an unknown file exists at the same path
3174 (removunk, actions['add'], check),
3175 # Removed since target, marked as such in working copy parent
3176 (dsremoved, actions['undelete'], discard),
3177 # Same as `dsremoved` but an unknown file exists at the same path
3178 (dsremovunk, actions['undelete'], check),
3179 ## the following sets do not result in any file changes
3180 # File with no modification
3181 (clean, actions['noop'], discard),
3182 # Existing file, not tracked anywhere
3183 (unknown, actions['unknown'], discard),
3184 )
3185
3186 for abs, (rel, exact) in sorted(names.items()):
3187 # target file to be touched on disk (relative to cwd)
3188 target = repo.wjoin(abs)
3189 # search the entry in the dispatch table.
3190 # if the file is in any of these sets, it was touched in the working
3191 # directory parent and we are sure it needs to be reverted.
3192 for table, (xlist, msg), dobackup in disptable:
3193 if abs not in table:
3194 continue
3195 if xlist is not None:
3196 xlist.append(abs)
3197 if dobackup:
3198 # If in interactive mode, don't automatically create
3199 # .orig files (issue4793)
3200 if dobackup == backupinteractive:
3201 tobackup.add(abs)
3202 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3203 bakname = scmutil.origpath(ui, repo, rel)
3204 ui.note(_('saving current version of %s as %s\n') %
3205 (rel, bakname))
3206 if not opts.get('dry_run'):
3207 if interactive:
3208 util.copyfile(target, bakname)
3209 else:
3210 util.rename(target, bakname)
3211 if ui.verbose or not exact:
3212 if not isinstance(msg, basestring):
3213 msg = msg(abs)
3214 ui.status(msg % rel)
3215 elif exact:
3216 ui.warn(msg % rel)
3217 break
3218
3219 if not opts.get('dry_run'):
3220 needdata = ('revert', 'add', 'undelete')
3221 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3222 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3223
3224 if targetsubs:
3225 # Revert the subrepos on the revert list
3226 for sub in targetsubs:
3227 try:
3228 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3229 except KeyError:
3230 raise error.Abort("subrepository '%s' does not exist in %s!"
3231 % (sub, short(ctx.node())))
3232
3233 def _revertprefetch(repo, ctx, *files):
3234 """Let extensions changing the storage layer prefetch content"""
3235 pass
3236
3237 def _performrevert(repo, parents, ctx, actions, interactive=False,
3238 tobackup=None):
3239 """function that actually performs all the actions computed for revert
3240 
3241 This is an independent function to let extensions plug in and react to
3242 the imminent revert.
3243
3244 Make sure you have the working directory locked when calling this function.
3245 """
3246 parent, p2 = parents
3247 node = ctx.node()
3248 excluded_files = []
3249 matcher_opts = {"exclude": excluded_files}
3250
3251 def checkout(f):
3252 fc = ctx[f]
3253 repo.wwrite(f, fc.data(), fc.flags())
3254
3255 audit_path = pathutil.pathauditor(repo.root)
3256 for f in actions['forget'][0]:
3257 if interactive:
3258 choice = \
3259 repo.ui.promptchoice(
3260 _("forget added file %s (yn)?$$ &Yes $$ &No")
3261 % f)
3262 if choice == 0:
3263 repo.dirstate.drop(f)
3264 else:
3265 excluded_files.append(repo.wjoin(f))
3266 else:
3267 repo.dirstate.drop(f)
3268 for f in actions['remove'][0]:
3269 audit_path(f)
3270 try:
3271 util.unlinkpath(repo.wjoin(f))
3272 except OSError:
3273 pass
3274 repo.dirstate.remove(f)
3275 for f in actions['drop'][0]:
3276 audit_path(f)
3277 repo.dirstate.remove(f)
3278
3279 normal = None
3280 if node == parent:
3281 # We're reverting to our parent. If possible, we'd like status
3282 # to report the file as clean. We have to use normallookup for
3283 # merges to avoid losing information about merged/dirty files.
3284 if p2 != nullid:
3285 normal = repo.dirstate.normallookup
3286 else:
3287 normal = repo.dirstate.normal
3288
3289 newlyaddedandmodifiedfiles = set()
3290 if interactive:
3291 # Prompt the user for changes to revert
3292 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3293 m = scmutil.match(ctx, torevert, matcher_opts)
3294 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3295 diffopts.nodates = True
3296 diffopts.git = True
3297 reversehunks = repo.ui.configbool('experimental',
3298 'revertalternateinteractivemode',
3299 True)
3300 if reversehunks:
3301 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3302 else:
3303 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3304 originalchunks = patch.parsepatch(diff)
3305 operation = 'discard' if node == parent else 'revert'
3306
3307 try:
3308
3309 chunks, opts = recordfilter(repo.ui, originalchunks,
3310 operation=operation)
3311 if reversehunks:
3312 chunks = patch.reversehunks(chunks)
3313
3314 except patch.PatchError as err:
3315 raise error.Abort(_('error parsing patch: %s') % err)
3316
3317 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3318 if tobackup is None:
3319 tobackup = set()
3320 # Apply changes
3321 fp = stringio()
3322 for c in chunks:
3323 # Create a backup file only if this hunk should be backed up
3324 if ishunk(c) and c.header.filename() in tobackup:
3325 abs = c.header.filename()
3326 target = repo.wjoin(abs)
3327 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3328 util.copyfile(target, bakname)
3329 tobackup.remove(abs)
3330 c.write(fp)
3331 dopatch = fp.tell()
3332 fp.seek(0)
3333 if dopatch:
3334 try:
3335 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3336 except patch.PatchError as err:
3337 raise error.Abort(str(err))
3338 del fp
3339 else:
3340 for f in actions['revert'][0]:
3341 checkout(f)
3342 if normal:
3343 normal(f)
3344
3345 for f in actions['add'][0]:
3346 # Don't checkout modified files, they are already created by the diff
3347 if f not in newlyaddedandmodifiedfiles:
3348 checkout(f)
3349 repo.dirstate.add(f)
3350
3351 normal = repo.dirstate.normallookup
3352 if node == parent and p2 == nullid:
3353 normal = repo.dirstate.normal
3354 for f in actions['undelete'][0]:
3355 checkout(f)
3356 normal(f)
3357
3358 copied = copies.pathcopies(repo[parent], ctx)
3359
3360 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3361 if f in copied:
3362 repo.dirstate.copy(copied[f], f)
3363
3364 def command(table):
3365 """Returns a function object to be used as a decorator for making commands.
3366
3367 This function receives a command table as its argument. The table should
3368 be a dict.
3369
3370 The returned function can be used as a decorator for adding commands
3371 to that command table. This function accepts multiple arguments to define
3372 a command.
3373
3374 The first argument is the command name.
3375
3376 The options argument is an iterable of tuples defining command arguments.
3377 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3378
3379 The synopsis argument defines a short, one line summary of how to use the
3380 command. This shows up in the help output.
3381
3382 The norepo argument defines whether the command does not require a
3383 local repository. Most commands operate against a repository, thus the
3384 default is False.
3385
3386 The optionalrepo argument defines whether the command optionally requires
3387 a local repository.
3388
3389 The inferrepo argument defines whether to try to find a repository from the
3390 command line arguments. If True, arguments will be examined for potential
3391 repository locations. See ``findrepo()``. If a repository is found, it
3392 will be used.
3393 """
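# Hypothetical usage sketch from an extension (names are illustrative):
#
#   cmdtable = {}
#   command = cmdutil.command(cmdtable)
#
#   @command('hello', [('g', 'greeting', 'Hello', _('greeting to use'))],
#            _('hg hello [-g TEXT]'))
#   def hello(ui, repo, **opts):
#       ui.write("%s from %s\n" % (opts['greeting'], repo.root))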
3394 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3395 inferrepo=False):
3396 def decorator(func):
3397 func.norepo = norepo
3398 func.optionalrepo = optionalrepo
3399 func.inferrepo = inferrepo
3400 if synopsis:
3401 table[name] = func, list(options), synopsis
3402 else:
3403 table[name] = func, list(options)
3404 return func
3405 return decorator
3406
3407 return cmd
3408
3409 def checkunresolved(ms):
3410 if list(ms.unresolved()):
3411 raise error.Abort(_("unresolved merge conflicts "
3412 "(see 'hg help resolve')"))
3413 if ms.mdstate() != 's' or list(ms.driverresolved()):
3414 raise error.Abort(_('driver-resolved merge conflicts'),
3415 hint=_('run "hg resolve --all" to resolve'))
3416
3417 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3418 # commands.outgoing. "missing" is the "missing" attribute of the result
3419 # of "findcommonoutgoing()"
3420 outgoinghooks = util.hooks()
3421
3422 # a list of (ui, repo) functions called by commands.summary
3423 summaryhooks = util.hooks()
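# Extensions typically register with something like (hypothetical):
#   def mysummary(ui, repo):
#       ui.note(_('myext: nothing in progress\n'))
#   cmdutil.summaryhooks.add('myext', mysummary)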
3424
3425 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3426 #
3427 # functions should return tuple of booleans below, if 'changes' is None:
3428 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3429 #
3430 # otherwise, 'changes' is a tuple of tuples below:
3431 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3432 # - (desturl, destbranch, destpeer, outgoing)
3433 summaryremotehooks = util.hooks()
3434
3435 # A list of state files kept by multistep operations like graft.
3436 # Since graft cannot be aborted, it is considered 'clearable' by update.
3437 # note: bisect is intentionally excluded
3438 # (state file, clearable, allowcommit, error, hint)
3439 unfinishedstates = [
3440 ('graftstate', True, False, _('graft in progress'),
3441 _("use 'hg graft --continue' or 'hg update' to abort")),
3442 ('updatestate', True, False, _('last update was interrupted'),
3443 _("use 'hg update' to get a consistent checkout"))
3444 ]
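
Extensions with their own multistep operations append to this list; the state file name and messages here are illustrative, but the shape follows the tuple format documented above:

    unfinishedstates.append(
        ('fooopstate', False, False, _('foo operation in progress'),
         _("use 'hg foo --continue' or 'hg foo --abort'")))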
3445
3446 def checkunfinished(repo, commit=False):
3447 '''Look for an unfinished multistep operation, like graft, and abort
3448 if found. It's probably good to check this right before
3449 bailifchanged().
3450 '''
3451 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3452 if commit and allowcommit:
3453 continue
3454 if repo.vfs.exists(f):
3455 raise error.Abort(msg, hint=hint)
3456
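A sketch of the call pattern suggested in the docstring above (both helpers live in this module):

    checkunfinished(repo)   # abort if e.g. a graft is still in progress
    bailifchanged(repo)     # then abort on uncommitted local changes
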
3457 def clearunfinished(repo):
3458 '''Check for unfinished operations (as above), and clear the ones
3459 that are clearable.
3460 '''
3461 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3462 if not clearable and repo.vfs.exists(f):
3463 raise error.Abort(msg, hint=hint)
3464 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3465 if clearable and repo.vfs.exists(f):
3466 util.unlink(repo.join(f))
3467
3468 afterresolvedstates = [
3469 ('graftstate',
3470 _('hg graft --continue')),
3471 ]
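
Extensions register here in the same way; a rebase-like command would add its state file and the command that resumes it (entry shown as a sketch):

    afterresolvedstates.append(
        ('rebasestate', _('hg rebase --continue')))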
3472
3473 def howtocontinue(repo):
3474 '''Check for an unfinished operation and return the command to finish
3475 it.
3476
3477 afterresolvedstates tuples define a .hg/{file} and the corresponding
3478 command needed to finish it.
3479
3480 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3481 a boolean.
3482 '''
3483 contmsg = _("continue: %s")
3484 for f, msg in afterresolvedstates:
3485 if repo.vfs.exists(f):
3486 return contmsg % msg, True
3487 workingctx = repo[None]
3488 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3489 for s in workingctx.substate)
3490 if dirty:
3491 return contmsg % _("hg commit"), False
3492 return None, None
3493
3494 def checkafterresolved(repo):
3495 '''Inform the user about the next action after completing hg resolve
3496
3497 If howtocontinue() finds a matching afterresolvedstates entry, the
3498 message is reported via repo.ui.warn.
3499
3500 Otherwise, it is reported via repo.ui.note.
3501 '''
3502 msg, warning = howtocontinue(repo)
3503 if msg is not None:
3504 if warning:
3505 repo.ui.warn("%s\n" % msg)
3506 else:
3507 repo.ui.note("%s\n" % msg)
3508
3509 def wrongtooltocontinue(repo, task):
3510 '''Raise an abort suggesting how to properly continue if there is an
3511 active task.
3512
3513 Uses howtocontinue() to find the active task.
3514
3515 If there is no task in progress (only the non-warning 'hg commit'
3516 suggestion from howtocontinue()), no hint is offered.
3517 '''
3518 after = howtocontinue(repo)
3519 hint = None
3520 if after[1]:
3521 hint = after[0]
3522 raise error.Abort(_('no %s in progress') % task, hint=hint)
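
A sketch of the intended call site, e.g. a --continue handler that finds no matching state file (the state file name is the one graft registers above):

    if not repo.vfs.exists('graftstate'):
        wrongtooltocontinue(repo, _('graft'))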
3523
3524 class dirstateguard(object):
3525 '''Restore dirstate at unexpected failure.
3526
3527 At construction, this class does:
3528
3529 - write current ``repo.dirstate`` out, and
3530 - save ``.hg/dirstate`` into the backup file
3531
3532 This restores ``.hg/dirstate`` from the backup file, if ``release()``
3533 is invoked before ``close()``.
3534
3535 If ``close()`` is invoked before ``release()``, this just removes the backup file.
3536 '''
3537
3538 def __init__(self, repo, name):
3539 self._repo = repo
3540 self._active = False
3541 self._closed = False
3542 self._suffix = '.backup.%s.%d' % (name, id(self))
3543 repo.dirstate.savebackup(repo.currenttransaction(), self._suffix)
3544 self._active = True
3545
3546 def __del__(self):
3547 if self._active: # still active
3548 # this may occur, even if this class is used correctly:
3549 # for example, releasing other resources like transaction
3550 # may raise exception before ``dirstateguard.release`` in
3551 # ``release(tr, ....)``.
3552 self._abort()
3553
3554 def close(self):
3555 if not self._active: # already inactivated
3556 msg = (_("can't close already inactivated backup: dirstate%s")
3557 % self._suffix)
3558 raise error.Abort(msg)
3559
3560 self._repo.dirstate.clearbackup(self._repo.currenttransaction(),
3561 self._suffix)
3562 self._active = False
3563 self._closed = True
3564
3565 def _abort(self):
3566 self._repo.dirstate.restorebackup(self._repo.currenttransaction(),
3567 self._suffix)
3568 self._active = False
3569
3570 def release(self):
3571 if not self._closed:
3572 if not self._active: # already inactivated
3573 msg = (_("can't release already inactivated backup:"
3574 " dirstate%s")
3575 % self._suffix)
3576 raise error.Abort(msg)
3577 self._abort()
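
A usage sketch, assuming the caller follows the close()/release() discipline this class expects (the operation name is illustrative):

    dsguard = dirstateguard(repo, 'my-operation')
    try:
        # ... mutate the working directory and dirstate here ...
        dsguard.close()        # success: discard the dirstate backup
    finally:
        dsguard.release()      # if close() was never reached, restore .hg/dirstate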