##// END OF EJS Templates
dirstate: make backup methods public...
Mateusz Kwapich -
r29137:d115cbf5 default
parent child Browse files
Show More
@@ -1,3556 +1,3556
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import sys
13 import sys
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 bin,
18 bin,
19 hex,
19 hex,
20 nullid,
20 nullid,
21 nullrev,
21 nullrev,
22 short,
22 short,
23 )
23 )
24
24
25 from . import (
25 from . import (
26 bookmarks,
26 bookmarks,
27 changelog,
27 changelog,
28 copies,
28 copies,
29 crecord as crecordmod,
29 crecord as crecordmod,
30 encoding,
30 encoding,
31 error,
31 error,
32 formatter,
32 formatter,
33 graphmod,
33 graphmod,
34 lock as lockmod,
34 lock as lockmod,
35 match as matchmod,
35 match as matchmod,
36 obsolete,
36 obsolete,
37 patch,
37 patch,
38 pathutil,
38 pathutil,
39 phases,
39 phases,
40 repair,
40 repair,
41 revlog,
41 revlog,
42 revset,
42 revset,
43 scmutil,
43 scmutil,
44 templatekw,
44 templatekw,
45 templater,
45 templater,
46 util,
46 util,
47 )
47 )
48 stringio = util.stringio
48 stringio = util.stringio
49
49
def ishunk(x):
    """Report whether x is a patch hunk (curses or plain record flavor)."""
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
53
53
def newandmodified(chunks, originalchunks):
    """Return the filenames of hunks for newly added files that were
    modified during interactive selection (i.e. absent from originalchunks).
    """
    touched = set()
    for c in chunks:
        if not ishunk(c):
            continue
        if c.header.isnewfile() and c not in originalchunks:
            touched.add(c.header.filename())
    return touched
61
61
def parsealiases(cmd):
    """Split a command table key like "^log|history" into its alias list."""
    stripped = cmd.lstrip("^")
    return stripped.split("|")
64
64
def setupwrapcolorwrite(ui):
    """Wrap ui.write so diff output can be labeled/colorized.

    Returns the original ui.write so the caller can restore it afterwards.
    """
    oldwrite = ui.write

    def wrapped(*args, **kwargs):
        # route every chunk through difflabel so diff parts pick up labels
        label = kwargs.pop('label', '')
        for chunk, l in patch.difflabel(lambda: args):
            oldwrite(chunk, label=label + l)

    setattr(ui, 'write', wrapped)
    return oldwrite
77
77
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter hunks: via curses when enabled, else via prompts."""
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        # test mode replays scripted keystrokes through the chunk selector
        recordfn = crecordmod.testdecorator(testfile,
                                            crecordmod.testchunkselector)
    else:
        recordfn = crecordmod.chunkselector

    return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
90
90
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to select hunks out of originalhunks.

    Returns (selectedchunks, newopts). *operation* is a translated string
    shown to the user describing what kind of filtering is going on:
    reverting, committing, shelving, etc.
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest', None)
    # colorize diff output while the interactive session runs
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, operation)
    finally:
        ui.write = oldwrite
    return newchunks, newopts
107
107
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Drive an interactive record/commit-style command.

    commitfunc performs the actual commit once the working directory has
    been narrowed to the hunks the user selected via filterfn.
    cmdsuggest, if set, names a command to suggest when the ui is not
    interactive. backupall forces backing up every changed file instead
    of only the files being partially committed.
    """
    from . import merge as mergemod
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # git-style diffs with function context make hunk selection readable
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # header objects expose files(); bare hunks do not
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                # reuse a leftover backup dir from a previous run
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        extra={"suffix": ".diff"})
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo  (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                        False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best-effort cleanup: never let restore failures mask the
                # real outcome of the commit
                pass

    def recordinwlock(ui, repo, message, match, opts):
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
288
288
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            # accept an unambiguous prefix of any alias
            for candidate in aliases:
                if candidate.startswith(cmd):
                    found = candidate
                    break
        if found is None:
            continue
        isdebug = aliases[0].startswith("debug") or found.startswith("debug")
        bucket = debugchoice if isdebug else choice
        bucket[found] = (aliases, table[entry])

    # debug commands only surface when nothing normal matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
326
326
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string.

    Raises AmbiguousCommand when several commands match and
    UnknownCommand when none do.
    """
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        # sorted() over the mapping yields an ordered key list on both a
        # py2 list and a py3 view, unlike keys() + in-place sort()
        clist = sorted(choice)
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        # exactly one match; list() avoids subscripting a dict view
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd, allcmds)
343
343
def findrepo(p):
    """Walk upwards from directory p looking for a ".hg" directory.

    Returns the repository root, or None when no ancestor contains one.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # reached the filesystem root without finding a repo
            return None
        p = parent
    return p
351
351
def bailifchanged(repo, merge=True):
    """Abort if the working directory has uncommitted changes.

    With merge=True (the default), an outstanding uncommitted merge also
    aborts. Subrepositories are checked recursively.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'))
    # modified, added, removed, deleted
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'))
    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        wctx.sub(subpath).bailifchanged()
361
361
def logmessage(ui, opts):
    """Return the commit message from the -m/--message or -l/--logfile option."""
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if message or not logfile:
        # either an explicit message, or neither option given (None)
        return message
    try:
        if logfile == '-':
            return ui.fin.read()
        # normalize line endings from the file
        return '\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(_("can't read commit message '%s': %s") %
                          (logfile, inst.strerror))
380
380
def mergeeditform(ctxorbool, baseformname):
    """Return the editform name referencing a committemplate.

    ctxorbool is either the ctx being committed, or a bool saying whether
    the commit is a merge. The result is baseformname plus ".merge" for a
    merge commit and ".normal" otherwise.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        ismerge = len(ctxorbool.parents()) > 1
    suffix = ".merge" if ismerge else ".normal"
    return baseformname + suffix
397
397
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """Return the commit message editor implied by the '--edit' option.

    'finishdesc' is called with the edited description just after editing
    but before the empty-ness check, and returns the text actually stored
    into history. 'extramsg' replaces the 'Leave message empty to abort
    commit' line in the editor ('HG: ' prefix and EOL are added
    automatically). 'editform' is a dot-separated name distinguishing the
    purpose of the edit.

    Passing 'finishdesc' or 'extramsg' (MQ-specific hooks) forces the
    interactive editor regardless of 'edit'.
    """
    if edit or finishdesc or extramsg:
        return lambda r, c, s: commitforceeditor(
            r, c, s, finishdesc=finishdesc, extramsg=extramsg,
            editform=editform)
    if editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    return commiteditor
428
428
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if not limit:
        # option absent (or empty): unlimited
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise error.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_('limit must be positive'))
    return limit
442
442
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand '%'-escapes in an output filename pattern.

    Always available: %% (literal percent) and %b (repo basename).
    Enabled by the corresponding argument: %H/%h/%R/%r/%m when node is
    given; %N (total), %n (seqno, zero-padded when total is also given);
    %s/%d/%p when pathname is given. An unknown escape aborts.
    """
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }
    if node:
        expander.update({
            'H': lambda: hex(node),
            'R': lambda: str(repo.changelog.rev(node)),
            'h': lambda: short(node),
            'm': lambda: re.sub(r'[^\w]', '_', str(desc)),
            'r': lambda: str(repo.changelog.rev(node)).zfill(revwidth or 0),
        })
    if total is not None:
        expander['N'] = lambda: str(total)
    if seqno is not None:
        expander['n'] = lambda: str(seqno)
    if total is not None and seqno is not None:
        # pad the sequence number to the width of the total count
        expander['n'] = lambda: str(seqno).zfill(len(str(total)))
    if pathname is not None:
        expander['s'] = lambda: os.path.basename(pathname)
        expander['d'] = lambda: os.path.dirname(pathname) or '.'
        expander['p'] = lambda: pathname

    try:
        pieces = []
        i = 0
        patlen = len(pat)
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = expander[pat[i]]()
            pieces.append(c)
            i += 1
        return ''.join(pieces)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename")
                          % inst.args[0])
488
488
489 class _unclosablefile(object):
489 class _unclosablefile(object):
490 def __init__(self, fp):
490 def __init__(self, fp):
491 self._fp = fp
491 self._fp = fp
492
492
493 def close(self):
493 def close(self):
494 pass
494 pass
495
495
496 def __iter__(self):
496 def __iter__(self):
497 return iter(self._fp)
497 return iter(self._fp)
498
498
499 def __getattr__(self, attr):
499 def __getattr__(self, attr):
500 return getattr(self._fp, attr)
500 return getattr(self._fp, attr)
501
501
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open (or pass through) a file object for an output/input pattern.

    An empty pattern or '-' maps to the ui's stdout/stdin wrapped so it
    cannot be closed. A pattern that is already a suitable file-like
    object is returned as-is. Otherwise the pattern is expanded with
    makefilename and opened; modemap makes repeated opens of the same
    expanded name append instead of truncate.
    """
    writable = mode not in ('r', 'rb')

    if not pat or pat == '-':
        fp = repo.ui.fout if writable else repo.ui.fin
        return _unclosablefile(fp)
    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            # later writers to this file should append, not clobber
            modemap[fn] = 'ab'
    return open(fn, mode)
524
524
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']

    # validate the option combination before touching anything
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.dirlog(file_)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifest
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    # NB: truthiness on purpose — an empty revlog (len 0) also falls
    # through to the raw-file path below
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
569
569
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) files matching pats to a target.

    Returns True if any individual copy reported a failure.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # Expand one pattern into the managed files it matches, warning on
        # exact matches that are untracked ('?') or removed ('r').
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # Case-only rename: go through a temp name so the OS
                    # actually performs the rename.
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
797
797
def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
            runargs=None, appendpid=False):
    '''Run a command as a service.'''

    def writepid(pid):
        # Record the daemon pid, appending or truncating per appendpid.
        if opts['pid_file']:
            if appendpid:
                mode = 'a'
            else:
                mode = 'w'
            fp = open(opts['pid_file'], mode)
            fp.write(str(pid) + '\n')
            fp.close()

    if opts['daemon'] and not opts['daemon_postexec']:
        # Signal child process startup with file removal
        lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
        os.close(lockfd)
        try:
            if not runargs:
                runargs = util.hgcmd() + sys.argv[1:]
            runargs.append('--daemon-postexec=unlink:%s' % lockpath)
            # Don't pass --cwd to the child process, because we've already
            # changed directory.
            for i in xrange(1, len(runargs)):
                if runargs[i].startswith('--cwd='):
                    del runargs[i]
                    break
                elif runargs[i].startswith('--cwd'):
                    del runargs[i:i + 2]
                    break
            def condfn():
                # The child signals readiness by unlinking the lock file.
                return not os.path.exists(lockpath)
            pid = util.rundetached(runargs, condfn)
            if pid < 0:
                raise error.Abort(_('child process failed to start'))
            writepid(pid)
        finally:
            try:
                os.unlink(lockpath)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
        if parentfn:
            return parentfn(pid)
        else:
            return

    if initfn:
        initfn()

    if not opts['daemon']:
        writepid(util.getpid())

    if opts['daemon_postexec']:
        try:
            os.setsid()
        except AttributeError:
            pass
        for inst in opts['daemon_postexec']:
            if inst.startswith('unlink:'):
                lockpath = inst[7:]
                os.unlink(lockpath)
            elif inst.startswith('chdir:'):
                os.chdir(inst[6:])
            elif inst != 'none':
                raise error.Abort(_('invalid value for --daemon-postexec: %s')
                                  % inst)
        util.hidewindow()
        sys.stdout.flush()
        sys.stderr.flush()

        # Detach stdio: stdin from /dev/null, stdout/stderr to the log file
        # (or /dev/null when no logfile was given).
        nullfd = os.open(os.devnull, os.O_RDWR)
        logfilefd = nullfd
        if logfile:
            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
        os.dup2(nullfd, 0)
        os.dup2(logfilefd, 1)
        os.dup2(logfilefd, 2)
        if nullfd not in (0, 1, 2):
            os.close(nullfd)
        if logfile and logfilefd not in (0, 1, 2):
            os.close(logfilefd)

    if runfn:
        return runfn()
884
884
## facility to let extension process additional data into an import patch
# Ordered lists of hook identifiers executed around an imported commit.
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# Mappings from identifier to the actual hook function.
#
# 'preimport' hooks run before the commit is made; each is called with:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass an ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' hooks run after the commit is made; each is called with:
# - ctx: the changectx created by import.
extrapostimportmap = {}
905
905
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it a simple code translation from
    the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
          (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # Tease apart the patch metadata extracted from the hunk; command-line
    # options override what the patch header carries.
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # Apply the patch to the working directory.
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except patch.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                if partial:
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
                extra = {}
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                try:
                    if partial:
                        # Partial imports may legitimately commit nothing.
                        repo.ui.setconfig('ui', 'allowemptycommit', True)
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                    for idfunc in extrapostimport:
                        extrapostimportmap[idfunc](repo[n])
                finally:
                    repo.ui.restoreconfig(allowemptyback)
        else:
            # --bypass: apply the patch in-memory against p1, not the wdir.
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except patch.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.makememctx(repo, (p1.node(), p2.node()),
                                            message,
                                            user,
                                            date,
                                            branch, files, store,
                                            editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        os.unlink(tmpname)
1075
1075
# Facility to let extensions include additional data in an exported patch.
# Ordered list of header identifiers; they are executed in list order.
extraexport = []
# Mapping from identifier to the actual export function.  Each function is
# called with (sequencenumber, changectx) and must return either a string to
# be added to the patch header, or None to add nothing.
extraexportmap = {}
1083
1083
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches.

    One patch is emitted per revision in revs, either to fp (when given),
    to a file named from template (when template is non-empty), or to the
    ui otherwise.
    '''

    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    # remembers the mode each output file was opened with, shared across
    # revisions written to the same file
    filemode = {}

    def writeone(rev, seqno, fp):
        # emit a single revision as a patch
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()

        prev = parents[0] if parents else nullid

        shouldclose = False
        if not fp and len(template) > 0:
            # a commit message always has at least one line
            desc = ctx.description().rstrip().split('\n')[0]
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='wb',
                             modemap=filemode)
            shouldclose = True
        if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
            repo.ui.note("%s\n" % fp.name)

        if not fp:
            write = repo.ui.write
        else:
            def write(s, **kw):
                fp.write(s)

        write("# HG changeset patch\n")
        write("# User %s\n" % ctx.user())
        write("# Date %d %d\n" % ctx.date())
        write("# %s\n" % util.datestr(ctx.date()))
        if branch and branch != 'default':
            write("# Branch %s\n" % branch)
        write("# Node ID %s\n" % hex(node))
        write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            write("# Parent %s\n" % hex(parents[1]))

        # let extensions contribute extra header lines
        for headerid in extraexport:
            header = extraexportmap[headerid](seqno, ctx)
            if header is not None:
                write('# %s\n' % header)
        write(ctx.description().rstrip())
        write("\n\n")

        for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
            write(chunk, label=label)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        writeone(rev, seqno + 1, fp)
1148
1148
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.

    Output goes to fp when given, to the ui otherwise.  When stat is true a
    diffstat summary is printed instead of the full diff; subrepo diffs are
    appended when listsubrepos is set.
    '''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    relroot = pathutil.canonpath(repo.root, repo.getcwd(), root) if root else ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat does not need any context lines
        diffopts = diffopts.copy(context=0)
        width = ui.termwidth() if not ui.plain() else 80
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width,
                                             git=diffopts.git):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1206
1206
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        # per-rev buffered output, keyed by revision number
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, ctx):
        '''write out any buffered output for ctx; return 1 if a hunk was
        flushed, 0 otherwise'''
        rev = ctx.rev()
        if rev in self.header:
            h = self.header.pop(rev)
            # avoid repeating an identical header for consecutive revisions
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
        if rev in self.hunk:
            self.ui.write(self.hunk.pop(rev))
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        '''display ctx, buffering the output for later flush() if requested'''
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()
        hexfunc = hex if self.ui.debugflag else short
        # as of now, wctx.node() and wctx.rev() return None, but we want to
        # show the same values as {node} and {rev} templatekw
        revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % revnode, label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset: %d:%s\n") % revnode,
                      label='log.changeset changeset.%s' % ctx.phasestr())

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        if branch != 'default':
            # don't show the default branch name
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch: %s\n") % branch, label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase: %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent: %d:%s\n")
                          % (pctx.rev(), hexfunc(pctx.node())),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(mnode), hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user: %s\n") % ctx.user(), label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date: %s\n") % date, label='log.date')

        if self.ui.debugflag:
            # first three status fields: modified, added, removed
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies: %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn)

    def showpatch(self, ctx, matchfn):
        '''write the diff and/or diffstat for ctx when enabled in diffopts'''
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")
1376
1376
class jsonchangeset(changeset_printer):
    '''format changeset information as a JSON array.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # nothing emitted yet; drives comma/bracket placement
        self._first = True

    def close(self):
        # terminate the JSON array ("[]" when nothing was ever shown)
        if self._first:
            self.ui.write("[]\n")
        else:
            self.ui.write("\n]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        if rev is None:
            jrev = jnode = 'null'
        else:
            jrev = str(rev)
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape
        write = self.ui.write

        if self._first:
            write("[\n {")
            self._first = False
        else:
            write(",\n {")

        if self.ui.quiet:
            write('\n "rev": %s' % jrev)
            write(',\n "node": %s' % jnode)
            write('\n }')
            return

        write('\n "rev": %s' % jrev)
        write(',\n "node": %s' % jnode)
        write(',\n "branch": "%s"' % j(ctx.branch()))
        write(',\n "phase": "%s"' % ctx.phasestr())
        write(',\n "user": "%s"' % j(ctx.user()))
        write(',\n "date": [%d, %d]' % ctx.date())
        write(',\n "desc": "%s"' % j(ctx.description()))

        write(',\n "bookmarks": [%s]' %
              ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        write(',\n "tags": [%s]' %
              ", ".join('"%s"' % j(t) for t in ctx.tags()))
        write(',\n "parents": [%s]' %
              ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            write(',\n "manifest": %s' % jmanifestnode)

            write(',\n "extra": {%s}' %
                  ", ".join('"%s": "%s"' % (j(k), j(v))
                            for k, v in ctx.extra().items()))

            # modified, added, removed relative to the first parent
            files = ctx.p1().status(ctx)
            write(',\n "modified": [%s]' %
                  ", ".join('"%s"' % j(f) for f in files[0]))
            write(',\n "added": [%s]' %
                  ", ".join('"%s"' % j(f) for f in files[1]))
            write(',\n "removed": [%s]' %
                  ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            write(',\n "files": [%s]' %
                  ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                write(',\n "copies": {%s}' %
                      ", ".join('"%s": "%s"' % (j(k), j(v))
                                for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                write(',\n "diff": "%s"' % j(self.ui.popbuffer()))

        write("\n }")
1474
1474
class changeset_templater(changeset_printer):
    '''format changeset information using a user-supplied template.'''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
        filters = {'formatnode': formatnode}
        defaulttempl = {
            'parent': '{rev}:{node|formatnode} ',
            'manifest': '{rev}:{node|formatnode}',
            'file_copy': '{name} ({source})',
            'extra': '{key}={value|stringescape}'
            }
        # filecopy is preserved for compatibility reasons
        defaulttempl['filecopy'] = defaulttempl['file_copy']
        # a template string and a map file are mutually exclusive
        assert not (tmpl and mapfile)
        if mapfile:
            self.t = templater.templater.frommapfile(mapfile, filters=filters,
                                                     cache=defaulttempl)
        else:
            self.t = formatter.maketemplater(ui, 'changeset', tmpl,
                                             filters=filters,
                                             cache=defaulttempl)

        self.cache = {}

        # select the template variants for the current verbosity mode;
        # later (more specific) entries override earlier ones
        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
                       'docheader': '', 'docfooter': ''}
        for mode, postfix in tmplmodes:
            for part in self._parts:
                cur = part + "_" + postfix if postfix else part
                if mode and cur in self.t:
                    self._parts[part] = cur

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            elif self.lastheader != h:
                # suppress repeated identical headers in unbuffered mode
                self.lastheader = h
                self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts['changeset']
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn)

        # the footer is rendered once, from the first changeset shown
        if self._parts['footer'] and not self.footer:
            self.footer = templater.stringify(
                self.t(self._parts['footer'], **props))
1559
1559
def gettemplate(ui, tmpl, style):
    """Resolve a template spec or style name to a (template, mapfile) pair.

    Explicit arguments take precedence over the [ui] logtemplate/style
    configuration.  Exactly one element of the returned pair is normally
    non-None; (None, None) means no template was found.
    """

    # neither given explicitly: fall back to the ui configuration,
    # where a template is stronger than a style
    if not tmpl and not style:
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            return templater.unquotestring(tmpl), None
        style = util.expandpath(ui.config('ui', 'style', ''))

    if not tmpl and style:
        mapfile = style
        if not os.path.split(mapfile)[0]:
            # bare style name: look it up among the shipped map-cmdline
            # styles first, then as a plain template path
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return None, mapfile

    if not tmpl:
        return None, None

    return formatter.lookuptemplate(ui, 'changeset', tmpl)
1586
1586
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # Only build a match-everything matcher when diffs or diffstats will
    # actually be rendered.
    if opts.get('patch') or opts.get('stat'):
        matchfn = scmutil.matchall(repo)
    else:
        matchfn = None

    # JSON output bypasses the templating machinery entirely.
    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
    if tmpl or mapfile:
        return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
                                   buffered)
    # No template anywhere: plain changeset display.
    return changeset_printer(ui, repo, matchfn, opts, buffered)
1612
1612
def showmarker(ui, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    # optional positional prefix
    if index is not None:
        ui.write("%i " % index)
    # precursor node followed by every successor node
    ui.write(hex(marker.precnode()))
    for succ in marker.succnodes():
        ui.write(' ')
        ui.write(hex(succ))
    ui.write(' %X ' % marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
    ui.write('(%s) ' % util.datestr(marker.date()))
    # remaining metadata, minus the date already printed above
    items = sorted(marker.metadata().items())
    shown = ['%r: %r' % item for item in items if item[0] != 'date']
    ui.write('{%s}' % ', '.join(shown))
    ui.write('\n')
1632
1632
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    matched = {}  # rev -> date tuple of every changeset matching the spec

    def prep(ctx, fns):
        ctxdate = ctx.date()
        if datematch(ctxdate[0]):
            matched[ctx.rev()] = ctxdate

    # Walk all revisions; the first yielded changeset whose date matched
    # is the tipmost one (walkchangerevs yields newest first here).
    m = scmutil.matchall(repo)
    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev not in matched:
            continue
        ui.status(_("found revision %s from %s\n") %
                  (rev, util.datestr(matched[rev])))
        return str(rev)

    raise error.Abort(_("revision matching date not found"))
1653
1653
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield an infinite stream of revision-window sizes.

    Each size is double the previous one; once a size of at least
    ``sizelimit`` has been produced the growth stops and that final
    size is repeated forever.
    """
    size = windowsize
    # growth phase: double after each yield while under the limit
    while size < sizelimit:
        yield size
        size *= 2
    # steady state: the (possibly overshooting) final size, forever
    while True:
        yield size
1659
1659
class FileWalkError(Exception):
    """Raised when a file's history cannot be walked via filelogs alone.

    walkfilerevs() raises this to make its caller fall back to the slow
    path that scans the changelog instead.
    """
1662
1662
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.

    repo    - repository being walked
    match   - matcher; only match.files() entries are used (no patterns)
    follow  - when true, renames/copies are chased via filelog.renamed()
    revs    - revisions of interest; only min/max are used, as linkrev bounds
    fncache - dict mutated in place: rev -> list of files changed at rev
    '''
    wanted = set()
    copies = []  # (filename, filenode) pairs discovered via renames
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns.  Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            # 'copied' is only computed when following renames
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # Yield (filename, filenode-or-None) for every file to examine,
        # including rename sources appended to 'copies' during the walk.
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        # Start from the named filenode, or the filelog tip when no node
        # was given.
        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1759
1759
class _followfilter(object):
    """Incrementally decide whether revisions are connected to a start rev.

    The first revision passed to match() becomes the start revision.
    Later calls grow a frontier set ('roots') either forward (descendants,
    for revs above the start) or backward (ancestors, for revs at or below
    it). NOTE(review): the bookkeeping appears to assume revs are fed in a
    monotonic scan away from the start (as walkchangerevs does with its
    descending xrange) -- confirm before reusing elsewhere.
    """
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        # nullrev means "start revision not yet seen"
        self.startrev = nullrev
        # frontier of revisions known to be connected to startrev
        self.roots = set()
        # when set, only first parents are considered (see realparents)
        self.onlyfirst = onlyfirst

    def match(self, rev):
        """Return True if rev is connected to the start revision."""
        def realparents(rev):
            # Parents of rev, restricted to the first parent when
            # onlyfirst is set; nullrev parents are filtered out.
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        # first call: record the start revision, which trivially matches
        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                # replace rev by its parents in the frontier
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1797
1797
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.

    opts keys consulted here: 'follow', 'follow_first', 'removed',
    'prune' (plus whatever _logrevs() reads to build the rev range).'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # slow path: file patterns (or --removed with exact/prefix matches)
    # cannot be resolved through filelogs alone
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns.  Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # NOTE(review): 'set - list' raises TypeError in Python,
                # so this line looks broken whenever 'wanted' is the plain
                # set returned by walkfilerevs() (and lazywantedset has no
                # __sub__ at all) -- confirm how --prune reaches this path
                # and whether it needs set([x]) / discard() instead.
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            # pure --follow: also require graph connectivity to the start
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            # gather up to windowsize wanted revs from the range
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # prepare the window in forward (ascending) order...
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    # fncache miss: lazily compute the matched files
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # ...then yield it in the caller's requested order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
1935
1935
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating linkrevs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    populated = [False]  # one-element list so the closure can flip it
    pctx = repo['.']

    def _record(fctx):
        # remember that fctx's path was touched at its linkrev
        fcache.setdefault(fctx.linkrev(), set()).add(fctx.path())

    def _populate():
        for fn in files:
            fctx = pctx[fn]
            _record(fctx)
            for anc in fctx.ancestors(followfirst=followfirst):
                _record(anc)

    def filematcher(rev):
        # populate lazily, on the first call only
        if not populated[0]:
            populated[0] = True
            _populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher
1962
1962
1963 def _makenofollowlogfilematcher(repo, pats, opts):
1963 def _makenofollowlogfilematcher(repo, pats, opts):
1964 '''hook for extensions to override the filematcher for non-follow cases'''
1964 '''hook for extensions to override the filematcher for non-follow cases'''
1965 return None
1965 return None
1966
1966
1967 def _makelogrevset(repo, pats, opts, revs):
1967 def _makelogrevset(repo, pats, opts, revs):
1968 """Return (expr, filematcher) where expr is a revset string built
1968 """Return (expr, filematcher) where expr is a revset string built
1969 from log options and file patterns or None. If --stat or --patch
1969 from log options and file patterns or None. If --stat or --patch
1970 are not passed filematcher is None. Otherwise it is a callable
1970 are not passed filematcher is None. Otherwise it is a callable
1971 taking a revision number and returning a match objects filtering
1971 taking a revision number and returning a match objects filtering
1972 the files to be detailed when displaying the revision.
1972 the files to be detailed when displaying the revision.
1973 """
1973 """
1974 opt2revset = {
1974 opt2revset = {
1975 'no_merges': ('not merge()', None),
1975 'no_merges': ('not merge()', None),
1976 'only_merges': ('merge()', None),
1976 'only_merges': ('merge()', None),
1977 '_ancestors': ('ancestors(%(val)s)', None),
1977 '_ancestors': ('ancestors(%(val)s)', None),
1978 '_fancestors': ('_firstancestors(%(val)s)', None),
1978 '_fancestors': ('_firstancestors(%(val)s)', None),
1979 '_descendants': ('descendants(%(val)s)', None),
1979 '_descendants': ('descendants(%(val)s)', None),
1980 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1980 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1981 '_matchfiles': ('_matchfiles(%(val)s)', None),
1981 '_matchfiles': ('_matchfiles(%(val)s)', None),
1982 'date': ('date(%(val)r)', None),
1982 'date': ('date(%(val)r)', None),
1983 'branch': ('branch(%(val)r)', ' or '),
1983 'branch': ('branch(%(val)r)', ' or '),
1984 '_patslog': ('filelog(%(val)r)', ' or '),
1984 '_patslog': ('filelog(%(val)r)', ' or '),
1985 '_patsfollow': ('follow(%(val)r)', ' or '),
1985 '_patsfollow': ('follow(%(val)r)', ' or '),
1986 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1986 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1987 'keyword': ('keyword(%(val)r)', ' or '),
1987 'keyword': ('keyword(%(val)r)', ' or '),
1988 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1988 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1989 'user': ('user(%(val)r)', ' or '),
1989 'user': ('user(%(val)r)', ' or '),
1990 }
1990 }
1991
1991
1992 opts = dict(opts)
1992 opts = dict(opts)
1993 # follow or not follow?
1993 # follow or not follow?
1994 follow = opts.get('follow') or opts.get('follow_first')
1994 follow = opts.get('follow') or opts.get('follow_first')
1995 if opts.get('follow_first'):
1995 if opts.get('follow_first'):
1996 followfirst = 1
1996 followfirst = 1
1997 else:
1997 else:
1998 followfirst = 0
1998 followfirst = 0
1999 # --follow with FILE behavior depends on revs...
1999 # --follow with FILE behavior depends on revs...
2000 it = iter(revs)
2000 it = iter(revs)
2001 startrev = it.next()
2001 startrev = it.next()
2002 followdescendants = startrev < next(it, startrev)
2002 followdescendants = startrev < next(it, startrev)
2003
2003
2004 # branch and only_branch are really aliases and must be handled at
2004 # branch and only_branch are really aliases and must be handled at
2005 # the same time
2005 # the same time
2006 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2006 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2007 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2007 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2008 # pats/include/exclude are passed to match.match() directly in
2008 # pats/include/exclude are passed to match.match() directly in
2009 # _matchfiles() revset but walkchangerevs() builds its matcher with
2009 # _matchfiles() revset but walkchangerevs() builds its matcher with
2010 # scmutil.match(). The difference is input pats are globbed on
2010 # scmutil.match(). The difference is input pats are globbed on
2011 # platforms without shell expansion (windows).
2011 # platforms without shell expansion (windows).
2012 wctx = repo[None]
2012 wctx = repo[None]
2013 match, pats = scmutil.matchandpats(wctx, pats, opts)
2013 match, pats = scmutil.matchandpats(wctx, pats, opts)
2014 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2014 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2015 opts.get('removed'))
2015 opts.get('removed'))
2016 if not slowpath:
2016 if not slowpath:
2017 for f in match.files():
2017 for f in match.files():
2018 if follow and f not in wctx:
2018 if follow and f not in wctx:
2019 # If the file exists, it may be a directory, so let it
2019 # If the file exists, it may be a directory, so let it
2020 # take the slow path.
2020 # take the slow path.
2021 if os.path.exists(repo.wjoin(f)):
2021 if os.path.exists(repo.wjoin(f)):
2022 slowpath = True
2022 slowpath = True
2023 continue
2023 continue
2024 else:
2024 else:
2025 raise error.Abort(_('cannot follow file not in parent '
2025 raise error.Abort(_('cannot follow file not in parent '
2026 'revision: "%s"') % f)
2026 'revision: "%s"') % f)
2027 filelog = repo.file(f)
2027 filelog = repo.file(f)
2028 if not filelog:
2028 if not filelog:
2029 # A zero count may be a directory or deleted file, so
2029 # A zero count may be a directory or deleted file, so
2030 # try to find matching entries on the slow path.
2030 # try to find matching entries on the slow path.
2031 if follow:
2031 if follow:
2032 raise error.Abort(
2032 raise error.Abort(
2033 _('cannot follow nonexistent file: "%s"') % f)
2033 _('cannot follow nonexistent file: "%s"') % f)
2034 slowpath = True
2034 slowpath = True
2035
2035
2036 # We decided to fall back to the slowpath because at least one
2036 # We decided to fall back to the slowpath because at least one
2037 # of the paths was not a file. Check to see if at least one of them
2037 # of the paths was not a file. Check to see if at least one of them
2038 # existed in history - in that case, we'll continue down the
2038 # existed in history - in that case, we'll continue down the
2039 # slowpath; otherwise, we can turn off the slowpath
2039 # slowpath; otherwise, we can turn off the slowpath
2040 if slowpath:
2040 if slowpath:
2041 for path in match.files():
2041 for path in match.files():
2042 if path == '.' or path in repo.store:
2042 if path == '.' or path in repo.store:
2043 break
2043 break
2044 else:
2044 else:
2045 slowpath = False
2045 slowpath = False
2046
2046
2047 fpats = ('_patsfollow', '_patsfollowfirst')
2047 fpats = ('_patsfollow', '_patsfollowfirst')
2048 fnopats = (('_ancestors', '_fancestors'),
2048 fnopats = (('_ancestors', '_fancestors'),
2049 ('_descendants', '_fdescendants'))
2049 ('_descendants', '_fdescendants'))
2050 if slowpath:
2050 if slowpath:
2051 # See walkchangerevs() slow path.
2051 # See walkchangerevs() slow path.
2052 #
2052 #
2053 # pats/include/exclude cannot be represented as separate
2053 # pats/include/exclude cannot be represented as separate
2054 # revset expressions as their filtering logic applies at file
2054 # revset expressions as their filtering logic applies at file
2055 # level. For instance "-I a -X a" matches a revision touching
2055 # level. For instance "-I a -X a" matches a revision touching
2056 # "a" and "b" while "file(a) and not file(b)" does
2056 # "a" and "b" while "file(a) and not file(b)" does
2057 # not. Besides, filesets are evaluated against the working
2057 # not. Besides, filesets are evaluated against the working
2058 # directory.
2058 # directory.
2059 matchargs = ['r:', 'd:relpath']
2059 matchargs = ['r:', 'd:relpath']
2060 for p in pats:
2060 for p in pats:
2061 matchargs.append('p:' + p)
2061 matchargs.append('p:' + p)
2062 for p in opts.get('include', []):
2062 for p in opts.get('include', []):
2063 matchargs.append('i:' + p)
2063 matchargs.append('i:' + p)
2064 for p in opts.get('exclude', []):
2064 for p in opts.get('exclude', []):
2065 matchargs.append('x:' + p)
2065 matchargs.append('x:' + p)
2066 matchargs = ','.join(('%r' % p) for p in matchargs)
2066 matchargs = ','.join(('%r' % p) for p in matchargs)
2067 opts['_matchfiles'] = matchargs
2067 opts['_matchfiles'] = matchargs
2068 if follow:
2068 if follow:
2069 opts[fnopats[0][followfirst]] = '.'
2069 opts[fnopats[0][followfirst]] = '.'
2070 else:
2070 else:
2071 if follow:
2071 if follow:
2072 if pats:
2072 if pats:
2073 # follow() revset interprets its file argument as a
2073 # follow() revset interprets its file argument as a
2074 # manifest entry, so use match.files(), not pats.
2074 # manifest entry, so use match.files(), not pats.
2075 opts[fpats[followfirst]] = list(match.files())
2075 opts[fpats[followfirst]] = list(match.files())
2076 else:
2076 else:
2077 op = fnopats[followdescendants][followfirst]
2077 op = fnopats[followdescendants][followfirst]
2078 opts[op] = 'rev(%d)' % startrev
2078 opts[op] = 'rev(%d)' % startrev
2079 else:
2079 else:
2080 opts['_patslog'] = list(pats)
2080 opts['_patslog'] = list(pats)
2081
2081
2082 filematcher = None
2082 filematcher = None
2083 if opts.get('patch') or opts.get('stat'):
2083 if opts.get('patch') or opts.get('stat'):
2084 # When following files, track renames via a special matcher.
2084 # When following files, track renames via a special matcher.
2085 # If we're forced to take the slowpath it means we're following
2085 # If we're forced to take the slowpath it means we're following
2086 # at least one pattern/directory, so don't bother with rename tracking.
2086 # at least one pattern/directory, so don't bother with rename tracking.
2087 if follow and not match.always() and not slowpath:
2087 if follow and not match.always() and not slowpath:
2088 # _makefollowlogfilematcher expects its files argument to be
2088 # _makefollowlogfilematcher expects its files argument to be
2089 # relative to the repo root, so use match.files(), not pats.
2089 # relative to the repo root, so use match.files(), not pats.
2090 filematcher = _makefollowlogfilematcher(repo, match.files(),
2090 filematcher = _makefollowlogfilematcher(repo, match.files(),
2091 followfirst)
2091 followfirst)
2092 else:
2092 else:
2093 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2093 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2094 if filematcher is None:
2094 if filematcher is None:
2095 filematcher = lambda rev: match
2095 filematcher = lambda rev: match
2096
2096
2097 expr = []
2097 expr = []
2098 for op, val in sorted(opts.iteritems()):
2098 for op, val in sorted(opts.iteritems()):
2099 if not val:
2099 if not val:
2100 continue
2100 continue
2101 if op not in opt2revset:
2101 if op not in opt2revset:
2102 continue
2102 continue
2103 revop, andor = opt2revset[op]
2103 revop, andor = opt2revset[op]
2104 if '%(val)' not in revop:
2104 if '%(val)' not in revop:
2105 expr.append(revop)
2105 expr.append(revop)
2106 else:
2106 else:
2107 if not isinstance(val, list):
2107 if not isinstance(val, list):
2108 e = revop % {'val': val}
2108 e = revop % {'val': val}
2109 else:
2109 else:
2110 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2110 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2111 expr.append(e)
2111 expr.append(e)
2112
2112
2113 if expr:
2113 if expr:
2114 expr = '(' + ' and '.join(expr) + ')'
2114 expr = '(' + ' and '.join(expr) + ')'
2115 else:
2115 else:
2116 expr = None
2116 expr = None
2117 return expr, filematcher
2117 return expr, filematcher
2118
2118
def _logrevs(repo, opts):
    """Resolve the default set of revisions for a log-like command.

    The default --rev value depends on --follow, but --follow behavior
    depends on revisions resolved from --rev, hence this helper.
    """
    if opts.get('rev'):
        # Explicit --rev wins: resolve the user-supplied ranges as-is.
        return scmutil.revrange(repo, opts['rev'])
    following = opts.get('follow') or opts.get('follow_first')
    if following:
        if repo.dirstate.p1() == nullid:
            # The working directory parent is the null revision: there
            # is nothing to follow.
            return revset.baseset()
        return repo.revs('reverse(:.)')
    # Neither --rev nor --follow: walk the whole repo, newest first.
    allrevs = revset.spanset(repo)
    allrevs.reverse()
    return allrevs
2133
2133
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) for graph log display.

    revs is an iterable of revision numbers; expr is a revset string
    built from log options and file patterns (or None) that was used to
    filter 'revs'.  filematcher is None unless --stat or --patch was
    passed, in which case it is a callable mapping a revision number to
    a match object selecting the files to detail for that revision.
    """
    maxcount = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs.
        revs.sort(reverse=True)
    if expr:
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        revs.reverse()
        matcher = revset.match(repo.ui, expr)
        revs = matcher(repo, revs)
        # Revset matches can reorder revisions: "A or B" typically
        # yields the revision matching A then the one matching B.  Sort
        # again to restore the expected display order.
        revs.sort(reverse=True)
    if maxcount is not None:
        # Honor --limit without consuming the whole (potentially lazy)
        # revision set.
        truncated = []
        for pos, rev in enumerate(revs):
            if pos == maxcount:
                break
            truncated.append(rev)
        revs = revset.baseset(truncated)

    return revs, expr, filematcher
2170
2170
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match object
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        # Nothing to display; short-circuit before building a revset.
        return revset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        if not opts.get('rev'):
            revs.reverse()
        matcher = revset.match(repo.ui, expr)
        # Revset matches can reorder revisions. "A or B" typically
        # returns the revision matching A then the revision matching B. Sort
        # again to fix that.
        fixopts = ['branch', 'only_branch', 'keyword', 'user']
        # Keep the pre-match set around so we can re-filter below when
        # the matcher may have changed the user-requested order.
        oldrevs = revs
        revs = matcher(repo, revs)
        if not opts.get('rev'):
            revs.sort(reverse=True)
        elif len(pats) > 1 or any(len(opts.get(op, [])) > 1 for op in fixopts):
            # XXX "A or B" is known to change the order; fix it by filtering
            # matched set again (issue5100)
            revs = oldrevs & revs
    if limit is not None:
        # Honor --limit by truncating lazily, without consuming the
        # whole (potentially lazy) smartset.
        limitedrevs = []
        for idx, r in enumerate(revs):
            if limit <= idx:
                break
            limitedrevs.append(r)
        revs = revset.baseset(limitedrevs)

    return revs, expr, filematcher
2211
2211
def _graphnodeformatter(ui, displayer):
    """Build a callable rendering the graph node text for a changectx.

    Returns the stock {graphnode} keyword renderer when no custom
    ui.graphnodetemplate is configured.
    """
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        # fast path for "{graphnode}"
        return templatekw.showgraphnode

    tmpl = formatter.gettemplater(ui, 'graphnode', spec)
    if isinstance(displayer, changeset_templater):
        # reuse the displayer's cache of slow templates
        tcache = displayer.cache
    else:
        tcache = {}
    props = templatekw.keywords.copy()
    props['templ'] = tmpl
    props['cache'] = tcache

    def formatnode(repo, ctx):
        # Fill in the per-revision template properties and render.
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templater.stringify(tmpl('graphnode', **props))

    return formatnode
2231
2231
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    """Render the changesets of 'dag' as an ASCII graph on 'ui'.

    'edgefn' computes the graph edges for each revision; 'getrenamed',
    when given, is used to report copies; 'filematcher', when given,
    maps a revision to the match object used for its detail output.
    """
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # Only apply graph styling when HGPLAIN is not in effect.
    if ui.plain('graph'):
        # Force every edge style to '|', the default pre-3.8 behaviour.
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for stylename, edgekey in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            configured = ui.config('experimental',
                                   'graphstyle.%s' % stylename,
                                   styles[edgekey])
            # An empty value disables drawing for this edge type.
            styles[edgekey] = configured or None

    # experimental config: experimental.graphshorten
    state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, etype, ctx, parents in dag:
        nodechar = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fname in ctx.files():
                rename = getrenamed(fname, ctx.rev())
                if rename:
                    copies.append((fname, rename[0]))
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        else:
            revmatchfn = None
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # The displayer buffers its output; pop this revision's text and
        # drop a trailing empty line, if any.
        outputlines = displayer.hunk.pop(rev).split('\n')
        if not outputlines[-1]:
            del outputlines[-1]
        displayer.flush(ctx)
        for gtype, gchar, glines, coldata in edgefn(etype, nodechar,
                                                    outputlines, state,
                                                    rev, parents):
            graphmod.ascii(ui, state, gtype, gchar, glines, coldata)
    displayer.close()
2279
2279
def graphlog(ui, repo, *pats, **opts):
    """Display the log as an ASCII revision graph.

    Parameters are identical to those of the log command.
    """
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    walker = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        # Bound rename lookups by the newest requested revision, if any.
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        else:
            endrev = None
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, walker, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)
2294
2294
def checkunsupportedgraphflags(pats, opts):
    """Abort if any option incompatible with --graph was requested."""
    for badopt in ["newest_first"]:
        if opts.get(badopt):
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % badopt.replace("_", "-"))
2300
2300
def graphrevs(repo, nodes, opts):
    """Return graph data for 'nodes', newest first, honoring --limit.

    Note: reverses the caller's 'nodes' list in place.
    """
    maxcount = loglimit(opts)
    nodes.reverse()
    if maxcount is None:
        return graphmod.nodes(repo, nodes)
    return graphmod.nodes(repo, nodes[:maxcount])
2307
2307
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule files matched by 'match' for addition to the dirstate.

    'prefix' is the path from the repo root to the calling subrepo (used
    to present paths to the user).  When 'explicitonly' is true, only
    exactly-named files are added.  Recurses into subrepos; with the
    'subrepos' opt their non-exact matches are added too.  Honors
    'dry_run'.  Returns the list of bad (unaddable) files.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # record bad files while still reporting them via the match object
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        # audit added names for case collisions on case-folding systems
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        # non-exact matches must be untracked and actually exist on disk
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    # recurse into subrepositories
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get('subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                # without --subrepos, only add exactly-named files there
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        # only report rejections for files the user explicitly listed
        bad.extend(f for f in rejected if f in match.files())
    return bad
2350
2350
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking files matched by 'match' without deleting them.

    'prefix' is the path from the repo root to the calling subrepo.
    When 'explicitonly' is true, only exactly-named files are forgotten.
    Recurses into subrepositories.  Returns (bad, forgot): files that
    could not be forgotten, and files that were.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # record bad files while still reporting them via the match object
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    # candidates: modified, added, deleted and clean files (status
    # tuple order; see remove() below for the same indexing)
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    # recurse into subrepositories, prefixing their results back
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # warn about explicitly-named files that are already untracked
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    # only report rejections for files the user explicitly listed
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2398
2398
def files(ui, ctx, m, fm, fmt, subrepos):
    """Write the files of 'ctx' matched by 'm' to formatter 'fm'.

    'fmt' is the per-path format string.  With --verbose, size and
    flags are emitted before each path.  When ctx is the working
    context, files marked removed in the dirstate are skipped.
    Subrepositories are recursed into when 'subrepos' is true or a
    pattern explicitly names one.

    Returns 0 if at least one file was listed, 1 otherwise.
    """
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    def matchessubrepo(subpath):
        # A subrepo is wanted if it is named exactly or any file
        # pattern lies inside it.  Hoisted out of the loop below; the
        # original re-defined this closure on every iteration.
        return (m.exact(subpath)
                or any(f.startswith(subpath + '/') for f in m.files()))

    for f in ctx.matches(m):
        if rev is None and ds[f] == 'r':
            # removed in the working directory: not a current file
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        if subrepos or matchessubrepo(subpath):
            sub = ctx.sub(subpath)
            try:
                submatch = matchmod.subdirmatcher(subpath, m)
                # recurse fully only for exact names or with --subrepos
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2432
2432
2433 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2433 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2434 join = lambda f: os.path.join(prefix, f)
2434 join = lambda f: os.path.join(prefix, f)
2435 ret = 0
2435 ret = 0
2436 s = repo.status(match=m, clean=True)
2436 s = repo.status(match=m, clean=True)
2437 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2437 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2438
2438
2439 wctx = repo[None]
2439 wctx = repo[None]
2440
2440
2441 if warnings is None:
2441 if warnings is None:
2442 warnings = []
2442 warnings = []
2443 warn = True
2443 warn = True
2444 else:
2444 else:
2445 warn = False
2445 warn = False
2446
2446
2447 subs = sorted(wctx.substate)
2447 subs = sorted(wctx.substate)
2448 total = len(subs)
2448 total = len(subs)
2449 count = 0
2449 count = 0
2450 for subpath in subs:
2450 for subpath in subs:
2451 def matchessubrepo(matcher, subpath):
2451 def matchessubrepo(matcher, subpath):
2452 if matcher.exact(subpath):
2452 if matcher.exact(subpath):
2453 return True
2453 return True
2454 for f in matcher.files():
2454 for f in matcher.files():
2455 if f.startswith(subpath):
2455 if f.startswith(subpath):
2456 return True
2456 return True
2457 return False
2457 return False
2458
2458
2459 count += 1
2459 count += 1
2460 if subrepos or matchessubrepo(m, subpath):
2460 if subrepos or matchessubrepo(m, subpath):
2461 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2461 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2462
2462
2463 sub = wctx.sub(subpath)
2463 sub = wctx.sub(subpath)
2464 try:
2464 try:
2465 submatch = matchmod.subdirmatcher(subpath, m)
2465 submatch = matchmod.subdirmatcher(subpath, m)
2466 if sub.removefiles(submatch, prefix, after, force, subrepos,
2466 if sub.removefiles(submatch, prefix, after, force, subrepos,
2467 warnings):
2467 warnings):
2468 ret = 1
2468 ret = 1
2469 except error.LookupError:
2469 except error.LookupError:
2470 warnings.append(_("skipping missing subrepository: %s\n")
2470 warnings.append(_("skipping missing subrepository: %s\n")
2471 % join(subpath))
2471 % join(subpath))
2472 ui.progress(_('searching'), None)
2472 ui.progress(_('searching'), None)
2473
2473
2474 # warn about failure to delete explicit files/dirs
2474 # warn about failure to delete explicit files/dirs
2475 deleteddirs = util.dirs(deleted)
2475 deleteddirs = util.dirs(deleted)
2476 files = m.files()
2476 files = m.files()
2477 total = len(files)
2477 total = len(files)
2478 count = 0
2478 count = 0
2479 for f in files:
2479 for f in files:
2480 def insubrepo():
2480 def insubrepo():
2481 for subpath in wctx.substate:
2481 for subpath in wctx.substate:
2482 if f.startswith(subpath):
2482 if f.startswith(subpath):
2483 return True
2483 return True
2484 return False
2484 return False
2485
2485
2486 count += 1
2486 count += 1
2487 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2487 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2488 isdir = f in deleteddirs or wctx.hasdir(f)
2488 isdir = f in deleteddirs or wctx.hasdir(f)
2489 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2489 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2490 continue
2490 continue
2491
2491
2492 if repo.wvfs.exists(f):
2492 if repo.wvfs.exists(f):
2493 if repo.wvfs.isdir(f):
2493 if repo.wvfs.isdir(f):
2494 warnings.append(_('not removing %s: no tracked files\n')
2494 warnings.append(_('not removing %s: no tracked files\n')
2495 % m.rel(f))
2495 % m.rel(f))
2496 else:
2496 else:
2497 warnings.append(_('not removing %s: file is untracked\n')
2497 warnings.append(_('not removing %s: file is untracked\n')
2498 % m.rel(f))
2498 % m.rel(f))
2499 # missing files will generate a warning elsewhere
2499 # missing files will generate a warning elsewhere
2500 ret = 1
2500 ret = 1
2501 ui.progress(_('deleting'), None)
2501 ui.progress(_('deleting'), None)
2502
2502
2503 if force:
2503 if force:
2504 list = modified + deleted + clean + added
2504 list = modified + deleted + clean + added
2505 elif after:
2505 elif after:
2506 list = deleted
2506 list = deleted
2507 remaining = modified + added + clean
2507 remaining = modified + added + clean
2508 total = len(remaining)
2508 total = len(remaining)
2509 count = 0
2509 count = 0
2510 for f in remaining:
2510 for f in remaining:
2511 count += 1
2511 count += 1
2512 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2512 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2513 warnings.append(_('not removing %s: file still exists\n')
2513 warnings.append(_('not removing %s: file still exists\n')
2514 % m.rel(f))
2514 % m.rel(f))
2515 ret = 1
2515 ret = 1
2516 ui.progress(_('skipping'), None)
2516 ui.progress(_('skipping'), None)
2517 else:
2517 else:
2518 list = deleted + clean
2518 list = deleted + clean
2519 total = len(modified) + len(added)
2519 total = len(modified) + len(added)
2520 count = 0
2520 count = 0
2521 for f in modified:
2521 for f in modified:
2522 count += 1
2522 count += 1
2523 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2523 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2524 warnings.append(_('not removing %s: file is modified (use -f'
2524 warnings.append(_('not removing %s: file is modified (use -f'
2525 ' to force removal)\n') % m.rel(f))
2525 ' to force removal)\n') % m.rel(f))
2526 ret = 1
2526 ret = 1
2527 for f in added:
2527 for f in added:
2528 count += 1
2528 count += 1
2529 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2529 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2530 warnings.append(_('not removing %s: file has been marked for add'
2530 warnings.append(_('not removing %s: file has been marked for add'
2531 ' (use forget to undo)\n') % m.rel(f))
2531 ' (use forget to undo)\n') % m.rel(f))
2532 ret = 1
2532 ret = 1
2533 ui.progress(_('skipping'), None)
2533 ui.progress(_('skipping'), None)
2534
2534
2535 list = sorted(list)
2535 list = sorted(list)
2536 total = len(list)
2536 total = len(list)
2537 count = 0
2537 count = 0
2538 for f in list:
2538 for f in list:
2539 count += 1
2539 count += 1
2540 if ui.verbose or not m.exact(f):
2540 if ui.verbose or not m.exact(f):
2541 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2541 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2542 ui.status(_('removing %s\n') % m.rel(f))
2542 ui.status(_('removing %s\n') % m.rel(f))
2543 ui.progress(_('deleting'), None)
2543 ui.progress(_('deleting'), None)
2544
2544
2545 with repo.wlock():
2545 with repo.wlock():
2546 if not after:
2546 if not after:
2547 for f in list:
2547 for f in list:
2548 if f in added:
2548 if f in added:
2549 continue # we never unlink added files on remove
2549 continue # we never unlink added files on remove
2550 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2550 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2551 repo[None].forget(list)
2551 repo[None].forget(list)
2552
2552
2553 if warn:
2553 if warn:
2554 for warning in warnings:
2554 for warning in warnings:
2555 ui.warn(warning)
2555 ui.warn(warning)
2556
2556
2557 return ret
2557 return ret
2558
2558
def cat(ui, repo, ctx, matcher, prefix, **opts):
    """Write out the data of the files in ctx selected by matcher.

    Each file is written through makefileobj (which honors the --output
    template), optionally filtered through repo.wwritedata when --decode
    is set.  Matching subrepositories are descended into recursively,
    with their output paths joined onto ``prefix``.

    Returns 0 if at least one file was written, 1 otherwise.
    """
    err = 1

    def write(path):
        # open the destination (a file or stdout, per --output) for this path
        fp = makefileobj(repo, opts.get('output'), ctx.node(),
                         pathname=os.path.join(prefix, path))
        data = ctx[path].data()
        if opts.get('decode'):
            data = repo.wwritedata(path, data)
        fp.write(data)
        fp.close()

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mf = repo.manifest
        mfnode = ctx.manifestnode()
        if mfnode and mf.find(mfnode, file)[0]:
            write(file)
            return 0

    # Don't warn about "missing" files that are really in subrepos
    def badfn(path, msg):
        for subpath in ctx.substate:
            if path.startswith(subpath):
                return
        matcher.bad(path, msg)

    for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            # a subrepo returning a falsy value means it wrote something
            if not sub.cat(submatch, os.path.join(prefix, sub._path),
                           **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2605
2605
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if (opts.get('addremove')
        and scmutil.addremove(repo, matcher, "", opts) != 0):
        raise error.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2622
2622
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Rewrite changeset ``old``, folding in any working-directory changes.

    A temporary intermediate commit is created first (hooks and the active
    bookmark suppressed), then a memctx combining ``old`` and that commit
    is committed on top of ``old``'s first parent.  Bookmarks and the
    working-copy parent are rerouted to the result; the superseded
    changesets are either obsoleted (when createmarkers is enabled) or
    stripped with an 'amend-backup' bundle.

    Returns the node id of the amended changeset, or ``old.node()`` when
    nothing actually changed.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                # deactivate the bookmark so the temporary commit does not
                # move it; restored (and recorded) in the finally below
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            # |          from working dir to go into amending commit
            # |          (or a workingctx if there were no changes)
            # |
            # old      o - changeset to amend
            # |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                # NOTE(review): 'old.p2' is a bound method and is therefore
                # always truthy, so this branch always runs; a genuine
                # second-parent test would be len(old.parents()) > 1 --
                # confirm intent before changing
                if old.p2:
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())

                def samefile(f):
                    # True when f is identical (content and flags) in ctx
                    # and base, or absent from both
                    if f in ctx.manifest():
                        a = ctx.filectx(f)
                        if f in base.manifest():
                            b = base.filectx(f)
                            return (not a.cmp(b)
                                    and a.flags() == b.flags())
                        else:
                            return False
                    else:
                        return f not in base.manifest()
                files = [f for f in files if not samefile(f)]

                def filectxfn(repo, ctx_, path):
                    # serve file data from the intermediate commit; None
                    # signals deletion to memctx
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

                user = opts.get('user') or old.user()
                date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This not what we expect from amend.
                return old.node()

            # commit the new changeset under the old one's phase (or secret
            # when requested), restoring the phase config afterwards
            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
                #commit the whole amend process
                if createmarkers:
                    # mark the new changeset as successor of the rewritten one
                    new = repo[newid]
                    obs = [(old, (new,))]
                    if node:
                        obs.append((ctx, ()))

                    obsolete.createmarkers(repo, obs)
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, wlock)
    return newid
2817
2817
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's description, launching the editor when it is empty."""
    description = ctx.description()
    if not description:
        description = commitforceeditor(repo, ctx, subs, editform=editform,
                                        unchangedmessagedetection=True)
    return description
2823
2823
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Launch the user's editor to obtain a commit message for ctx.

    The initial editor text comes from a [committemplate] template when one
    is configured for ``editform`` (searched most-specific to least), or
    from buildcommittext otherwise.  HG:-prefixed lines are stripped from
    the result, ``finishdesc`` (if given) post-processes it, and an Abort
    is raised when the message is empty or -- with
    ``unchangedmessagedetection`` -- left unedited from the template.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # try 'changeset.<editform...>' configs from most to least specific;
    # the while/else runs the fallback only when no template matched
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = os.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending)
    # drop the HG: helper lines the user was shown
    text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2864
2864
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the configured commit-message template for ctx as a string."""
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    templ = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)

    # expose every [committemplate] item except 'changeset' itself as a
    # sub-template usable from the main one
    for key, value in repo.ui.configitems('committemplate'):
        if key == 'changeset':
            continue
        templ.t.cache[key] = value

    extramsg = extramsg or ''  # ensure that extramsg is a string

    ui.pushbuffer()
    templ.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2881
2881
def hgprefix(msg):
    """Prefix every non-empty line of msg with 'HG: ', dropping blank lines."""
    prefixed = []
    for line in msg.split("\n"):
        if line:
            prefixed.append("HG: %s" % line)
    return "\n".join(prefixed)
2884
2884
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default (non-templated) editor text for committing ctx."""
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    lines = []
    if ctx.description():
        lines.append(ctx.description())
    lines.append("")
    lines.append("") # Empty line between message and comments.
    lines.append(hgprefix(_("Enter commit message."
                            " Lines beginning with 'HG:' are removed.")))
    lines.append(hgprefix(extramsg))
    lines.append("HG: --")
    lines.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        lines.append(hgprefix(_("branch merge")))
    if ctx.branch():
        lines.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        lines.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    for s in subs:
        lines.append(hgprefix(_("subrepo %s") % s))
    for f in added:
        lines.append(hgprefix(_("added %s") % f))
    for f in modified:
        lines.append(hgprefix(_("changed %s") % f))
    for f in removed:
        lines.append(hgprefix(_("removed %s") % f))
    if not (added or modified or removed):
        lines.append(hgprefix(_("no files changed")))
    lines.append("")

    return "\n".join(lines)
2912
2912
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Report the outcome of a commit of ``node`` on ``branch``.

    Prints 'created new head' when the commit adds a head to ``bheads``
    (per the decision table below), 'reopening closed branch head' when a
    parent closed this branch, and the committed changeset id at verbose
    or debug level.
    """
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N y additional topo root
        #
        # B N y additional branch root
        # C N y additional topo head
        # H N n usual case
        #
        # B B y weird additional branch root
        # C B y branch merge
        # H B n merge with named branch
        #
        # C C y additional head from merge
        # C H n merge with a head
        #
        # H H n head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2960
2960
def postcommitstatus(repo, pats, opts):
    """Return the working-directory status for pats after a commit."""
    matcher = scmutil.match(repo[None], pats, opts)
    return repo.status(match=matcher)
2963
2963
2964 def revert(ui, repo, ctx, parents, *pats, **opts):
2964 def revert(ui, repo, ctx, parents, *pats, **opts):
2965 parent, p2 = parents
2965 parent, p2 = parents
2966 node = ctx.node()
2966 node = ctx.node()
2967
2967
2968 mf = ctx.manifest()
2968 mf = ctx.manifest()
2969 if node == p2:
2969 if node == p2:
2970 parent = p2
2970 parent = p2
2971
2971
2972 # need all matching names in dirstate and manifest of target rev,
2972 # need all matching names in dirstate and manifest of target rev,
2973 # so have to walk both. do not print errors if files exist in one
2973 # so have to walk both. do not print errors if files exist in one
2974 # but not other. in both cases, filesets should be evaluated against
2974 # but not other. in both cases, filesets should be evaluated against
2975 # workingctx to get consistent result (issue4497). this means 'set:**'
2975 # workingctx to get consistent result (issue4497). this means 'set:**'
2976 # cannot be used to select missing files from target rev.
2976 # cannot be used to select missing files from target rev.
2977
2977
2978 # `names` is a mapping for all elements in working copy and target revision
2978 # `names` is a mapping for all elements in working copy and target revision
2979 # The mapping is in the form:
2979 # The mapping is in the form:
2980 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2980 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2981 names = {}
2981 names = {}
2982
2982
2983 with repo.wlock():
2983 with repo.wlock():
2984 ## filling of the `names` mapping
2984 ## filling of the `names` mapping
2985 # walk dirstate to fill `names`
2985 # walk dirstate to fill `names`
2986
2986
2987 interactive = opts.get('interactive', False)
2987 interactive = opts.get('interactive', False)
2988 wctx = repo[None]
2988 wctx = repo[None]
2989 m = scmutil.match(wctx, pats, opts)
2989 m = scmutil.match(wctx, pats, opts)
2990
2990
2991 # we'll need this later
2991 # we'll need this later
2992 targetsubs = sorted(s for s in wctx.substate if m(s))
2992 targetsubs = sorted(s for s in wctx.substate if m(s))
2993
2993
2994 if not m.always():
2994 if not m.always():
2995 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2995 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2996 names[abs] = m.rel(abs), m.exact(abs)
2996 names[abs] = m.rel(abs), m.exact(abs)
2997
2997
2998 # walk target manifest to fill `names`
2998 # walk target manifest to fill `names`
2999
2999
3000 def badfn(path, msg):
3000 def badfn(path, msg):
3001 if path in names:
3001 if path in names:
3002 return
3002 return
3003 if path in ctx.substate:
3003 if path in ctx.substate:
3004 return
3004 return
3005 path_ = path + '/'
3005 path_ = path + '/'
3006 for f in names:
3006 for f in names:
3007 if f.startswith(path_):
3007 if f.startswith(path_):
3008 return
3008 return
3009 ui.warn("%s: %s\n" % (m.rel(path), msg))
3009 ui.warn("%s: %s\n" % (m.rel(path), msg))
3010
3010
3011 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3011 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3012 if abs not in names:
3012 if abs not in names:
3013 names[abs] = m.rel(abs), m.exact(abs)
3013 names[abs] = m.rel(abs), m.exact(abs)
3014
3014
3015 # Find status of all file in `names`.
3015 # Find status of all file in `names`.
3016 m = scmutil.matchfiles(repo, names)
3016 m = scmutil.matchfiles(repo, names)
3017
3017
3018 changes = repo.status(node1=node, match=m,
3018 changes = repo.status(node1=node, match=m,
3019 unknown=True, ignored=True, clean=True)
3019 unknown=True, ignored=True, clean=True)
3020 else:
3020 else:
3021 changes = repo.status(node1=node, match=m)
3021 changes = repo.status(node1=node, match=m)
3022 for kind in changes:
3022 for kind in changes:
3023 for abs in kind:
3023 for abs in kind:
3024 names[abs] = m.rel(abs), m.exact(abs)
3024 names[abs] = m.rel(abs), m.exact(abs)
3025
3025
3026 m = scmutil.matchfiles(repo, names)
3026 m = scmutil.matchfiles(repo, names)
3027
3027
3028 modified = set(changes.modified)
3028 modified = set(changes.modified)
3029 added = set(changes.added)
3029 added = set(changes.added)
3030 removed = set(changes.removed)
3030 removed = set(changes.removed)
3031 _deleted = set(changes.deleted)
3031 _deleted = set(changes.deleted)
3032 unknown = set(changes.unknown)
3032 unknown = set(changes.unknown)
3033 unknown.update(changes.ignored)
3033 unknown.update(changes.ignored)
3034 clean = set(changes.clean)
3034 clean = set(changes.clean)
3035 modadded = set()
3035 modadded = set()
3036
3036
3037 # split between files known in target manifest and the others
3037 # split between files known in target manifest and the others
3038 smf = set(mf)
3038 smf = set(mf)
3039
3039
3040 # determine the exact nature of the deleted changesets
3040 # determine the exact nature of the deleted changesets
3041 deladded = _deleted - smf
3041 deladded = _deleted - smf
3042 deleted = _deleted - deladded
3042 deleted = _deleted - deladded
3043
3043
3044 # We need to account for the state of the file in the dirstate,
3044 # We need to account for the state of the file in the dirstate,
3045 # even when we revert against something else than parent. This will
3045 # even when we revert against something else than parent. This will
3046 # slightly alter the behavior of revert (doing back up or not, delete
3046 # slightly alter the behavior of revert (doing back up or not, delete
3047 # or just forget etc).
3047 # or just forget etc).
3048 if parent == node:
3048 if parent == node:
3049 dsmodified = modified
3049 dsmodified = modified
3050 dsadded = added
3050 dsadded = added
3051 dsremoved = removed
3051 dsremoved = removed
3052 # store all local modifications, useful later for rename detection
3052 # store all local modifications, useful later for rename detection
3053 localchanges = dsmodified | dsadded
3053 localchanges = dsmodified | dsadded
3054 modified, added, removed = set(), set(), set()
3054 modified, added, removed = set(), set(), set()
3055 else:
3055 else:
3056 changes = repo.status(node1=parent, match=m)
3056 changes = repo.status(node1=parent, match=m)
3057 dsmodified = set(changes.modified)
3057 dsmodified = set(changes.modified)
3058 dsadded = set(changes.added)
3058 dsadded = set(changes.added)
3059 dsremoved = set(changes.removed)
3059 dsremoved = set(changes.removed)
3060 # store all local modifications, useful later for rename detection
3060 # store all local modifications, useful later for rename detection
3061 localchanges = dsmodified | dsadded
3061 localchanges = dsmodified | dsadded
3062
3062
3063 # only take into account for removes between wc and target
3063 # only take into account for removes between wc and target
3064 clean |= dsremoved - removed
3064 clean |= dsremoved - removed
3065 dsremoved &= removed
3065 dsremoved &= removed
3066 # distinct between dirstate remove and other
3066 # distinct between dirstate remove and other
3067 removed -= dsremoved
3067 removed -= dsremoved
3068
3068
3069 modadded = added & dsmodified
3069 modadded = added & dsmodified
3070 added -= modadded
3070 added -= modadded
3071
3071
3072 # tell newly modified apart.
3072 # tell newly modified apart.
3073 dsmodified &= modified
3073 dsmodified &= modified
3074 dsmodified |= modified & dsadded # dirstate added may need backup
3074 dsmodified |= modified & dsadded # dirstate added may need backup
3075 modified -= dsmodified
3075 modified -= dsmodified
3076
3076
3077 # We need to wait for some post-processing to update this set
3077 # We need to wait for some post-processing to update this set
3078 # before making the distinction. The dirstate will be used for
3078 # before making the distinction. The dirstate will be used for
3079 # that purpose.
3079 # that purpose.
3080 dsadded = added
3080 dsadded = added
3081
3081
3082 # in case of merge, files that are actually added can be reported as
3082 # in case of merge, files that are actually added can be reported as
3083 # modified, we need to post process the result
3083 # modified, we need to post process the result
3084 if p2 != nullid:
3084 if p2 != nullid:
3085 mergeadd = dsmodified - smf
3085 mergeadd = dsmodified - smf
3086 dsadded |= mergeadd
3086 dsadded |= mergeadd
3087 dsmodified -= mergeadd
3087 dsmodified -= mergeadd
3088
3088
3089 # if f is a rename, update `names` to also revert the source
3089 # if f is a rename, update `names` to also revert the source
3090 cwd = repo.getcwd()
3090 cwd = repo.getcwd()
3091 for f in localchanges:
3091 for f in localchanges:
3092 src = repo.dirstate.copied(f)
3092 src = repo.dirstate.copied(f)
3093 # XXX should we check for rename down to target node?
3093 # XXX should we check for rename down to target node?
3094 if src and src not in names and repo.dirstate[src] == 'r':
3094 if src and src not in names and repo.dirstate[src] == 'r':
3095 dsremoved.add(src)
3095 dsremoved.add(src)
3096 names[src] = (repo.pathto(src, cwd), True)
3096 names[src] = (repo.pathto(src, cwd), True)
3097
3097
3098 # distinguish between file to forget and the other
3098 # distinguish between file to forget and the other
3099 added = set()
3099 added = set()
3100 for abs in dsadded:
3100 for abs in dsadded:
3101 if repo.dirstate[abs] != 'a':
3101 if repo.dirstate[abs] != 'a':
3102 added.add(abs)
3102 added.add(abs)
3103 dsadded -= added
3103 dsadded -= added
3104
3104
3105 for abs in deladded:
3105 for abs in deladded:
3106 if repo.dirstate[abs] == 'a':
3106 if repo.dirstate[abs] == 'a':
3107 dsadded.add(abs)
3107 dsadded.add(abs)
3108 deladded -= dsadded
3108 deladded -= dsadded
3109
3109
3110 # For files marked as removed, we check if an unknown file is present at
3110 # For files marked as removed, we check if an unknown file is present at
3111 # the same path. If a such file exists it may need to be backed up.
3111 # the same path. If a such file exists it may need to be backed up.
3112 # Making the distinction at this stage helps have simpler backup
3112 # Making the distinction at this stage helps have simpler backup
3113 # logic.
3113 # logic.
3114 removunk = set()
3114 removunk = set()
3115 for abs in removed:
3115 for abs in removed:
3116 target = repo.wjoin(abs)
3116 target = repo.wjoin(abs)
3117 if os.path.lexists(target):
3117 if os.path.lexists(target):
3118 removunk.add(abs)
3118 removunk.add(abs)
3119 removed -= removunk
3119 removed -= removunk
3120
3120
3121 dsremovunk = set()
3121 dsremovunk = set()
3122 for abs in dsremoved:
3122 for abs in dsremoved:
3123 target = repo.wjoin(abs)
3123 target = repo.wjoin(abs)
3124 if os.path.lexists(target):
3124 if os.path.lexists(target):
3125 dsremovunk.add(abs)
3125 dsremovunk.add(abs)
3126 dsremoved -= dsremovunk
3126 dsremoved -= dsremovunk
3127
3127
3128 # action to be actually performed by revert
3128 # action to be actually performed by revert
3129 # (<list of file>, message>) tuple
3129 # (<list of file>, message>) tuple
3130 actions = {'revert': ([], _('reverting %s\n')),
3130 actions = {'revert': ([], _('reverting %s\n')),
3131 'add': ([], _('adding %s\n')),
3131 'add': ([], _('adding %s\n')),
3132 'remove': ([], _('removing %s\n')),
3132 'remove': ([], _('removing %s\n')),
3133 'drop': ([], _('removing %s\n')),
3133 'drop': ([], _('removing %s\n')),
3134 'forget': ([], _('forgetting %s\n')),
3134 'forget': ([], _('forgetting %s\n')),
3135 'undelete': ([], _('undeleting %s\n')),
3135 'undelete': ([], _('undeleting %s\n')),
3136 'noop': (None, _('no changes needed to %s\n')),
3136 'noop': (None, _('no changes needed to %s\n')),
3137 'unknown': (None, _('file not managed: %s\n')),
3137 'unknown': (None, _('file not managed: %s\n')),
3138 }
3138 }
3139
3139
3140 # "constant" that convey the backup strategy.
3140 # "constant" that convey the backup strategy.
3141 # All set to `discard` if `no-backup` is set do avoid checking
3141 # All set to `discard` if `no-backup` is set do avoid checking
3142 # no_backup lower in the code.
3142 # no_backup lower in the code.
3143 # These values are ordered for comparison purposes
3143 # These values are ordered for comparison purposes
3144 backup = 2 # unconditionally do backup
3144 backup = 2 # unconditionally do backup
3145 check = 1 # check if the existing file differs from target
3145 check = 1 # check if the existing file differs from target
3146 discard = 0 # never do backup
3146 discard = 0 # never do backup
3147 if opts.get('no_backup'):
3147 if opts.get('no_backup'):
3148 backup = check = discard
3148 backup = check = discard
3149
3149
3150 backupanddel = actions['remove']
3150 backupanddel = actions['remove']
3151 if not opts.get('no_backup'):
3151 if not opts.get('no_backup'):
3152 backupanddel = actions['drop']
3152 backupanddel = actions['drop']
3153
3153
3154 disptable = (
3154 disptable = (
3155 # dispatch table:
3155 # dispatch table:
3156 # file state
3156 # file state
3157 # action
3157 # action
3158 # make backup
3158 # make backup
3159
3159
3160 ## Sets that results that will change file on disk
3160 ## Sets that results that will change file on disk
3161 # Modified compared to target, no local change
3161 # Modified compared to target, no local change
3162 (modified, actions['revert'], discard),
3162 (modified, actions['revert'], discard),
3163 # Modified compared to target, but local file is deleted
3163 # Modified compared to target, but local file is deleted
3164 (deleted, actions['revert'], discard),
3164 (deleted, actions['revert'], discard),
3165 # Modified compared to target, local change
3165 # Modified compared to target, local change
3166 (dsmodified, actions['revert'], backup),
3166 (dsmodified, actions['revert'], backup),
3167 # Added since target
3167 # Added since target
3168 (added, actions['remove'], discard),
3168 (added, actions['remove'], discard),
3169 # Added in working directory
3169 # Added in working directory
3170 (dsadded, actions['forget'], discard),
3170 (dsadded, actions['forget'], discard),
3171 # Added since target, have local modification
3171 # Added since target, have local modification
3172 (modadded, backupanddel, backup),
3172 (modadded, backupanddel, backup),
3173 # Added since target but file is missing in working directory
3173 # Added since target but file is missing in working directory
3174 (deladded, actions['drop'], discard),
3174 (deladded, actions['drop'], discard),
3175 # Removed since target, before working copy parent
3175 # Removed since target, before working copy parent
3176 (removed, actions['add'], discard),
3176 (removed, actions['add'], discard),
3177 # Same as `removed` but an unknown file exists at the same path
3177 # Same as `removed` but an unknown file exists at the same path
3178 (removunk, actions['add'], check),
3178 (removunk, actions['add'], check),
3179 # Removed since targe, marked as such in working copy parent
3179 # Removed since targe, marked as such in working copy parent
3180 (dsremoved, actions['undelete'], discard),
3180 (dsremoved, actions['undelete'], discard),
3181 # Same as `dsremoved` but an unknown file exists at the same path
3181 # Same as `dsremoved` but an unknown file exists at the same path
3182 (dsremovunk, actions['undelete'], check),
3182 (dsremovunk, actions['undelete'], check),
3183 ## the following sets does not result in any file changes
3183 ## the following sets does not result in any file changes
3184 # File with no modification
3184 # File with no modification
3185 (clean, actions['noop'], discard),
3185 (clean, actions['noop'], discard),
3186 # Existing file, not tracked anywhere
3186 # Existing file, not tracked anywhere
3187 (unknown, actions['unknown'], discard),
3187 (unknown, actions['unknown'], discard),
3188 )
3188 )
3189
3189
3190 for abs, (rel, exact) in sorted(names.items()):
3190 for abs, (rel, exact) in sorted(names.items()):
3191 # target file to be touch on disk (relative to cwd)
3191 # target file to be touch on disk (relative to cwd)
3192 target = repo.wjoin(abs)
3192 target = repo.wjoin(abs)
3193 # search the entry in the dispatch table.
3193 # search the entry in the dispatch table.
3194 # if the file is in any of these sets, it was touched in the working
3194 # if the file is in any of these sets, it was touched in the working
3195 # directory parent and we are sure it needs to be reverted.
3195 # directory parent and we are sure it needs to be reverted.
3196 for table, (xlist, msg), dobackup in disptable:
3196 for table, (xlist, msg), dobackup in disptable:
3197 if abs not in table:
3197 if abs not in table:
3198 continue
3198 continue
3199 if xlist is not None:
3199 if xlist is not None:
3200 xlist.append(abs)
3200 xlist.append(abs)
3201 if dobackup and (backup <= dobackup
3201 if dobackup and (backup <= dobackup
3202 or wctx[abs].cmp(ctx[abs])):
3202 or wctx[abs].cmp(ctx[abs])):
3203 bakname = scmutil.origpath(ui, repo, rel)
3203 bakname = scmutil.origpath(ui, repo, rel)
3204 ui.note(_('saving current version of %s as %s\n') %
3204 ui.note(_('saving current version of %s as %s\n') %
3205 (rel, bakname))
3205 (rel, bakname))
3206 if not opts.get('dry_run'):
3206 if not opts.get('dry_run'):
3207 if interactive:
3207 if interactive:
3208 util.copyfile(target, bakname)
3208 util.copyfile(target, bakname)
3209 else:
3209 else:
3210 util.rename(target, bakname)
3210 util.rename(target, bakname)
3211 if ui.verbose or not exact:
3211 if ui.verbose or not exact:
3212 if not isinstance(msg, basestring):
3212 if not isinstance(msg, basestring):
3213 msg = msg(abs)
3213 msg = msg(abs)
3214 ui.status(msg % rel)
3214 ui.status(msg % rel)
3215 elif exact:
3215 elif exact:
3216 ui.warn(msg % rel)
3216 ui.warn(msg % rel)
3217 break
3217 break
3218
3218
3219 if not opts.get('dry_run'):
3219 if not opts.get('dry_run'):
3220 needdata = ('revert', 'add', 'undelete')
3220 needdata = ('revert', 'add', 'undelete')
3221 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3221 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3222 _performrevert(repo, parents, ctx, actions, interactive)
3222 _performrevert(repo, parents, ctx, actions, interactive)
3223
3223
3224 if targetsubs:
3224 if targetsubs:
3225 # Revert the subrepos on the revert list
3225 # Revert the subrepos on the revert list
3226 for sub in targetsubs:
3226 for sub in targetsubs:
3227 try:
3227 try:
3228 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3228 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3229 except KeyError:
3229 except KeyError:
3230 raise error.Abort("subrepository '%s' does not exist in %s!"
3230 raise error.Abort("subrepository '%s' does not exist in %s!"
3231 % (sub, short(ctx.node())))
3231 % (sub, short(ctx.node())))
3232
3232
3233 def _revertprefetch(repo, ctx, *files):
3233 def _revertprefetch(repo, ctx, *files):
3234 """Let extension changing the storage layer prefetch content"""
3234 """Let extension changing the storage layer prefetch content"""
3235 pass
3235 pass
3236
3236
def _performrevert(repo, parents, ctx, actions, interactive=False):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.

    ``parents`` is the working directory's (parent, p2) node pair, ``ctx``
    the context being reverted to, and ``actions`` a mapping of action name
    -> (file list, message) as built by the caller.
    """
    parent, p2 = parents
    node = ctx.node()
    # files the user chose to keep in interactive mode; the list is shared
    # with matcher_opts, so appends below are seen by the matcher built later
    excluded_files = []
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        # write the target revision's content (and flags) of f to the
        # working directory
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    audit_path = pathutil.pathauditor(repo.root)
    # 'forget': files added in the dirstate that should stop being tracked;
    # interactively, the user may decline, which excludes the file from the
    # later revert matcher instead
    for f in actions['forget'][0]:
        if interactive:
            choice = \
                repo.ui.promptchoice(
                    _("forget added file %s (yn)?$$ &Yes $$ &No")
                    % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    # 'remove': delete the file from disk (best effort) and mark it removed
    for f in actions['remove'][0]:
        audit_path(f)
        try:
            util.unlinkpath(repo.wjoin(f))
        except OSError:
            # file may already be gone; removal is best-effort
            pass
        repo.dirstate.remove(f)
    # 'drop': mark removed without touching the on-disk file
    for f in actions['drop'][0]:
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        reversehunks = repo.ui.configbool('experimental',
                                          'revertalternateinteractivemode',
                                          True)
        # choose diff direction: either diff target->working (hunks must be
        # reversed before applying) or working->target (apply directly)
        if reversehunks:
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)

        try:

            # let the user pick hunks; note this rebinds 'opts'
            chunks, opts = recordfilter(repo.ui, originalchunks)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        # Apply changes
        fp = stringio()
        for c in chunks:
            c.write(fp)
        # nonzero iff at least one chunk was selected
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                # the '1' positional arg is presumably the strip level --
                # TODO confirm against patch.internalpatch's signature
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        # non-interactive: restore every file scheduled for revert
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    # same parent/merge reasoning as above for undeleted files
    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    # restore copy/rename metadata for files brought back from ctx
    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3351
3351
def command(table):
    """Returns a function object to be used as a decorator for making commands.

    This function receives a command table as its argument. The table should
    be a dict.

    The returned function can be used as a decorator for adding commands
    to that command table. This function accepts multiple arguments to define
    a command.

    The first argument is the command name.

    The options argument is an iterable of tuples defining command arguments.
    See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.

    The synopsis argument defines a short, one line summary of how to use the
    command. This shows up in the help output.

    The norepo argument defines whether the command does not require a
    local repository. Most commands operate against a repository, thus the
    default is False.

    The optionalrepo argument defines whether the command optionally requires
    a local repository.

    The inferrepo argument defines whether to try to find a repository from the
    command line arguments. If True, arguments will be examined for potential
    repository locations. See ``findrepo()``. If a repository is found, it
    will be used.
    """
    def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
            inferrepo=False):
        def decorator(func):
            # record repository requirements on the function object itself
            func.norepo = norepo
            func.optionalrepo = optionalrepo
            func.inferrepo = inferrepo
            # table entry is (func, options[, synopsis]) -- the synopsis
            # element is only present when one was supplied
            entry = (func, list(options))
            if synopsis:
                entry += (synopsis,)
            table[name] = entry
            return func
        return decorator
    return cmd
3396
3396
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()
3414
3414
# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# Each entry is a tuple of:
#  (state file, clearable, allowcommit, error, hint)
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3425
3425
def checkunfinished(repo, commit=False):
    '''Look for an unfinished multistep operation, like graft, and abort
    if found. It's probably good to check this right before
    bailifchanged().

    When ``commit`` is true, states whose 'allowcommit' flag is set are
    ignored.
    '''
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        skip = commit and allowcommit
        if not skip and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
3436
3436
def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    # first pass: abort if any non-clearable state is present, so we never
    # clear anything while such an operation is pending
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
    # second pass: remove every clearable state file that exists
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.join(statefile))
3447
3447
# (state file, command to finish the operation) pairs consulted by
# howtocontinue() below
afterresolvedstates = [
    ('graftstate',
     _('hg graft --continue')),
    ]
3452
3452
def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    afterresolvedstates tupples define a .hg/{file} and the corresponding
    command needed to finish it.

    Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
    a boolean.
    '''
    contmsg = _("continue: %s")
    # a pending state file wins: tell the user how to finish that operation
    for statefile, cmdmsg in afterresolvedstates:
        if repo.vfs.exists(statefile):
            return contmsg % cmdmsg, True
    # otherwise, a dirty working directory (including dirty subrepos)
    # suggests committing
    wctx = repo[None]
    dirty = any(repo.status()) or any(wctx.sub(s).dirty()
                                      for s in wctx.substate)
    if dirty:
        return contmsg % _("hg commit"), False
    return None, None
3473
3473
3474 def checkafterresolved(repo):
3474 def checkafterresolved(repo):
3475 '''Inform the user about the next action after completing hg resolve
3475 '''Inform the user about the next action after completing hg resolve
3476
3476
3477 If there's a matching afterresolvedstates, howtocontinue will yield
3477 If there's a matching afterresolvedstates, howtocontinue will yield
3478 repo.ui.warn as the reporter.
3478 repo.ui.warn as the reporter.
3479
3479
3480 Otherwise, it will yield repo.ui.note.
3480 Otherwise, it will yield repo.ui.note.
3481 '''
3481 '''
3482 msg, warning = howtocontinue(repo)
3482 msg, warning = howtocontinue(repo)
3483 if msg is not None:
3483 if msg is not None:
3484 if warning:
3484 if warning:
3485 repo.ui.warn("%s\n" % msg)
3485 repo.ui.warn("%s\n" % msg)
3486 else:
3486 else:
3487 repo.ui.note("%s\n" % msg)
3487 repo.ui.note("%s\n" % msg)
3488
3488
3489 def wrongtooltocontinue(repo, task):
3489 def wrongtooltocontinue(repo, task):
3490 '''Raise an abort suggesting how to properly continue if there is an
3490 '''Raise an abort suggesting how to properly continue if there is an
3491 active task.
3491 active task.
3492
3492
3493 Uses howtocontinue() to find the active task.
3493 Uses howtocontinue() to find the active task.
3494
3494
3495 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3495 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3496 a hint.
3496 a hint.
3497 '''
3497 '''
3498 after = howtocontinue(repo)
3498 after = howtocontinue(repo)
3499 hint = None
3499 hint = None
3500 if after[1]:
3500 if after[1]:
3501 hint = after[0]
3501 hint = after[0]
3502 raise error.Abort(_('no %s in progress') % task, hint=hint)
3502 raise error.Abort(_('no %s in progress') % task, hint=hint)
3503
3503
3504 class dirstateguard(object):
3504 class dirstateguard(object):
3505 '''Restore dirstate at unexpected failure.
3505 '''Restore dirstate at unexpected failure.
3506
3506
3507 At the construction, this class does:
3507 At the construction, this class does:
3508
3508
3509 - write current ``repo.dirstate`` out, and
3509 - write current ``repo.dirstate`` out, and
3510 - save ``.hg/dirstate`` into the backup file
3510 - save ``.hg/dirstate`` into the backup file
3511
3511
3512 This restores ``.hg/dirstate`` from backup file, if ``release()``
3512 This restores ``.hg/dirstate`` from backup file, if ``release()``
3513 is invoked before ``close()``.
3513 is invoked before ``close()``.
3514
3514
3515 This just removes the backup file at ``close()`` before ``release()``.
3515 This just removes the backup file at ``close()`` before ``release()``.
3516 '''
3516 '''
3517
3517
3518 def __init__(self, repo, name):
3518 def __init__(self, repo, name):
3519 self._repo = repo
3519 self._repo = repo
3520 self._suffix = '.backup.%s.%d' % (name, id(self))
3520 self._suffix = '.backup.%s.%d' % (name, id(self))
3521 repo.dirstate._savebackup(repo.currenttransaction(), self._suffix)
3521 repo.dirstate.savebackup(repo.currenttransaction(), self._suffix)
3522 self._active = True
3522 self._active = True
3523 self._closed = False
3523 self._closed = False
3524
3524
3525 def __del__(self):
3525 def __del__(self):
3526 if self._active: # still active
3526 if self._active: # still active
3527 # this may occur, even if this class is used correctly:
3527 # this may occur, even if this class is used correctly:
3528 # for example, releasing other resources like transaction
3528 # for example, releasing other resources like transaction
3529 # may raise exception before ``dirstateguard.release`` in
3529 # may raise exception before ``dirstateguard.release`` in
3530 # ``release(tr, ....)``.
3530 # ``release(tr, ....)``.
3531 self._abort()
3531 self._abort()
3532
3532
3533 def close(self):
3533 def close(self):
3534 if not self._active: # already inactivated
3534 if not self._active: # already inactivated
3535 msg = (_("can't close already inactivated backup: dirstate%s")
3535 msg = (_("can't close already inactivated backup: dirstate%s")
3536 % self._suffix)
3536 % self._suffix)
3537 raise error.Abort(msg)
3537 raise error.Abort(msg)
3538
3538
3539 self._repo.dirstate._clearbackup(self._repo.currenttransaction(),
3539 self._repo.dirstate.clearbackup(self._repo.currenttransaction(),
3540 self._suffix)
3540 self._suffix)
3541 self._active = False
3541 self._active = False
3542 self._closed = True
3542 self._closed = True
3543
3543
3544 def _abort(self):
3544 def _abort(self):
3545 self._repo.dirstate._restorebackup(self._repo.currenttransaction(),
3545 self._repo.dirstate.restorebackup(self._repo.currenttransaction(),
3546 self._suffix)
3546 self._suffix)
3547 self._active = False
3547 self._active = False
3548
3548
3549 def release(self):
3549 def release(self):
3550 if not self._closed:
3550 if not self._closed:
3551 if not self._active: # already inactivated
3551 if not self._active: # already inactivated
3552 msg = (_("can't release already inactivated backup:"
3552 msg = (_("can't release already inactivated backup:"
3553 " dirstate%s")
3553 " dirstate%s")
3554 % self._suffix)
3554 % self._suffix)
3555 raise error.Abort(msg)
3555 raise error.Abort(msg)
3556 self._abort()
3556 self._abort()
@@ -1,1242 +1,1242
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import errno
11 import errno
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import nullid
16 from .node import nullid
17 from . import (
17 from . import (
18 encoding,
18 encoding,
19 error,
19 error,
20 match as matchmod,
20 match as matchmod,
21 osutil,
21 osutil,
22 parsers,
22 parsers,
23 pathutil,
23 pathutil,
24 scmutil,
24 scmutil,
25 util,
25 util,
26 )
26 )
27
27
28 propertycache = util.propertycache
28 propertycache = util.propertycache
29 filecache = scmutil.filecache
29 filecache = scmutil.filecache
30 _rangemask = 0x7fffffff
30 _rangemask = 0x7fffffff
31
31
32 dirstatetuple = parsers.dirstatetuple
32 dirstatetuple = parsers.dirstatetuple
33
33
34 class repocache(filecache):
34 class repocache(filecache):
35 """filecache for files in .hg/"""
35 """filecache for files in .hg/"""
36 def join(self, obj, fname):
36 def join(self, obj, fname):
37 return obj._opener.join(fname)
37 return obj._opener.join(fname)
38
38
39 class rootcache(filecache):
39 class rootcache(filecache):
40 """filecache for files in the repository root"""
40 """filecache for files in the repository root"""
41 def join(self, obj, fname):
41 def join(self, obj, fname):
42 return obj._join(fname)
42 return obj._join(fname)
43
43
44 def _getfsnow(vfs):
44 def _getfsnow(vfs):
45 '''Get "now" timestamp on filesystem'''
45 '''Get "now" timestamp on filesystem'''
46 tmpfd, tmpname = vfs.mkstemp()
46 tmpfd, tmpname = vfs.mkstemp()
47 try:
47 try:
48 return os.fstat(tmpfd).st_mtime
48 return os.fstat(tmpfd).st_mtime
49 finally:
49 finally:
50 os.close(tmpfd)
50 os.close(tmpfd)
51 vfs.unlink(tmpname)
51 vfs.unlink(tmpname)
52
52
53 def nonnormalentries(dmap):
53 def nonnormalentries(dmap):
54 '''Compute the nonnormal dirstate entries from the dmap'''
54 '''Compute the nonnormal dirstate entries from the dmap'''
55 try:
55 try:
56 return parsers.nonnormalentries(dmap)
56 return parsers.nonnormalentries(dmap)
57 except AttributeError:
57 except AttributeError:
58 return set(fname for fname, e in dmap.iteritems()
58 return set(fname for fname, e in dmap.iteritems()
59 if e[0] != 'n' or e[3] == -1)
59 if e[0] != 'n' or e[3] == -1)
60
60
61 def _trypending(root, vfs, filename):
61 def _trypending(root, vfs, filename):
62 '''Open file to be read according to HG_PENDING environment variable
62 '''Open file to be read according to HG_PENDING environment variable
63
63
64 This opens '.pending' of specified 'filename' only when HG_PENDING
64 This opens '.pending' of specified 'filename' only when HG_PENDING
65 is equal to 'root'.
65 is equal to 'root'.
66
66
67 This returns '(fp, is_pending_opened)' tuple.
67 This returns '(fp, is_pending_opened)' tuple.
68 '''
68 '''
69 if root == os.environ.get('HG_PENDING'):
69 if root == os.environ.get('HG_PENDING'):
70 try:
70 try:
71 return (vfs('%s.pending' % filename), True)
71 return (vfs('%s.pending' % filename), True)
72 except IOError as inst:
72 except IOError as inst:
73 if inst.errno != errno.ENOENT:
73 if inst.errno != errno.ENOENT:
74 raise
74 raise
75 return (vfs(filename), False)
75 return (vfs(filename), False)
76
76
77 class dirstate(object):
77 class dirstate(object):
78
78
79 def __init__(self, opener, ui, root, validate):
79 def __init__(self, opener, ui, root, validate):
80 '''Create a new dirstate object.
80 '''Create a new dirstate object.
81
81
82 opener is an open()-like callable that can be used to open the
82 opener is an open()-like callable that can be used to open the
83 dirstate file; root is the root of the directory tracked by
83 dirstate file; root is the root of the directory tracked by
84 the dirstate.
84 the dirstate.
85 '''
85 '''
86 self._opener = opener
86 self._opener = opener
87 self._validate = validate
87 self._validate = validate
88 self._root = root
88 self._root = root
89 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
89 # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
90 # UNC path pointing to root share (issue4557)
90 # UNC path pointing to root share (issue4557)
91 self._rootdir = pathutil.normasprefix(root)
91 self._rootdir = pathutil.normasprefix(root)
92 # internal config: ui.forcecwd
92 # internal config: ui.forcecwd
93 forcecwd = ui.config('ui', 'forcecwd')
93 forcecwd = ui.config('ui', 'forcecwd')
94 if forcecwd:
94 if forcecwd:
95 self._cwd = forcecwd
95 self._cwd = forcecwd
96 self._dirty = False
96 self._dirty = False
97 self._dirtypl = False
97 self._dirtypl = False
98 self._lastnormaltime = 0
98 self._lastnormaltime = 0
99 self._ui = ui
99 self._ui = ui
100 self._filecache = {}
100 self._filecache = {}
101 self._parentwriters = 0
101 self._parentwriters = 0
102 self._filename = 'dirstate'
102 self._filename = 'dirstate'
103 self._pendingfilename = '%s.pending' % self._filename
103 self._pendingfilename = '%s.pending' % self._filename
104
104
105 # for consistent view between _pl() and _read() invocations
105 # for consistent view between _pl() and _read() invocations
106 self._pendingmode = None
106 self._pendingmode = None
107
107
108 def beginparentchange(self):
108 def beginparentchange(self):
109 '''Marks the beginning of a set of changes that involve changing
109 '''Marks the beginning of a set of changes that involve changing
110 the dirstate parents. If there is an exception during this time,
110 the dirstate parents. If there is an exception during this time,
111 the dirstate will not be written when the wlock is released. This
111 the dirstate will not be written when the wlock is released. This
112 prevents writing an incoherent dirstate where the parent doesn't
112 prevents writing an incoherent dirstate where the parent doesn't
113 match the contents.
113 match the contents.
114 '''
114 '''
115 self._parentwriters += 1
115 self._parentwriters += 1
116
116
117 def endparentchange(self):
117 def endparentchange(self):
118 '''Marks the end of a set of changes that involve changing the
118 '''Marks the end of a set of changes that involve changing the
119 dirstate parents. Once all parent changes have been marked done,
119 dirstate parents. Once all parent changes have been marked done,
120 the wlock will be free to write the dirstate on release.
120 the wlock will be free to write the dirstate on release.
121 '''
121 '''
122 if self._parentwriters > 0:
122 if self._parentwriters > 0:
123 self._parentwriters -= 1
123 self._parentwriters -= 1
124
124
125 def pendingparentchange(self):
125 def pendingparentchange(self):
126 '''Returns true if the dirstate is in the middle of a set of changes
126 '''Returns true if the dirstate is in the middle of a set of changes
127 that modify the dirstate parent.
127 that modify the dirstate parent.
128 '''
128 '''
129 return self._parentwriters > 0
129 return self._parentwriters > 0
130
130
131 @propertycache
131 @propertycache
132 def _map(self):
132 def _map(self):
133 '''Return the dirstate contents as a map from filename to
133 '''Return the dirstate contents as a map from filename to
134 (state, mode, size, time).'''
134 (state, mode, size, time).'''
135 self._read()
135 self._read()
136 return self._map
136 return self._map
137
137
138 @propertycache
138 @propertycache
139 def _copymap(self):
139 def _copymap(self):
140 self._read()
140 self._read()
141 return self._copymap
141 return self._copymap
142
142
143 @propertycache
143 @propertycache
144 def _nonnormalset(self):
144 def _nonnormalset(self):
145 return nonnormalentries(self._map)
145 return nonnormalentries(self._map)
146
146
147 @propertycache
147 @propertycache
148 def _filefoldmap(self):
148 def _filefoldmap(self):
149 try:
149 try:
150 makefilefoldmap = parsers.make_file_foldmap
150 makefilefoldmap = parsers.make_file_foldmap
151 except AttributeError:
151 except AttributeError:
152 pass
152 pass
153 else:
153 else:
154 return makefilefoldmap(self._map, util.normcasespec,
154 return makefilefoldmap(self._map, util.normcasespec,
155 util.normcasefallback)
155 util.normcasefallback)
156
156
157 f = {}
157 f = {}
158 normcase = util.normcase
158 normcase = util.normcase
159 for name, s in self._map.iteritems():
159 for name, s in self._map.iteritems():
160 if s[0] != 'r':
160 if s[0] != 'r':
161 f[normcase(name)] = name
161 f[normcase(name)] = name
162 f['.'] = '.' # prevents useless util.fspath() invocation
162 f['.'] = '.' # prevents useless util.fspath() invocation
163 return f
163 return f
164
164
165 @propertycache
165 @propertycache
166 def _dirfoldmap(self):
166 def _dirfoldmap(self):
167 f = {}
167 f = {}
168 normcase = util.normcase
168 normcase = util.normcase
169 for name in self._dirs:
169 for name in self._dirs:
170 f[normcase(name)] = name
170 f[normcase(name)] = name
171 return f
171 return f
172
172
173 @repocache('branch')
173 @repocache('branch')
174 def _branch(self):
174 def _branch(self):
175 try:
175 try:
176 return self._opener.read("branch").strip() or "default"
176 return self._opener.read("branch").strip() or "default"
177 except IOError as inst:
177 except IOError as inst:
178 if inst.errno != errno.ENOENT:
178 if inst.errno != errno.ENOENT:
179 raise
179 raise
180 return "default"
180 return "default"
181
181
182 @propertycache
182 @propertycache
183 def _pl(self):
183 def _pl(self):
184 try:
184 try:
185 fp = self._opendirstatefile()
185 fp = self._opendirstatefile()
186 st = fp.read(40)
186 st = fp.read(40)
187 fp.close()
187 fp.close()
188 l = len(st)
188 l = len(st)
189 if l == 40:
189 if l == 40:
190 return st[:20], st[20:40]
190 return st[:20], st[20:40]
191 elif l > 0 and l < 40:
191 elif l > 0 and l < 40:
192 raise error.Abort(_('working directory state appears damaged!'))
192 raise error.Abort(_('working directory state appears damaged!'))
193 except IOError as err:
193 except IOError as err:
194 if err.errno != errno.ENOENT:
194 if err.errno != errno.ENOENT:
195 raise
195 raise
196 return [nullid, nullid]
196 return [nullid, nullid]
197
197
198 @propertycache
198 @propertycache
199 def _dirs(self):
199 def _dirs(self):
200 return util.dirs(self._map, 'r')
200 return util.dirs(self._map, 'r')
201
201
202 def dirs(self):
202 def dirs(self):
203 return self._dirs
203 return self._dirs
204
204
205 @rootcache('.hgignore')
205 @rootcache('.hgignore')
206 def _ignore(self):
206 def _ignore(self):
207 files = self._ignorefiles()
207 files = self._ignorefiles()
208 if not files:
208 if not files:
209 return util.never
209 return util.never
210
210
211 pats = ['include:%s' % f for f in files]
211 pats = ['include:%s' % f for f in files]
212 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
212 return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
213
213
214 @propertycache
214 @propertycache
215 def _slash(self):
215 def _slash(self):
216 return self._ui.configbool('ui', 'slash') and os.sep != '/'
216 return self._ui.configbool('ui', 'slash') and os.sep != '/'
217
217
218 @propertycache
218 @propertycache
219 def _checklink(self):
219 def _checklink(self):
220 return util.checklink(self._root)
220 return util.checklink(self._root)
221
221
222 @propertycache
222 @propertycache
223 def _checkexec(self):
223 def _checkexec(self):
224 return util.checkexec(self._root)
224 return util.checkexec(self._root)
225
225
226 @propertycache
226 @propertycache
227 def _checkcase(self):
227 def _checkcase(self):
228 return not util.checkcase(self._join('.hg'))
228 return not util.checkcase(self._join('.hg'))
229
229
230 def _join(self, f):
230 def _join(self, f):
231 # much faster than os.path.join()
231 # much faster than os.path.join()
232 # it's safe because f is always a relative path
232 # it's safe because f is always a relative path
233 return self._rootdir + f
233 return self._rootdir + f
234
234
235 def flagfunc(self, buildfallback):
235 def flagfunc(self, buildfallback):
236 if self._checklink and self._checkexec:
236 if self._checklink and self._checkexec:
237 def f(x):
237 def f(x):
238 try:
238 try:
239 st = os.lstat(self._join(x))
239 st = os.lstat(self._join(x))
240 if util.statislink(st):
240 if util.statislink(st):
241 return 'l'
241 return 'l'
242 if util.statisexec(st):
242 if util.statisexec(st):
243 return 'x'
243 return 'x'
244 except OSError:
244 except OSError:
245 pass
245 pass
246 return ''
246 return ''
247 return f
247 return f
248
248
249 fallback = buildfallback()
249 fallback = buildfallback()
250 if self._checklink:
250 if self._checklink:
251 def f(x):
251 def f(x):
252 if os.path.islink(self._join(x)):
252 if os.path.islink(self._join(x)):
253 return 'l'
253 return 'l'
254 if 'x' in fallback(x):
254 if 'x' in fallback(x):
255 return 'x'
255 return 'x'
256 return ''
256 return ''
257 return f
257 return f
258 if self._checkexec:
258 if self._checkexec:
259 def f(x):
259 def f(x):
260 if 'l' in fallback(x):
260 if 'l' in fallback(x):
261 return 'l'
261 return 'l'
262 if util.isexec(self._join(x)):
262 if util.isexec(self._join(x)):
263 return 'x'
263 return 'x'
264 return ''
264 return ''
265 return f
265 return f
266 else:
266 else:
267 return fallback
267 return fallback
268
268
269 @propertycache
269 @propertycache
270 def _cwd(self):
270 def _cwd(self):
271 return os.getcwd()
271 return os.getcwd()
272
272
273 def getcwd(self):
273 def getcwd(self):
274 '''Return the path from which a canonical path is calculated.
274 '''Return the path from which a canonical path is calculated.
275
275
276 This path should be used to resolve file patterns or to convert
276 This path should be used to resolve file patterns or to convert
277 canonical paths back to file paths for display. It shouldn't be
277 canonical paths back to file paths for display. It shouldn't be
278 used to get real file paths. Use vfs functions instead.
278 used to get real file paths. Use vfs functions instead.
279 '''
279 '''
280 cwd = self._cwd
280 cwd = self._cwd
281 if cwd == self._root:
281 if cwd == self._root:
282 return ''
282 return ''
283 # self._root ends with a path separator if self._root is '/' or 'C:\'
283 # self._root ends with a path separator if self._root is '/' or 'C:\'
284 rootsep = self._root
284 rootsep = self._root
285 if not util.endswithsep(rootsep):
285 if not util.endswithsep(rootsep):
286 rootsep += os.sep
286 rootsep += os.sep
287 if cwd.startswith(rootsep):
287 if cwd.startswith(rootsep):
288 return cwd[len(rootsep):]
288 return cwd[len(rootsep):]
289 else:
289 else:
290 # we're outside the repo. return an absolute path.
290 # we're outside the repo. return an absolute path.
291 return cwd
291 return cwd
292
292
293 def pathto(self, f, cwd=None):
293 def pathto(self, f, cwd=None):
294 if cwd is None:
294 if cwd is None:
295 cwd = self.getcwd()
295 cwd = self.getcwd()
296 path = util.pathto(self._root, cwd, f)
296 path = util.pathto(self._root, cwd, f)
297 if self._slash:
297 if self._slash:
298 return util.pconvert(path)
298 return util.pconvert(path)
299 return path
299 return path
300
300
301 def __getitem__(self, key):
301 def __getitem__(self, key):
302 '''Return the current state of key (a filename) in the dirstate.
302 '''Return the current state of key (a filename) in the dirstate.
303
303
304 States are:
304 States are:
305 n normal
305 n normal
306 m needs merging
306 m needs merging
307 r marked for removal
307 r marked for removal
308 a marked for addition
308 a marked for addition
309 ? not tracked
309 ? not tracked
310 '''
310 '''
311 return self._map.get(key, ("?",))[0]
311 return self._map.get(key, ("?",))[0]
312
312
313 def __contains__(self, key):
313 def __contains__(self, key):
314 return key in self._map
314 return key in self._map
315
315
316 def __iter__(self):
316 def __iter__(self):
317 for x in sorted(self._map):
317 for x in sorted(self._map):
318 yield x
318 yield x
319
319
320 def iteritems(self):
320 def iteritems(self):
321 return self._map.iteritems()
321 return self._map.iteritems()
322
322
323 def parents(self):
323 def parents(self):
324 return [self._validate(p) for p in self._pl]
324 return [self._validate(p) for p in self._pl]
325
325
326 def p1(self):
326 def p1(self):
327 return self._validate(self._pl[0])
327 return self._validate(self._pl[0])
328
328
329 def p2(self):
329 def p2(self):
330 return self._validate(self._pl[1])
330 return self._validate(self._pl[1])
331
331
332 def branch(self):
332 def branch(self):
333 return encoding.tolocal(self._branch)
333 return encoding.tolocal(self._branch)
334
334
335 def setparents(self, p1, p2=nullid):
335 def setparents(self, p1, p2=nullid):
336 """Set dirstate parents to p1 and p2.
336 """Set dirstate parents to p1 and p2.
337
337
338 When moving from two parents to one, 'm' merged entries a
338 When moving from two parents to one, 'm' merged entries a
339 adjusted to normal and previous copy records discarded and
339 adjusted to normal and previous copy records discarded and
340 returned by the call.
340 returned by the call.
341
341
342 See localrepo.setparents()
342 See localrepo.setparents()
343 """
343 """
344 if self._parentwriters == 0:
344 if self._parentwriters == 0:
345 raise ValueError("cannot set dirstate parent without "
345 raise ValueError("cannot set dirstate parent without "
346 "calling dirstate.beginparentchange")
346 "calling dirstate.beginparentchange")
347
347
348 self._dirty = self._dirtypl = True
348 self._dirty = self._dirtypl = True
349 oldp2 = self._pl[1]
349 oldp2 = self._pl[1]
350 self._pl = p1, p2
350 self._pl = p1, p2
351 copies = {}
351 copies = {}
352 if oldp2 != nullid and p2 == nullid:
352 if oldp2 != nullid and p2 == nullid:
353 for f, s in self._map.iteritems():
353 for f, s in self._map.iteritems():
354 # Discard 'm' markers when moving away from a merge state
354 # Discard 'm' markers when moving away from a merge state
355 if s[0] == 'm':
355 if s[0] == 'm':
356 if f in self._copymap:
356 if f in self._copymap:
357 copies[f] = self._copymap[f]
357 copies[f] = self._copymap[f]
358 self.normallookup(f)
358 self.normallookup(f)
359 # Also fix up otherparent markers
359 # Also fix up otherparent markers
360 elif s[0] == 'n' and s[2] == -2:
360 elif s[0] == 'n' and s[2] == -2:
361 if f in self._copymap:
361 if f in self._copymap:
362 copies[f] = self._copymap[f]
362 copies[f] = self._copymap[f]
363 self.add(f)
363 self.add(f)
364 return copies
364 return copies
365
365
366 def setbranch(self, branch):
366 def setbranch(self, branch):
367 self._branch = encoding.fromlocal(branch)
367 self._branch = encoding.fromlocal(branch)
368 f = self._opener('branch', 'w', atomictemp=True)
368 f = self._opener('branch', 'w', atomictemp=True)
369 try:
369 try:
370 f.write(self._branch + '\n')
370 f.write(self._branch + '\n')
371 f.close()
371 f.close()
372
372
373 # make sure filecache has the correct stat info for _branch after
373 # make sure filecache has the correct stat info for _branch after
374 # replacing the underlying file
374 # replacing the underlying file
375 ce = self._filecache['_branch']
375 ce = self._filecache['_branch']
376 if ce:
376 if ce:
377 ce.refresh()
377 ce.refresh()
378 except: # re-raises
378 except: # re-raises
379 f.discard()
379 f.discard()
380 raise
380 raise
381
381
382 def _opendirstatefile(self):
382 def _opendirstatefile(self):
383 fp, mode = _trypending(self._root, self._opener, self._filename)
383 fp, mode = _trypending(self._root, self._opener, self._filename)
384 if self._pendingmode is not None and self._pendingmode != mode:
384 if self._pendingmode is not None and self._pendingmode != mode:
385 fp.close()
385 fp.close()
386 raise error.Abort(_('working directory state may be '
386 raise error.Abort(_('working directory state may be '
387 'changed parallelly'))
387 'changed parallelly'))
388 self._pendingmode = mode
388 self._pendingmode = mode
389 return fp
389 return fp
390
390
391 def _read(self):
391 def _read(self):
392 self._map = {}
392 self._map = {}
393 self._copymap = {}
393 self._copymap = {}
394 try:
394 try:
395 fp = self._opendirstatefile()
395 fp = self._opendirstatefile()
396 try:
396 try:
397 st = fp.read()
397 st = fp.read()
398 finally:
398 finally:
399 fp.close()
399 fp.close()
400 except IOError as err:
400 except IOError as err:
401 if err.errno != errno.ENOENT:
401 if err.errno != errno.ENOENT:
402 raise
402 raise
403 return
403 return
404 if not st:
404 if not st:
405 return
405 return
406
406
407 if util.safehasattr(parsers, 'dict_new_presized'):
407 if util.safehasattr(parsers, 'dict_new_presized'):
408 # Make an estimate of the number of files in the dirstate based on
408 # Make an estimate of the number of files in the dirstate based on
409 # its size. From a linear regression on a set of real-world repos,
409 # its size. From a linear regression on a set of real-world repos,
410 # all over 10,000 files, the size of a dirstate entry is 85
410 # all over 10,000 files, the size of a dirstate entry is 85
411 # bytes. The cost of resizing is significantly higher than the cost
411 # bytes. The cost of resizing is significantly higher than the cost
412 # of filling in a larger presized dict, so subtract 20% from the
412 # of filling in a larger presized dict, so subtract 20% from the
413 # size.
413 # size.
414 #
414 #
415 # This heuristic is imperfect in many ways, so in a future dirstate
415 # This heuristic is imperfect in many ways, so in a future dirstate
416 # format update it makes sense to just record the number of entries
416 # format update it makes sense to just record the number of entries
417 # on write.
417 # on write.
418 self._map = parsers.dict_new_presized(len(st) / 71)
418 self._map = parsers.dict_new_presized(len(st) / 71)
419
419
420 # Python's garbage collector triggers a GC each time a certain number
420 # Python's garbage collector triggers a GC each time a certain number
421 # of container objects (the number being defined by
421 # of container objects (the number being defined by
422 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
422 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
423 # for each file in the dirstate. The C version then immediately marks
423 # for each file in the dirstate. The C version then immediately marks
424 # them as not to be tracked by the collector. However, this has no
424 # them as not to be tracked by the collector. However, this has no
425 # effect on when GCs are triggered, only on what objects the GC looks
425 # effect on when GCs are triggered, only on what objects the GC looks
426 # into. This means that O(number of files) GCs are unavoidable.
426 # into. This means that O(number of files) GCs are unavoidable.
427 # Depending on when in the process's lifetime the dirstate is parsed,
427 # Depending on when in the process's lifetime the dirstate is parsed,
428 # this can get very expensive. As a workaround, disable GC while
428 # this can get very expensive. As a workaround, disable GC while
429 # parsing the dirstate.
429 # parsing the dirstate.
430 #
430 #
431 # (we cannot decorate the function directly since it is in a C module)
431 # (we cannot decorate the function directly since it is in a C module)
432 parse_dirstate = util.nogc(parsers.parse_dirstate)
432 parse_dirstate = util.nogc(parsers.parse_dirstate)
433 p = parse_dirstate(self._map, self._copymap, st)
433 p = parse_dirstate(self._map, self._copymap, st)
434 if not self._dirtypl:
434 if not self._dirtypl:
435 self._pl = p
435 self._pl = p
436
436
437 def invalidate(self):
437 def invalidate(self):
438 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
438 for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch",
439 "_pl", "_dirs", "_ignore", "_nonnormalset"):
439 "_pl", "_dirs", "_ignore", "_nonnormalset"):
440 if a in self.__dict__:
440 if a in self.__dict__:
441 delattr(self, a)
441 delattr(self, a)
442 self._lastnormaltime = 0
442 self._lastnormaltime = 0
443 self._dirty = False
443 self._dirty = False
444 self._parentwriters = 0
444 self._parentwriters = 0
445
445
446 def copy(self, source, dest):
446 def copy(self, source, dest):
447 """Mark dest as a copy of source. Unmark dest if source is None."""
447 """Mark dest as a copy of source. Unmark dest if source is None."""
448 if source == dest:
448 if source == dest:
449 return
449 return
450 self._dirty = True
450 self._dirty = True
451 if source is not None:
451 if source is not None:
452 self._copymap[dest] = source
452 self._copymap[dest] = source
453 elif dest in self._copymap:
453 elif dest in self._copymap:
454 del self._copymap[dest]
454 del self._copymap[dest]
455
455
456 def copied(self, file):
456 def copied(self, file):
457 return self._copymap.get(file, None)
457 return self._copymap.get(file, None)
458
458
459 def copies(self):
459 def copies(self):
460 return self._copymap
460 return self._copymap
461
461
462 def _droppath(self, f):
462 def _droppath(self, f):
463 if self[f] not in "?r" and "_dirs" in self.__dict__:
463 if self[f] not in "?r" and "_dirs" in self.__dict__:
464 self._dirs.delpath(f)
464 self._dirs.delpath(f)
465
465
466 if "_filefoldmap" in self.__dict__:
466 if "_filefoldmap" in self.__dict__:
467 normed = util.normcase(f)
467 normed = util.normcase(f)
468 if normed in self._filefoldmap:
468 if normed in self._filefoldmap:
469 del self._filefoldmap[normed]
469 del self._filefoldmap[normed]
470
470
def _addpath(self, f, state, mode, size, mtime):
    # Record file f in the dirstate with the given state and stat data,
    # first validating that the name is legal and does not collide with
    # an existing directory or shadow another tracked file.
    oldstate = self[f]
    if state == 'a' or oldstate == 'r':
        # newly added (or resurrected after removal): run the
        # collision checks
        scmutil.checkfilename(f)
        if f in self._dirs:
            raise error.Abort(_('directory %r already in dirstate') % f)
        # shadows
        for d in util.finddirs(f):
            if d in self._dirs:
                break
            if d in self._map and self[d] != 'r':
                raise error.Abort(
                    _('file %r in dirstate clashes with %r') % (d, f))
    if oldstate in "?r" and "_dirs" in self.__dict__:
        # path becomes tracked: keep the cached directory set in sync
        self._dirs.addpath(f)
    self._dirty = True
    self._map[f] = dirstatetuple(state, mode, size, mtime)
    if state != 'n' or mtime == -1:
        # any non-clean entry must be re-examined on the next status run
        self._nonnormalset.add(f)
490
490
def normal(self, f):
    """Mark a file normal and clean.

    Mode, size and mtime are taken from the working directory copy of
    ``f``; any copy record and non-normal marking are cleared.
    """
    st = os.lstat(self._join(f))
    mtime = st.st_mtime
    self._addpath(f, 'n', st.st_mode,
                  st.st_size & _rangemask, mtime & _rangemask)
    self._copymap.pop(f, None)
    self._nonnormalset.discard(f)
    if mtime > self._lastnormaltime:
        # Remember the most recent modification timeslot for status(),
        # to make sure we won't miss future size-preserving file content
        # modifications that happen within the same timeslot.
        self._lastnormaltime = mtime
506
506
def normallookup(self, f):
    '''Mark a file normal, but possibly dirty.'''
    if self._pl[1] != nullid and f in self._map:
        # if there is a merge going on and the file was either
        # in state 'm' (-1) or coming from other parent (-2) before
        # being removed, restore that state.
        entry = self._map[f]
        if entry[0] == 'r' and entry[2] in (-1, -2):
            source = self._copymap.get(f)
            if entry[2] == -1:
                self.merge(f)
            elif entry[2] == -2:
                self.otherparent(f)
            if source:
                # merge()/otherparent() cleared the copy record;
                # re-establish it
                self.copy(source, f)
            return
        if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
            # already merged, or already marked as from the other
            # parent: nothing to do
            return
    # 'n' with size -1 and mtime -1 means "needs full comparison"
    self._addpath(f, 'n', 0, -1, -1)
    if f in self._copymap:
        del self._copymap[f]
    if f in self._nonnormalset:
        self._nonnormalset.remove(f)
530
530
def otherparent(self, f):
    """Mark ``f`` as coming from the other (second) parent.

    The entry is always considered dirty afterwards.  Only valid while
    a merge is in progress; aborts otherwise.
    """
    if self._pl[1] == nullid:
        raise error.Abort(_("setting %r to other parent "
                            "only allowed in merges") % f)
    if f in self and self[f] == 'n':
        # the file already exists on this side: merge-like entry
        self._addpath(f, 'm', 0, -2, -1)
    else:
        # the file is new on this side: add-like entry
        self._addpath(f, 'n', 0, -2, -1)

    self._copymap.pop(f, None)
545
545
def add(self, f):
    """Mark a file added.

    Added entries carry no meaningful stat data (mode 0, size/mtime -1)
    and any stale copy record is dropped.
    """
    self._addpath(f, 'a', 0, -1, -1)
    self._copymap.pop(f, None)
551
551
def remove(self, f):
    '''Mark a file removed.'''
    self._dirty = True
    self._droppath(f)
    # The size field of an 'r' entry encodes the pre-removal merge
    # state so normallookup() can restore it later: -1 for a merged
    # file, -2 for a file from the other parent, 0 otherwise.
    size = 0
    if self._pl[1] != nullid and f in self._map:
        # backup the previous state
        entry = self._map[f]
        if entry[0] == 'm': # merge
            size = -1
        elif entry[0] == 'n' and entry[2] == -2: # other parent
            size = -2
    self._map[f] = dirstatetuple('r', 0, size, 0)
    self._nonnormalset.add(f)
    # keep the copy record when merge state was backed up (size != 0),
    # it is needed to restore the state on normallookup()
    if size == 0 and f in self._copymap:
        del self._copymap[f]
568
568
def merge(self, f):
    """Mark a file merged.

    Outside of a merge there is nothing to merge with, so the file is
    treated as a possibly-dirty normal file instead.
    """
    if self._pl[1] != nullid:
        return self.otherparent(f)
    return self.normallookup(f)
574
574
def drop(self, f):
    """Drop a file from the dirstate entirely (no-op if untracked)."""
    if f not in self._map:
        return
    self._dirty = True
    self._droppath(f)
    del self._map[f]
    self._nonnormalset.discard(f)
583
583
def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
    # Discover the canonical on-disk case of ``path`` (``normed`` is its
    # case-normalized form) and cache the answer in ``storemap`` -- but
    # only for paths that actually exist, so missing paths can still be
    # re-discovered later once created.
    if exists is None:
        exists = os.path.lexists(os.path.join(self._root, path))
    if not exists:
        # Maybe a path component exists
        if not ignoremissing and '/' in path:
            d, f = path.rsplit('/', 1)
            d = self._normalize(d, False, ignoremissing, None)
            folded = d + "/" + f
        else:
            # No path components, preserve original case
            folded = path
    else:
        # recursively normalize leading directory components
        # against dirstate
        if '/' in normed:
            d, f = normed.rsplit('/', 1)
            d = self._normalize(d, False, ignoremissing, True)
            r = self._root + "/" + d
            folded = d + "/" + util.fspath(f, r)
        else:
            folded = util.fspath(normed, self._root)
        storemap[normed] = folded

    return folded
609
609
def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
    """Return the canonical case of ``path``, consulting files only.

    The cached file fold map is checked first; when ``isknown`` is true
    the given spelling is trusted, otherwise the case is discovered on
    disk (and cached in the file fold map).
    """
    normed = util.normcase(path)
    folded = self._filefoldmap.get(normed)
    if folded is not None:
        return folded
    if isknown:
        return path
    return self._discoverpath(path, normed, ignoremissing, exists,
                              self._filefoldmap)
620
620
def _normalize(self, path, isknown, ignoremissing=False, exists=None):
    """Return the canonical case of ``path`` (file or directory).

    The file fold map is consulted first, then the directory fold map.
    Disk discovery results are cached in the *directory* fold map so
    later _normalizefile() calls don't start matching directories.
    """
    normed = util.normcase(path)
    folded = self._filefoldmap.get(normed)
    if folded is None:
        folded = self._dirfoldmap.get(normed)
    if folded is None:
        if isknown:
            folded = path
        else:
            folded = self._discoverpath(path, normed, ignoremissing,
                                        exists, self._dirfoldmap)
    return folded
635
635
def normalize(self, path, isknown=False, ignoremissing=False):
    """Return the canonical case of ``path`` on casefolding filesystems.

    On case-sensitive filesystems the path is returned unchanged.

    ``isknown`` specifies whether the filename came from walking the
    disk, to avoid extra filesystem access.  If ``ignoremissing`` is
    True, missing paths are returned unchanged; otherwise we try harder
    to normalize possibly existing path components.

    Precedence for the normalized case:

    - version of the name already stored in the dirstate
    - version of the name stored on disk
    - version provided via command arguments
    """
    if not self._checkcase:
        return path
    return self._normalize(path, isknown, ignoremissing)
657
657
def clear(self):
    """Reset the dirstate to empty, with null parents, marked dirty."""
    self._map = {}
    self._nonnormalset = set()
    # drop the cached directory structure; it is rebuilt lazily
    if "_dirs" in self.__dict__:
        delattr(self, "_dirs")
    self._copymap = {}
    self._pl = [nullid, nullid]
    self._lastnormaltime = 0
    self._dirty = True
667
667
def rebuild(self, parent, allfiles, changedfiles=None):
    """Reset dirstate entries for ``changedfiles`` from manifest ``allfiles``.

    With ``changedfiles=None`` the entire dirstate is rebuilt.  The
    working directory is left with ``parent`` as its single parent.
    """
    if changedfiles is None:
        # Rebuild entire dirstate
        changedfiles = allfiles
    # clear() resets _lastnormaltime, but status() relies on it to spot
    # same-timeslot modifications: preserve it across the rebuild
    lastnormaltime = self._lastnormaltime
    self.clear()
    self._lastnormaltime = lastnormaltime

    for f in changedfiles:
        if f in allfiles:
            # size -1 forces a content comparison on the next status
            mode = 0o777 if 'x' in allfiles.flags(f) else 0o666
            self._map[f] = dirstatetuple('n', mode, -1, 0)
        else:
            self._map.pop(f, None)
            self._nonnormalset.discard(f)

    self._pl = (parent, nullid)
    self._dirty = True
690
690
def write(self, tr=False):
    """Write the dirstate out, or register a delayed write on ``tr``.

    ``tr`` is the current transaction (or None for an immediate
    write); the default of False marks legacy callers that have not
    been updated to pass repo.currenttransaction() explicitly and
    triggers a developer warning.
    """
    if not self._dirty:
        return

    filename = self._filename
    if tr is False: # not explicitly specified
        self._ui.develwarn('use dirstate.write with '
                           'repo.currenttransaction()',
                           config='check-dirstate-write')

    if self._opener.lexists(self._pendingfilename):
        # if pending file already exists, in-memory changes
        # should be written into it, because it has priority
        # to '.hg/dirstate' at reading under HG_PENDING mode
        filename = self._pendingfilename
    elif tr:
        # 'dirstate.write()' is not only for writing in-memory
        # changes out, but also for dropping ambiguous timestamp.
        # delayed writing re-raise "ambiguous timestamp issue".
        # See also the wiki page below for detail:
        # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

        # emulate dropping timestamp in 'parsers.pack_dirstate'
        now = _getfsnow(self._opener)
        dmap = self._map
        for f, e in dmap.iteritems():
            if e[0] == 'n' and e[3] == now:
                dmap[f] = dirstatetuple(e[0], e[1], e[2], -1)
                self._nonnormalset.add(f)

        # emulate that all 'dirstate.normal' results are written out
        self._lastnormaltime = 0

        # delay writing in-memory changes out
        tr.addfilegenerator('dirstate', (self._filename,),
                            self._writedirstate, location='plain')
        return

    st = self._opener(filename, "w", atomictemp=True)
    self._writedirstate(st)
731
731
def _writedirstate(self, st):
    """Serialize the dirstate map into the open file object ``st``."""
    # use the modification time of the newly created temporary file as the
    # filesystem's notion of 'now'
    now = util.fstat(st).st_mtime & _rangemask

    # enough 'delaywrite' prevents 'pack_dirstate' from dropping
    # timestamp of each entries in dirstate, because of 'now > mtime'
    delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0)
    if delaywrite > 0:
        # do we have any files to delay for?
        for f, e in self._map.iteritems():
            if e[0] == 'n' and e[3] == now:
                import time # to avoid useless import
                # rather than sleep n seconds, sleep until the next
                # multiple of n seconds
                clock = time.time()
                start = int(clock) - (int(clock) % delaywrite)
                end = start + delaywrite
                time.sleep(end - clock)
                break

    st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
    # pack_dirstate may have mutated entries above; recompute the
    # non-normal set from scratch to stay consistent
    self._nonnormalset = nonnormalentries(self._map)
    st.close()
    self._lastnormaltime = 0
    self._dirty = self._dirtypl = False
758
758
759 def _dirignore(self, f):
759 def _dirignore(self, f):
760 if f == '.':
760 if f == '.':
761 return False
761 return False
762 if self._ignore(f):
762 if self._ignore(f):
763 return True
763 return True
764 for p in util.finddirs(f):
764 for p in util.finddirs(f):
765 if self._ignore(p):
765 if self._ignore(p):
766 return True
766 return True
767 return False
767 return False
768
768
769 def _ignorefiles(self):
769 def _ignorefiles(self):
770 files = []
770 files = []
771 if os.path.exists(self._join('.hgignore')):
771 if os.path.exists(self._join('.hgignore')):
772 files.append(self._join('.hgignore'))
772 files.append(self._join('.hgignore'))
773 for name, path in self._ui.configitems("ui"):
773 for name, path in self._ui.configitems("ui"):
774 if name == 'ignore' or name.startswith('ignore.'):
774 if name == 'ignore' or name.startswith('ignore.'):
775 # we need to use os.path.join here rather than self._join
775 # we need to use os.path.join here rather than self._join
776 # because path is arbitrary and user-specified
776 # because path is arbitrary and user-specified
777 files.append(os.path.join(self._rootdir, util.expandpath(path)))
777 files.append(os.path.join(self._rootdir, util.expandpath(path)))
778 return files
778 return files
779
779
def _ignorefileandline(self, f):
    """Return (file, lineno, line) for the first ignore rule matching ``f``.

    Walks all ignore files breadth-first, following ``subinclude``
    directives; returns (None, -1, "") when nothing matches.
    """
    files = collections.deque(self._ignorefiles())
    visited = set()
    while files:
        i = files.popleft()
        patterns = matchmod.readpatternfile(i, self._ui.warn,
                                            sourceinfo=True)
        for pattern, lineno, line in patterns:
            kind, p = matchmod._patsplit(pattern, 'glob')
            if kind == "subinclude":
                # queue the referenced file; 'visited' guards against
                # include cycles
                if p not in visited:
                    files.append(p)
                continue
            m = matchmod.match(self._root, '', [], [pattern],
                               warn=self._ui.warn)
            if m(f):
                return (i, lineno, line)
        visited.add(i)
    return (None, -1, "")
799
799
def _walkexplicit(self, match, subrepos):
    '''Get stat data about the files explicitly specified by match.

    Return a triple (results, dirsfound, dirsnotfound).
    - results is a mapping from filename to stat result. It also contains
      listings mapping subrepos and .hg to None.
    - dirsfound is a list of files found to be directories.
    - dirsnotfound is a list of files that the dirstate thinks are
      directories and that were not found.'''

    def badtype(mode):
        # human-readable description for a stat mode we cannot track
        kind = _('unknown')
        if stat.S_ISCHR(mode):
            kind = _('character device')
        elif stat.S_ISBLK(mode):
            kind = _('block device')
        elif stat.S_ISFIFO(mode):
            kind = _('fifo')
        elif stat.S_ISSOCK(mode):
            kind = _('socket')
        elif stat.S_ISDIR(mode):
            kind = _('directory')
        return _('unsupported file type (type is %s)') % kind

    # hoist frequently used attributes into locals for the hot loop
    matchedir = match.explicitdir
    badfn = match.bad
    dmap = self._map
    lstat = os.lstat
    getkind = stat.S_IFMT
    dirkind = stat.S_IFDIR
    regkind = stat.S_IFREG
    lnkkind = stat.S_IFLNK
    join = self._join
    dirsfound = []
    foundadd = dirsfound.append
    dirsnotfound = []
    notfoundadd = dirsnotfound.append

    if not match.isexact() and self._checkcase:
        normalize = self._normalize
    else:
        normalize = None

    # drop files that live inside a subrepo; both lists are sorted so a
    # single merge-style pass suffices
    files = sorted(match.files())
    subrepos.sort()
    i, j = 0, 0
    while i < len(files) and j < len(subrepos):
        subpath = subrepos[j] + "/"
        if files[i] < subpath:
            i += 1
            continue
        while i < len(files) and files[i].startswith(subpath):
            del files[i]
        j += 1

    if not files or '.' in files:
        files = ['.']
    results = dict.fromkeys(subrepos)
    results['.hg'] = None

    alldirs = None
    for ff in files:
        # constructing the foldmap is expensive, so don't do it for the
        # common case where files is ['.']
        if normalize and ff != '.':
            nf = normalize(ff, False, True)
        else:
            nf = ff
        if nf in results:
            continue

        try:
            st = lstat(join(nf))
            kind = getkind(st.st_mode)
            if kind == dirkind:
                if nf in dmap:
                    # file replaced by dir on disk but still in dirstate
                    results[nf] = None
                if matchedir:
                    matchedir(nf)
                foundadd((nf, ff))
            elif kind == regkind or kind == lnkkind:
                results[nf] = st
            else:
                badfn(ff, badtype(kind))
                if nf in dmap:
                    results[nf] = None
        except OSError as inst: # nf not found on disk - it is dirstate only
            if nf in dmap: # does it exactly match a missing file?
                results[nf] = None
            else: # does it match a missing directory?
                if alldirs is None:
                    alldirs = util.dirs(dmap)
                if nf in alldirs:
                    if matchedir:
                        matchedir(nf)
                    notfoundadd(nf)
                else:
                    badfn(ff, inst.strerror)

    # Case insensitive filesystems cannot rely on lstat() failing to detect
    # a case-only rename. Prune the stat object for any file that does not
    # match the case in the filesystem, if there are multiple files that
    # normalize to the same path.
    if match.isexact() and self._checkcase:
        normed = {}

        for f, st in results.iteritems():
            if st is None:
                continue

            nc = util.normcase(f)
            paths = normed.get(nc)

            if paths is None:
                paths = set()
                normed[nc] = paths

            paths.add(f)

        for norm, paths in normed.iteritems():
            if len(paths) > 1:
                for path in paths:
                    folded = self._discoverpath(path, norm, True, None,
                                                self._dirfoldmap)
                    if path != folded:
                        results[path] = None

    return results, dirsfound, dirsnotfound
929
929
930 def walk(self, match, subrepos, unknown, ignored, full=True):
930 def walk(self, match, subrepos, unknown, ignored, full=True):
931 '''
931 '''
932 Walk recursively through the directory tree, finding all files
932 Walk recursively through the directory tree, finding all files
933 matched by match.
933 matched by match.
934
934
935 If full is False, maybe skip some known-clean files.
935 If full is False, maybe skip some known-clean files.
936
936
937 Return a dict mapping filename to stat-like object (either
937 Return a dict mapping filename to stat-like object (either
938 mercurial.osutil.stat instance or return value of os.stat()).
938 mercurial.osutil.stat instance or return value of os.stat()).
939
939
940 '''
940 '''
941 # full is a flag that extensions that hook into walk can use -- this
941 # full is a flag that extensions that hook into walk can use -- this
942 # implementation doesn't use it at all. This satisfies the contract
942 # implementation doesn't use it at all. This satisfies the contract
943 # because we only guarantee a "maybe".
943 # because we only guarantee a "maybe".
944
944
945 if ignored:
945 if ignored:
946 ignore = util.never
946 ignore = util.never
947 dirignore = util.never
947 dirignore = util.never
948 elif unknown:
948 elif unknown:
949 ignore = self._ignore
949 ignore = self._ignore
950 dirignore = self._dirignore
950 dirignore = self._dirignore
951 else:
951 else:
952 # if not unknown and not ignored, drop dir recursion and step 2
952 # if not unknown and not ignored, drop dir recursion and step 2
953 ignore = util.always
953 ignore = util.always
954 dirignore = util.always
954 dirignore = util.always
955
955
956 matchfn = match.matchfn
956 matchfn = match.matchfn
957 matchalways = match.always()
957 matchalways = match.always()
958 matchtdir = match.traversedir
958 matchtdir = match.traversedir
959 dmap = self._map
959 dmap = self._map
960 listdir = osutil.listdir
960 listdir = osutil.listdir
961 lstat = os.lstat
961 lstat = os.lstat
962 dirkind = stat.S_IFDIR
962 dirkind = stat.S_IFDIR
963 regkind = stat.S_IFREG
963 regkind = stat.S_IFREG
964 lnkkind = stat.S_IFLNK
964 lnkkind = stat.S_IFLNK
965 join = self._join
965 join = self._join
966
966
967 exact = skipstep3 = False
967 exact = skipstep3 = False
968 if match.isexact(): # match.exact
968 if match.isexact(): # match.exact
969 exact = True
969 exact = True
970 dirignore = util.always # skip step 2
970 dirignore = util.always # skip step 2
971 elif match.prefix(): # match.match, no patterns
971 elif match.prefix(): # match.match, no patterns
972 skipstep3 = True
972 skipstep3 = True
973
973
974 if not exact and self._checkcase:
974 if not exact and self._checkcase:
975 normalize = self._normalize
975 normalize = self._normalize
976 normalizefile = self._normalizefile
976 normalizefile = self._normalizefile
977 skipstep3 = False
977 skipstep3 = False
978 else:
978 else:
979 normalize = self._normalize
979 normalize = self._normalize
980 normalizefile = None
980 normalizefile = None
981
981
982 # step 1: find all explicit files
982 # step 1: find all explicit files
983 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
983 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
984
984
985 skipstep3 = skipstep3 and not (work or dirsnotfound)
985 skipstep3 = skipstep3 and not (work or dirsnotfound)
986 work = [d for d in work if not dirignore(d[0])]
986 work = [d for d in work if not dirignore(d[0])]
987
987
988 # step 2: visit subdirectories
988 # step 2: visit subdirectories
989 def traverse(work, alreadynormed):
989 def traverse(work, alreadynormed):
990 wadd = work.append
990 wadd = work.append
991 while work:
991 while work:
992 nd = work.pop()
992 nd = work.pop()
993 skip = None
993 skip = None
994 if nd == '.':
994 if nd == '.':
995 nd = ''
995 nd = ''
996 else:
996 else:
997 skip = '.hg'
997 skip = '.hg'
998 try:
998 try:
999 entries = listdir(join(nd), stat=True, skip=skip)
999 entries = listdir(join(nd), stat=True, skip=skip)
1000 except OSError as inst:
1000 except OSError as inst:
1001 if inst.errno in (errno.EACCES, errno.ENOENT):
1001 if inst.errno in (errno.EACCES, errno.ENOENT):
1002 match.bad(self.pathto(nd), inst.strerror)
1002 match.bad(self.pathto(nd), inst.strerror)
1003 continue
1003 continue
1004 raise
1004 raise
1005 for f, kind, st in entries:
1005 for f, kind, st in entries:
1006 if normalizefile:
1006 if normalizefile:
1007 # even though f might be a directory, we're only
1007 # even though f might be a directory, we're only
1008 # interested in comparing it to files currently in the
1008 # interested in comparing it to files currently in the
1009 # dmap -- therefore normalizefile is enough
1009 # dmap -- therefore normalizefile is enough
1010 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1010 nf = normalizefile(nd and (nd + "/" + f) or f, True,
1011 True)
1011 True)
1012 else:
1012 else:
1013 nf = nd and (nd + "/" + f) or f
1013 nf = nd and (nd + "/" + f) or f
1014 if nf not in results:
1014 if nf not in results:
1015 if kind == dirkind:
1015 if kind == dirkind:
1016 if not ignore(nf):
1016 if not ignore(nf):
1017 if matchtdir:
1017 if matchtdir:
1018 matchtdir(nf)
1018 matchtdir(nf)
1019 wadd(nf)
1019 wadd(nf)
1020 if nf in dmap and (matchalways or matchfn(nf)):
1020 if nf in dmap and (matchalways or matchfn(nf)):
1021 results[nf] = None
1021 results[nf] = None
1022 elif kind == regkind or kind == lnkkind:
1022 elif kind == regkind or kind == lnkkind:
1023 if nf in dmap:
1023 if nf in dmap:
1024 if matchalways or matchfn(nf):
1024 if matchalways or matchfn(nf):
1025 results[nf] = st
1025 results[nf] = st
1026 elif ((matchalways or matchfn(nf))
1026 elif ((matchalways or matchfn(nf))
1027 and not ignore(nf)):
1027 and not ignore(nf)):
1028 # unknown file -- normalize if necessary
1028 # unknown file -- normalize if necessary
1029 if not alreadynormed:
1029 if not alreadynormed:
1030 nf = normalize(nf, False, True)
1030 nf = normalize(nf, False, True)
1031 results[nf] = st
1031 results[nf] = st
1032 elif nf in dmap and (matchalways or matchfn(nf)):
1032 elif nf in dmap and (matchalways or matchfn(nf)):
1033 results[nf] = None
1033 results[nf] = None
1034
1034
1035 for nd, d in work:
1035 for nd, d in work:
1036 # alreadynormed means that processwork doesn't have to do any
1036 # alreadynormed means that processwork doesn't have to do any
1037 # expensive directory normalization
1037 # expensive directory normalization
1038 alreadynormed = not normalize or nd == d
1038 alreadynormed = not normalize or nd == d
1039 traverse([d], alreadynormed)
1039 traverse([d], alreadynormed)
1040
1040
1041 for s in subrepos:
1041 for s in subrepos:
1042 del results[s]
1042 del results[s]
1043 del results['.hg']
1043 del results['.hg']
1044
1044
1045 # step 3: visit remaining files from dmap
1045 # step 3: visit remaining files from dmap
1046 if not skipstep3 and not exact:
1046 if not skipstep3 and not exact:
1047 # If a dmap file is not in results yet, it was either
1047 # If a dmap file is not in results yet, it was either
1048 # a) not matching matchfn b) ignored, c) missing, or d) under a
1048 # a) not matching matchfn b) ignored, c) missing, or d) under a
1049 # symlink directory.
1049 # symlink directory.
1050 if not results and matchalways:
1050 if not results and matchalways:
1051 visit = dmap.keys()
1051 visit = dmap.keys()
1052 else:
1052 else:
1053 visit = [f for f in dmap if f not in results and matchfn(f)]
1053 visit = [f for f in dmap if f not in results and matchfn(f)]
1054 visit.sort()
1054 visit.sort()
1055
1055
1056 if unknown:
1056 if unknown:
1057 # unknown == True means we walked all dirs under the roots
1057 # unknown == True means we walked all dirs under the roots
1058 # that wasn't ignored, and everything that matched was stat'ed
1058 # that wasn't ignored, and everything that matched was stat'ed
1059 # and is already in results.
1059 # and is already in results.
1060 # The rest must thus be ignored or under a symlink.
1060 # The rest must thus be ignored or under a symlink.
1061 audit_path = pathutil.pathauditor(self._root)
1061 audit_path = pathutil.pathauditor(self._root)
1062
1062
1063 for nf in iter(visit):
1063 for nf in iter(visit):
1064 # If a stat for the same file was already added with a
1064 # If a stat for the same file was already added with a
1065 # different case, don't add one for this, since that would
1065 # different case, don't add one for this, since that would
1066 # make it appear as if the file exists under both names
1066 # make it appear as if the file exists under both names
1067 # on disk.
1067 # on disk.
1068 if (normalizefile and
1068 if (normalizefile and
1069 normalizefile(nf, True, True) in results):
1069 normalizefile(nf, True, True) in results):
1070 results[nf] = None
1070 results[nf] = None
1071 # Report ignored items in the dmap as long as they are not
1071 # Report ignored items in the dmap as long as they are not
1072 # under a symlink directory.
1072 # under a symlink directory.
1073 elif audit_path.check(nf):
1073 elif audit_path.check(nf):
1074 try:
1074 try:
1075 results[nf] = lstat(join(nf))
1075 results[nf] = lstat(join(nf))
1076 # file was just ignored, no links, and exists
1076 # file was just ignored, no links, and exists
1077 except OSError:
1077 except OSError:
1078 # file doesn't exist
1078 # file doesn't exist
1079 results[nf] = None
1079 results[nf] = None
1080 else:
1080 else:
1081 # It's either missing or under a symlink directory
1081 # It's either missing or under a symlink directory
1082 # which we in this case report as missing
1082 # which we in this case report as missing
1083 results[nf] = None
1083 results[nf] = None
1084 else:
1084 else:
1085 # We may not have walked the full directory tree above,
1085 # We may not have walked the full directory tree above,
1086 # so stat and check everything we missed.
1086 # so stat and check everything we missed.
1087 nf = iter(visit).next
1087 nf = iter(visit).next
1088 for st in util.statfiles([join(i) for i in visit]):
1088 for st in util.statfiles([join(i) for i in visit]):
1089 results[nf()] = st
1089 results[nf()] = st
1090 return results
1090 return results
1091
1091
    def status(self, match, subrepos, ignored, clean, unknown):
        '''Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written

        match selects the files to examine; subrepos are excluded from the
        walk results. The ignored/clean/unknown booleans control whether
        those (potentially expensive) categories are computed at all.
        '''
        # The parameters shadow the result-list names below, so stash them
        # under list* names first.
        listignored, listclean, listunknown = ignored, clean, unknown
        lookup, modified, added, unknown, ignored = [], [], [], [], []
        removed, deleted, clean = [], [], []

        # Bind hot attributes/methods to locals: this loop runs once per
        # file in the working directory, so LOAD_FAST lookups matter.
        dmap = self._map
        ladd = lookup.append            # aka "unsure"
        madd = modified.append
        aadd = added.append
        uadd = unknown.append
        iadd = ignored.append
        radd = removed.append
        dadd = deleted.append
        cadd = clean.append
        mexact = match.exact
        dirignore = self._dirignore
        checkexec = self._checkexec
        copymap = self._copymap
        lastnormaltime = self._lastnormaltime

        # We need to do full walks when either
        # - we're listing all clean files, or
        # - match.traversedir does something, because match.traversedir should
        #   be called for every dir in the working dir
        full = listclean or match.traversedir is not None
        for fn, st in self.walk(match, subrepos, listunknown, listignored,
                                full=full).iteritems():
            if fn not in dmap:
                # Not tracked: classify as ignored or unknown.
                if (listignored or mexact(fn)) and dirignore(fn):
                    if listignored:
                        iadd(fn)
                else:
                    uadd(fn)
                continue

            # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
            # written like that for performance reasons. dmap[fn] is not a
            # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
            # opcode has fast paths when the value to be unpacked is a tuple or
            # a list, but falls back to creating a full-fledged iterator in
            # general. That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dmap[fn]
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            if not st and state in "nma":
                # On disk no more, but the dirstate still knows it: deleted.
                dadd(fn)
            elif state == 'n':
                # 'n'ormal: compare recorded size/mode/mtime with the stat.
                # _rangemask comparisons tolerate values truncated to the
                # 31-bit range the on-disk dirstate format can store.
                if (size >= 0 and
                    ((size != st.st_size and size != st.st_size & _rangemask)
                     or ((mode ^ st.st_mode) & 0o100 and checkexec))
                    or size == -2 # other parent
                    or fn in copymap):
                    madd(fn)
                elif time != st.st_mtime and time != st.st_mtime & _rangemask:
                    ladd(fn)
                elif st.st_mtime == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == 'm':
                madd(fn)
            elif state == 'a':
                aadd(fn)
            elif state == 'r':
                radd(fn)

        return (lookup, scmutil.status(modified, added, removed, deleted,
                                       unknown, ignored, clean))
1183
1183
1184 def matches(self, match):
1184 def matches(self, match):
1185 '''
1185 '''
1186 return files in the dirstate (in whatever state) filtered by match
1186 return files in the dirstate (in whatever state) filtered by match
1187 '''
1187 '''
1188 dmap = self._map
1188 dmap = self._map
1189 if match.always():
1189 if match.always():
1190 return dmap.keys()
1190 return dmap.keys()
1191 files = match.files()
1191 files = match.files()
1192 if match.isexact():
1192 if match.isexact():
1193 # fast path -- filter the other way around, since typically files is
1193 # fast path -- filter the other way around, since typically files is
1194 # much smaller than dmap
1194 # much smaller than dmap
1195 return [f for f in files if f in dmap]
1195 return [f for f in files if f in dmap]
1196 if match.prefix() and all(fn in dmap for fn in files):
1196 if match.prefix() and all(fn in dmap for fn in files):
1197 # fast path -- all the values are known to be files, so just return
1197 # fast path -- all the values are known to be files, so just return
1198 # that
1198 # that
1199 return list(files)
1199 return list(files)
1200 return [f for f in dmap if match(f)]
1200 return [f for f in dmap if match(f)]
1201
1201
1202 def _actualfilename(self, tr):
1202 def _actualfilename(self, tr):
1203 if tr:
1203 if tr:
1204 return self._pendingfilename
1204 return self._pendingfilename
1205 else:
1205 else:
1206 return self._filename
1206 return self._filename
1207
1207
    def savebackup(self, tr, suffix):
        '''Save current dirstate into backup file with suffix

        tr is the active transaction (or None); suffix is appended to the
        dirstate filename to form the backup file name. The statement order
        below is deliberate: write out pending state first, register it
        with the transaction, then copy it aside.
        '''
        filename = self._actualfilename(tr)

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
        self._writedirstate(self._opener(filename, "w", atomictemp=True))

        if tr:
            # ensure that subsequent tr.writepending returns True for
            # changes written out above, even if dirstate is never
            # changed after this
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')

            # ensure that pending file written above is unlinked at
            # failure, even if tr.writepending isn't invoked until the
            # end of this transaction
            tr.registertmp(filename, location='plain')

        # copy the just-written state into the suffixed backup file
        self._opener.write(filename + suffix, self._opener.tryread(filename))
1230
1230
1231 def _restorebackup(self, tr, suffix):
1231 def restorebackup(self, tr, suffix):
1232 '''Restore dirstate by backup file with suffix'''
1232 '''Restore dirstate by backup file with suffix'''
1233 # this "invalidate()" prevents "wlock.release()" from writing
1233 # this "invalidate()" prevents "wlock.release()" from writing
1234 # changes of dirstate out after restoring from backup file
1234 # changes of dirstate out after restoring from backup file
1235 self.invalidate()
1235 self.invalidate()
1236 filename = self._actualfilename(tr)
1236 filename = self._actualfilename(tr)
1237 self._opener.rename(filename + suffix, filename)
1237 self._opener.rename(filename + suffix, filename)
1238
1238
1239 def _clearbackup(self, tr, suffix):
1239 def clearbackup(self, tr, suffix):
1240 '''Clear backup file with suffix'''
1240 '''Clear backup file with suffix'''
1241 filename = self._actualfilename(tr)
1241 filename = self._actualfilename(tr)
1242 self._opener.unlink(filename + suffix)
1242 self._opener.unlink(filename + suffix)
General Comments 0
You need to be logged in to leave comments. Login now