##// END OF EJS Templates
match: added matchessubrepo method to matcher...
Hannes Oldenburg -
r29758:2372182e default
parent child Browse files
Show More
@@ -1,3574 +1,3562 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import sys
13 import sys
14 import tempfile
14 import tempfile
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 bin,
18 bin,
19 hex,
19 hex,
20 nullid,
20 nullid,
21 nullrev,
21 nullrev,
22 short,
22 short,
23 )
23 )
24
24
25 from . import (
25 from . import (
26 bookmarks,
26 bookmarks,
27 changelog,
27 changelog,
28 copies,
28 copies,
29 crecord as crecordmod,
29 crecord as crecordmod,
30 encoding,
30 encoding,
31 error,
31 error,
32 formatter,
32 formatter,
33 graphmod,
33 graphmod,
34 lock as lockmod,
34 lock as lockmod,
35 match as matchmod,
35 match as matchmod,
36 obsolete,
36 obsolete,
37 patch,
37 patch,
38 pathutil,
38 pathutil,
39 phases,
39 phases,
40 repair,
40 repair,
41 revlog,
41 revlog,
42 revset,
42 revset,
43 scmutil,
43 scmutil,
44 templatekw,
44 templatekw,
45 templater,
45 templater,
46 util,
46 util,
47 )
47 )
48 stringio = util.stringio
48 stringio = util.stringio
49
49
50 def ishunk(x):
50 def ishunk(x):
51 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
51 hunkclasses = (crecordmod.uihunk, patch.recordhunk)
52 return isinstance(x, hunkclasses)
52 return isinstance(x, hunkclasses)
53
53
54 def newandmodified(chunks, originalchunks):
54 def newandmodified(chunks, originalchunks):
55 newlyaddedandmodifiedfiles = set()
55 newlyaddedandmodifiedfiles = set()
56 for chunk in chunks:
56 for chunk in chunks:
57 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
57 if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
58 originalchunks:
58 originalchunks:
59 newlyaddedandmodifiedfiles.add(chunk.header.filename())
59 newlyaddedandmodifiedfiles.add(chunk.header.filename())
60 return newlyaddedandmodifiedfiles
60 return newlyaddedandmodifiedfiles
61
61
62 def parsealiases(cmd):
62 def parsealiases(cmd):
63 return cmd.lstrip("^").split("|")
63 return cmd.lstrip("^").split("|")
64
64
65 def setupwrapcolorwrite(ui):
65 def setupwrapcolorwrite(ui):
66 # wrap ui.write so diff output can be labeled/colorized
66 # wrap ui.write so diff output can be labeled/colorized
67 def wrapwrite(orig, *args, **kw):
67 def wrapwrite(orig, *args, **kw):
68 label = kw.pop('label', '')
68 label = kw.pop('label', '')
69 for chunk, l in patch.difflabel(lambda: args):
69 for chunk, l in patch.difflabel(lambda: args):
70 orig(chunk, label=label + l)
70 orig(chunk, label=label + l)
71
71
72 oldwrite = ui.write
72 oldwrite = ui.write
73 def wrap(*args, **kwargs):
73 def wrap(*args, **kwargs):
74 return wrapwrite(oldwrite, *args, **kwargs)
74 return wrapwrite(oldwrite, *args, **kwargs)
75 setattr(ui, 'write', wrap)
75 setattr(ui, 'write', wrap)
76 return oldwrite
76 return oldwrite
77
77
78 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
78 def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
79 if usecurses:
79 if usecurses:
80 if testfile:
80 if testfile:
81 recordfn = crecordmod.testdecorator(testfile,
81 recordfn = crecordmod.testdecorator(testfile,
82 crecordmod.testchunkselector)
82 crecordmod.testchunkselector)
83 else:
83 else:
84 recordfn = crecordmod.chunkselector
84 recordfn = crecordmod.chunkselector
85
85
86 return crecordmod.filterpatch(ui, originalhunks, recordfn)
86 return crecordmod.filterpatch(ui, originalhunks, recordfn)
87
87
88 else:
88 else:
89 return patch.filterpatch(ui, originalhunks, operation)
89 return patch.filterpatch(ui, originalhunks, operation)
90
90
91 def recordfilter(ui, originalhunks, operation=None):
91 def recordfilter(ui, originalhunks, operation=None):
92 """ Prompts the user to filter the originalhunks and return a list of
92 """ Prompts the user to filter the originalhunks and return a list of
93 selected hunks.
93 selected hunks.
94 *operation* is used for to build ui messages to indicate the user what
94 *operation* is used for to build ui messages to indicate the user what
95 kind of filtering they are doing: reverting, committing, shelving, etc.
95 kind of filtering they are doing: reverting, committing, shelving, etc.
96 (see patch.filterpatch).
96 (see patch.filterpatch).
97 """
97 """
98 usecurses = crecordmod.checkcurses(ui)
98 usecurses = crecordmod.checkcurses(ui)
99 testfile = ui.config('experimental', 'crecordtest', None)
99 testfile = ui.config('experimental', 'crecordtest', None)
100 oldwrite = setupwrapcolorwrite(ui)
100 oldwrite = setupwrapcolorwrite(ui)
101 try:
101 try:
102 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
102 newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
103 testfile, operation)
103 testfile, operation)
104 finally:
104 finally:
105 ui.write = oldwrite
105 ui.write = oldwrite
106 return newchunks, newopts
106 return newchunks, newopts
107
107
108 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
108 def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
109 filterfn, *pats, **opts):
109 filterfn, *pats, **opts):
110 from . import merge as mergemod
110 from . import merge as mergemod
111 if not ui.interactive():
111 if not ui.interactive():
112 if cmdsuggest:
112 if cmdsuggest:
113 msg = _('running non-interactively, use %s instead') % cmdsuggest
113 msg = _('running non-interactively, use %s instead') % cmdsuggest
114 else:
114 else:
115 msg = _('running non-interactively')
115 msg = _('running non-interactively')
116 raise error.Abort(msg)
116 raise error.Abort(msg)
117
117
118 # make sure username is set before going interactive
118 # make sure username is set before going interactive
119 if not opts.get('user'):
119 if not opts.get('user'):
120 ui.username() # raise exception, username not provided
120 ui.username() # raise exception, username not provided
121
121
122 def recordfunc(ui, repo, message, match, opts):
122 def recordfunc(ui, repo, message, match, opts):
123 """This is generic record driver.
123 """This is generic record driver.
124
124
125 Its job is to interactively filter local changes, and
125 Its job is to interactively filter local changes, and
126 accordingly prepare working directory into a state in which the
126 accordingly prepare working directory into a state in which the
127 job can be delegated to a non-interactive commit command such as
127 job can be delegated to a non-interactive commit command such as
128 'commit' or 'qrefresh'.
128 'commit' or 'qrefresh'.
129
129
130 After the actual job is done by non-interactive command, the
130 After the actual job is done by non-interactive command, the
131 working directory is restored to its original state.
131 working directory is restored to its original state.
132
132
133 In the end we'll record interesting changes, and everything else
133 In the end we'll record interesting changes, and everything else
134 will be left in place, so the user can continue working.
134 will be left in place, so the user can continue working.
135 """
135 """
136
136
137 checkunfinished(repo, commit=True)
137 checkunfinished(repo, commit=True)
138 wctx = repo[None]
138 wctx = repo[None]
139 merge = len(wctx.parents()) > 1
139 merge = len(wctx.parents()) > 1
140 if merge:
140 if merge:
141 raise error.Abort(_('cannot partially commit a merge '
141 raise error.Abort(_('cannot partially commit a merge '
142 '(use "hg commit" instead)'))
142 '(use "hg commit" instead)'))
143
143
144 def fail(f, msg):
144 def fail(f, msg):
145 raise error.Abort('%s: %s' % (f, msg))
145 raise error.Abort('%s: %s' % (f, msg))
146
146
147 force = opts.get('force')
147 force = opts.get('force')
148 if not force:
148 if not force:
149 vdirs = []
149 vdirs = []
150 match.explicitdir = vdirs.append
150 match.explicitdir = vdirs.append
151 match.bad = fail
151 match.bad = fail
152
152
153 status = repo.status(match=match)
153 status = repo.status(match=match)
154 if not force:
154 if not force:
155 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
155 repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
156 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
156 diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
157 diffopts.nodates = True
157 diffopts.nodates = True
158 diffopts.git = True
158 diffopts.git = True
159 diffopts.showfunc = True
159 diffopts.showfunc = True
160 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
160 originaldiff = patch.diff(repo, changes=status, opts=diffopts)
161 originalchunks = patch.parsepatch(originaldiff)
161 originalchunks = patch.parsepatch(originaldiff)
162
162
163 # 1. filter patch, since we are intending to apply subset of it
163 # 1. filter patch, since we are intending to apply subset of it
164 try:
164 try:
165 chunks, newopts = filterfn(ui, originalchunks)
165 chunks, newopts = filterfn(ui, originalchunks)
166 except patch.PatchError as err:
166 except patch.PatchError as err:
167 raise error.Abort(_('error parsing patch: %s') % err)
167 raise error.Abort(_('error parsing patch: %s') % err)
168 opts.update(newopts)
168 opts.update(newopts)
169
169
170 # We need to keep a backup of files that have been newly added and
170 # We need to keep a backup of files that have been newly added and
171 # modified during the recording process because there is a previous
171 # modified during the recording process because there is a previous
172 # version without the edit in the workdir
172 # version without the edit in the workdir
173 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
173 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
174 contenders = set()
174 contenders = set()
175 for h in chunks:
175 for h in chunks:
176 try:
176 try:
177 contenders.update(set(h.files()))
177 contenders.update(set(h.files()))
178 except AttributeError:
178 except AttributeError:
179 pass
179 pass
180
180
181 changed = status.modified + status.added + status.removed
181 changed = status.modified + status.added + status.removed
182 newfiles = [f for f in changed if f in contenders]
182 newfiles = [f for f in changed if f in contenders]
183 if not newfiles:
183 if not newfiles:
184 ui.status(_('no changes to record\n'))
184 ui.status(_('no changes to record\n'))
185 return 0
185 return 0
186
186
187 modified = set(status.modified)
187 modified = set(status.modified)
188
188
189 # 2. backup changed files, so we can restore them in the end
189 # 2. backup changed files, so we can restore them in the end
190
190
191 if backupall:
191 if backupall:
192 tobackup = changed
192 tobackup = changed
193 else:
193 else:
194 tobackup = [f for f in newfiles if f in modified or f in \
194 tobackup = [f for f in newfiles if f in modified or f in \
195 newlyaddedandmodifiedfiles]
195 newlyaddedandmodifiedfiles]
196 backups = {}
196 backups = {}
197 if tobackup:
197 if tobackup:
198 backupdir = repo.join('record-backups')
198 backupdir = repo.join('record-backups')
199 try:
199 try:
200 os.mkdir(backupdir)
200 os.mkdir(backupdir)
201 except OSError as err:
201 except OSError as err:
202 if err.errno != errno.EEXIST:
202 if err.errno != errno.EEXIST:
203 raise
203 raise
204 try:
204 try:
205 # backup continues
205 # backup continues
206 for f in tobackup:
206 for f in tobackup:
207 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
207 fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
208 dir=backupdir)
208 dir=backupdir)
209 os.close(fd)
209 os.close(fd)
210 ui.debug('backup %r as %r\n' % (f, tmpname))
210 ui.debug('backup %r as %r\n' % (f, tmpname))
211 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
211 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
212 backups[f] = tmpname
212 backups[f] = tmpname
213
213
214 fp = stringio()
214 fp = stringio()
215 for c in chunks:
215 for c in chunks:
216 fname = c.filename()
216 fname = c.filename()
217 if fname in backups:
217 if fname in backups:
218 c.write(fp)
218 c.write(fp)
219 dopatch = fp.tell()
219 dopatch = fp.tell()
220 fp.seek(0)
220 fp.seek(0)
221
221
222 # 2.5 optionally review / modify patch in text editor
222 # 2.5 optionally review / modify patch in text editor
223 if opts.get('review', False):
223 if opts.get('review', False):
224 patchtext = (crecordmod.diffhelptext
224 patchtext = (crecordmod.diffhelptext
225 + crecordmod.patchhelptext
225 + crecordmod.patchhelptext
226 + fp.read())
226 + fp.read())
227 reviewedpatch = ui.edit(patchtext, "",
227 reviewedpatch = ui.edit(patchtext, "",
228 extra={"suffix": ".diff"})
228 extra={"suffix": ".diff"})
229 fp.truncate(0)
229 fp.truncate(0)
230 fp.write(reviewedpatch)
230 fp.write(reviewedpatch)
231 fp.seek(0)
231 fp.seek(0)
232
232
233 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
233 [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
234 # 3a. apply filtered patch to clean repo (clean)
234 # 3a. apply filtered patch to clean repo (clean)
235 if backups:
235 if backups:
236 # Equivalent to hg.revert
236 # Equivalent to hg.revert
237 m = scmutil.matchfiles(repo, backups.keys())
237 m = scmutil.matchfiles(repo, backups.keys())
238 mergemod.update(repo, repo.dirstate.p1(),
238 mergemod.update(repo, repo.dirstate.p1(),
239 False, True, matcher=m)
239 False, True, matcher=m)
240
240
241 # 3b. (apply)
241 # 3b. (apply)
242 if dopatch:
242 if dopatch:
243 try:
243 try:
244 ui.debug('applying patch\n')
244 ui.debug('applying patch\n')
245 ui.debug(fp.getvalue())
245 ui.debug(fp.getvalue())
246 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
246 patch.internalpatch(ui, repo, fp, 1, eolmode=None)
247 except patch.PatchError as err:
247 except patch.PatchError as err:
248 raise error.Abort(str(err))
248 raise error.Abort(str(err))
249 del fp
249 del fp
250
250
251 # 4. We prepared working directory according to filtered
251 # 4. We prepared working directory according to filtered
252 # patch. Now is the time to delegate the job to
252 # patch. Now is the time to delegate the job to
253 # commit/qrefresh or the like!
253 # commit/qrefresh or the like!
254
254
255 # Make all of the pathnames absolute.
255 # Make all of the pathnames absolute.
256 newfiles = [repo.wjoin(nf) for nf in newfiles]
256 newfiles = [repo.wjoin(nf) for nf in newfiles]
257 return commitfunc(ui, repo, *newfiles, **opts)
257 return commitfunc(ui, repo, *newfiles, **opts)
258 finally:
258 finally:
259 # 5. finally restore backed-up files
259 # 5. finally restore backed-up files
260 try:
260 try:
261 dirstate = repo.dirstate
261 dirstate = repo.dirstate
262 for realname, tmpname in backups.iteritems():
262 for realname, tmpname in backups.iteritems():
263 ui.debug('restoring %r to %r\n' % (tmpname, realname))
263 ui.debug('restoring %r to %r\n' % (tmpname, realname))
264
264
265 if dirstate[realname] == 'n':
265 if dirstate[realname] == 'n':
266 # without normallookup, restoring timestamp
266 # without normallookup, restoring timestamp
267 # may cause partially committed files
267 # may cause partially committed files
268 # to be treated as unmodified
268 # to be treated as unmodified
269 dirstate.normallookup(realname)
269 dirstate.normallookup(realname)
270
270
271 # copystat=True here and above are a hack to trick any
271 # copystat=True here and above are a hack to trick any
272 # editors that have f open that we haven't modified them.
272 # editors that have f open that we haven't modified them.
273 #
273 #
274 # Also note that this racy as an editor could notice the
274 # Also note that this racy as an editor could notice the
275 # file's mtime before we've finished writing it.
275 # file's mtime before we've finished writing it.
276 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
276 util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
277 os.unlink(tmpname)
277 os.unlink(tmpname)
278 if tobackup:
278 if tobackup:
279 os.rmdir(backupdir)
279 os.rmdir(backupdir)
280 except OSError:
280 except OSError:
281 pass
281 pass
282
282
283 def recordinwlock(ui, repo, message, match, opts):
283 def recordinwlock(ui, repo, message, match, opts):
284 with repo.wlock():
284 with repo.wlock():
285 return recordfunc(ui, repo, message, match, opts)
285 return recordfunc(ui, repo, message, match, opts)
286
286
287 return commit(ui, repo, recordinwlock, pats, opts)
287 return commit(ui, repo, recordinwlock, pats, opts)
288
288
289 def findpossible(cmd, table, strict=False):
289 def findpossible(cmd, table, strict=False):
290 """
290 """
291 Return cmd -> (aliases, command table entry)
291 Return cmd -> (aliases, command table entry)
292 for each matching command.
292 for each matching command.
293 Return debug commands (or their aliases) only if no normal command matches.
293 Return debug commands (or their aliases) only if no normal command matches.
294 """
294 """
295 choice = {}
295 choice = {}
296 debugchoice = {}
296 debugchoice = {}
297
297
298 if cmd in table:
298 if cmd in table:
299 # short-circuit exact matches, "log" alias beats "^log|history"
299 # short-circuit exact matches, "log" alias beats "^log|history"
300 keys = [cmd]
300 keys = [cmd]
301 else:
301 else:
302 keys = table.keys()
302 keys = table.keys()
303
303
304 allcmds = []
304 allcmds = []
305 for e in keys:
305 for e in keys:
306 aliases = parsealiases(e)
306 aliases = parsealiases(e)
307 allcmds.extend(aliases)
307 allcmds.extend(aliases)
308 found = None
308 found = None
309 if cmd in aliases:
309 if cmd in aliases:
310 found = cmd
310 found = cmd
311 elif not strict:
311 elif not strict:
312 for a in aliases:
312 for a in aliases:
313 if a.startswith(cmd):
313 if a.startswith(cmd):
314 found = a
314 found = a
315 break
315 break
316 if found is not None:
316 if found is not None:
317 if aliases[0].startswith("debug") or found.startswith("debug"):
317 if aliases[0].startswith("debug") or found.startswith("debug"):
318 debugchoice[found] = (aliases, table[e])
318 debugchoice[found] = (aliases, table[e])
319 else:
319 else:
320 choice[found] = (aliases, table[e])
320 choice[found] = (aliases, table[e])
321
321
322 if not choice and debugchoice:
322 if not choice and debugchoice:
323 choice = debugchoice
323 choice = debugchoice
324
324
325 return choice, allcmds
325 return choice, allcmds
326
326
327 def findcmd(cmd, table, strict=True):
327 def findcmd(cmd, table, strict=True):
328 """Return (aliases, command table entry) for command string."""
328 """Return (aliases, command table entry) for command string."""
329 choice, allcmds = findpossible(cmd, table, strict)
329 choice, allcmds = findpossible(cmd, table, strict)
330
330
331 if cmd in choice:
331 if cmd in choice:
332 return choice[cmd]
332 return choice[cmd]
333
333
334 if len(choice) > 1:
334 if len(choice) > 1:
335 clist = choice.keys()
335 clist = choice.keys()
336 clist.sort()
336 clist.sort()
337 raise error.AmbiguousCommand(cmd, clist)
337 raise error.AmbiguousCommand(cmd, clist)
338
338
339 if choice:
339 if choice:
340 return choice.values()[0]
340 return choice.values()[0]
341
341
342 raise error.UnknownCommand(cmd, allcmds)
342 raise error.UnknownCommand(cmd, allcmds)
343
343
344 def findrepo(p):
344 def findrepo(p):
345 while not os.path.isdir(os.path.join(p, ".hg")):
345 while not os.path.isdir(os.path.join(p, ".hg")):
346 oldp, p = p, os.path.dirname(p)
346 oldp, p = p, os.path.dirname(p)
347 if p == oldp:
347 if p == oldp:
348 return None
348 return None
349
349
350 return p
350 return p
351
351
352 def bailifchanged(repo, merge=True):
352 def bailifchanged(repo, merge=True):
353 if merge and repo.dirstate.p2() != nullid:
353 if merge and repo.dirstate.p2() != nullid:
354 raise error.Abort(_('outstanding uncommitted merge'))
354 raise error.Abort(_('outstanding uncommitted merge'))
355 modified, added, removed, deleted = repo.status()[:4]
355 modified, added, removed, deleted = repo.status()[:4]
356 if modified or added or removed or deleted:
356 if modified or added or removed or deleted:
357 raise error.Abort(_('uncommitted changes'))
357 raise error.Abort(_('uncommitted changes'))
358 ctx = repo[None]
358 ctx = repo[None]
359 for s in sorted(ctx.substate):
359 for s in sorted(ctx.substate):
360 ctx.sub(s).bailifchanged()
360 ctx.sub(s).bailifchanged()
361
361
362 def logmessage(ui, opts):
362 def logmessage(ui, opts):
363 """ get the log message according to -m and -l option """
363 """ get the log message according to -m and -l option """
364 message = opts.get('message')
364 message = opts.get('message')
365 logfile = opts.get('logfile')
365 logfile = opts.get('logfile')
366
366
367 if message and logfile:
367 if message and logfile:
368 raise error.Abort(_('options --message and --logfile are mutually '
368 raise error.Abort(_('options --message and --logfile are mutually '
369 'exclusive'))
369 'exclusive'))
370 if not message and logfile:
370 if not message and logfile:
371 try:
371 try:
372 if logfile == '-':
372 if logfile == '-':
373 message = ui.fin.read()
373 message = ui.fin.read()
374 else:
374 else:
375 message = '\n'.join(util.readfile(logfile).splitlines())
375 message = '\n'.join(util.readfile(logfile).splitlines())
376 except IOError as inst:
376 except IOError as inst:
377 raise error.Abort(_("can't read commit message '%s': %s") %
377 raise error.Abort(_("can't read commit message '%s': %s") %
378 (logfile, inst.strerror))
378 (logfile, inst.strerror))
379 return message
379 return message
380
380
381 def mergeeditform(ctxorbool, baseformname):
381 def mergeeditform(ctxorbool, baseformname):
382 """return appropriate editform name (referencing a committemplate)
382 """return appropriate editform name (referencing a committemplate)
383
383
384 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
384 'ctxorbool' is either a ctx to be committed, or a bool indicating whether
385 merging is committed.
385 merging is committed.
386
386
387 This returns baseformname with '.merge' appended if it is a merge,
387 This returns baseformname with '.merge' appended if it is a merge,
388 otherwise '.normal' is appended.
388 otherwise '.normal' is appended.
389 """
389 """
390 if isinstance(ctxorbool, bool):
390 if isinstance(ctxorbool, bool):
391 if ctxorbool:
391 if ctxorbool:
392 return baseformname + ".merge"
392 return baseformname + ".merge"
393 elif 1 < len(ctxorbool.parents()):
393 elif 1 < len(ctxorbool.parents()):
394 return baseformname + ".merge"
394 return baseformname + ".merge"
395
395
396 return baseformname + ".normal"
396 return baseformname + ".normal"
397
397
398 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
398 def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
399 editform='', **opts):
399 editform='', **opts):
400 """get appropriate commit message editor according to '--edit' option
400 """get appropriate commit message editor according to '--edit' option
401
401
402 'finishdesc' is a function to be called with edited commit message
402 'finishdesc' is a function to be called with edited commit message
403 (= 'description' of the new changeset) just after editing, but
403 (= 'description' of the new changeset) just after editing, but
404 before checking empty-ness. It should return actual text to be
404 before checking empty-ness. It should return actual text to be
405 stored into history. This allows to change description before
405 stored into history. This allows to change description before
406 storing.
406 storing.
407
407
408 'extramsg' is a extra message to be shown in the editor instead of
408 'extramsg' is a extra message to be shown in the editor instead of
409 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
409 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
410 is automatically added.
410 is automatically added.
411
411
412 'editform' is a dot-separated list of names, to distinguish
412 'editform' is a dot-separated list of names, to distinguish
413 the purpose of commit text editing.
413 the purpose of commit text editing.
414
414
415 'getcommiteditor' returns 'commitforceeditor' regardless of
415 'getcommiteditor' returns 'commitforceeditor' regardless of
416 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
416 'edit', if one of 'finishdesc' or 'extramsg' is specified, because
417 they are specific for usage in MQ.
417 they are specific for usage in MQ.
418 """
418 """
419 if edit or finishdesc or extramsg:
419 if edit or finishdesc or extramsg:
420 return lambda r, c, s: commitforceeditor(r, c, s,
420 return lambda r, c, s: commitforceeditor(r, c, s,
421 finishdesc=finishdesc,
421 finishdesc=finishdesc,
422 extramsg=extramsg,
422 extramsg=extramsg,
423 editform=editform)
423 editform=editform)
424 elif editform:
424 elif editform:
425 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
425 return lambda r, c, s: commiteditor(r, c, s, editform=editform)
426 else:
426 else:
427 return commiteditor
427 return commiteditor
428
428
429 def loglimit(opts):
429 def loglimit(opts):
430 """get the log limit according to option -l/--limit"""
430 """get the log limit according to option -l/--limit"""
431 limit = opts.get('limit')
431 limit = opts.get('limit')
432 if limit:
432 if limit:
433 try:
433 try:
434 limit = int(limit)
434 limit = int(limit)
435 except ValueError:
435 except ValueError:
436 raise error.Abort(_('limit must be a positive integer'))
436 raise error.Abort(_('limit must be a positive integer'))
437 if limit <= 0:
437 if limit <= 0:
438 raise error.Abort(_('limit must be positive'))
438 raise error.Abort(_('limit must be positive'))
439 else:
439 else:
440 limit = None
440 limit = None
441 return limit
441 return limit
442
442
443 def makefilename(repo, pat, node, desc=None,
443 def makefilename(repo, pat, node, desc=None,
444 total=None, seqno=None, revwidth=None, pathname=None):
444 total=None, seqno=None, revwidth=None, pathname=None):
445 node_expander = {
445 node_expander = {
446 'H': lambda: hex(node),
446 'H': lambda: hex(node),
447 'R': lambda: str(repo.changelog.rev(node)),
447 'R': lambda: str(repo.changelog.rev(node)),
448 'h': lambda: short(node),
448 'h': lambda: short(node),
449 'm': lambda: re.sub('[^\w]', '_', str(desc))
449 'm': lambda: re.sub('[^\w]', '_', str(desc))
450 }
450 }
451 expander = {
451 expander = {
452 '%': lambda: '%',
452 '%': lambda: '%',
453 'b': lambda: os.path.basename(repo.root),
453 'b': lambda: os.path.basename(repo.root),
454 }
454 }
455
455
456 try:
456 try:
457 if node:
457 if node:
458 expander.update(node_expander)
458 expander.update(node_expander)
459 if node:
459 if node:
460 expander['r'] = (lambda:
460 expander['r'] = (lambda:
461 str(repo.changelog.rev(node)).zfill(revwidth or 0))
461 str(repo.changelog.rev(node)).zfill(revwidth or 0))
462 if total is not None:
462 if total is not None:
463 expander['N'] = lambda: str(total)
463 expander['N'] = lambda: str(total)
464 if seqno is not None:
464 if seqno is not None:
465 expander['n'] = lambda: str(seqno)
465 expander['n'] = lambda: str(seqno)
466 if total is not None and seqno is not None:
466 if total is not None and seqno is not None:
467 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
467 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
468 if pathname is not None:
468 if pathname is not None:
469 expander['s'] = lambda: os.path.basename(pathname)
469 expander['s'] = lambda: os.path.basename(pathname)
470 expander['d'] = lambda: os.path.dirname(pathname) or '.'
470 expander['d'] = lambda: os.path.dirname(pathname) or '.'
471 expander['p'] = lambda: pathname
471 expander['p'] = lambda: pathname
472
472
473 newname = []
473 newname = []
474 patlen = len(pat)
474 patlen = len(pat)
475 i = 0
475 i = 0
476 while i < patlen:
476 while i < patlen:
477 c = pat[i]
477 c = pat[i]
478 if c == '%':
478 if c == '%':
479 i += 1
479 i += 1
480 c = pat[i]
480 c = pat[i]
481 c = expander[c]()
481 c = expander[c]()
482 newname.append(c)
482 newname.append(c)
483 i += 1
483 i += 1
484 return ''.join(newname)
484 return ''.join(newname)
485 except KeyError as inst:
485 except KeyError as inst:
486 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
486 raise error.Abort(_("invalid format spec '%%%s' in output filename") %
487 inst.args[0])
487 inst.args[0])
488
488
489 class _unclosablefile(object):
489 class _unclosablefile(object):
490 def __init__(self, fp):
490 def __init__(self, fp):
491 self._fp = fp
491 self._fp = fp
492
492
493 def close(self):
493 def close(self):
494 pass
494 pass
495
495
496 def __iter__(self):
496 def __iter__(self):
497 return iter(self._fp)
497 return iter(self._fp)
498
498
499 def __getattr__(self, attr):
499 def __getattr__(self, attr):
500 return getattr(self._fp, attr)
500 return getattr(self._fp, attr)
501
501
502 def makefileobj(repo, pat, node=None, desc=None, total=None,
502 def makefileobj(repo, pat, node=None, desc=None, total=None,
503 seqno=None, revwidth=None, mode='wb', modemap=None,
503 seqno=None, revwidth=None, mode='wb', modemap=None,
504 pathname=None):
504 pathname=None):
505
505
506 writable = mode not in ('r', 'rb')
506 writable = mode not in ('r', 'rb')
507
507
508 if not pat or pat == '-':
508 if not pat or pat == '-':
509 if writable:
509 if writable:
510 fp = repo.ui.fout
510 fp = repo.ui.fout
511 else:
511 else:
512 fp = repo.ui.fin
512 fp = repo.ui.fin
513 return _unclosablefile(fp)
513 return _unclosablefile(fp)
514 if util.safehasattr(pat, 'write') and writable:
514 if util.safehasattr(pat, 'write') and writable:
515 return pat
515 return pat
516 if util.safehasattr(pat, 'read') and 'r' in mode:
516 if util.safehasattr(pat, 'read') and 'r' in mode:
517 return pat
517 return pat
518 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
518 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
519 if modemap is not None:
519 if modemap is not None:
520 mode = modemap.get(fn, mode)
520 mode = modemap.get(fn, mode)
521 if mode == 'wb':
521 if mode == 'wb':
522 modemap[fn] = 'ab'
522 modemap[fn] = 'ab'
523 return open(fn, mode)
523 return open(fn, mode)
524
524
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog

    Exactly one target is selected via opts ('changelog', 'manifest',
    'dir') or via an explicit filename; conflicting combinations abort.
    Returns the revlog object, falling back to opening ``file_`` (an
    ``.i``/``.d`` path) directly when no repository copy is available.
    """
    cl = opts['changelog']
    mf = opts['manifest']
    dir = opts['dir']
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            # use the unfiltered changelog so hidden revisions are visible
            r = repo.unfiltered().changelog
        elif dir:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                    "treemanifest enabled"))
            dirlog = repo.manifest.dirlog(dir)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifest
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        # open the raw revlog file; strip the 1-char suffix ('.i'/'.d')
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
569
569
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) working-directory files.

    The last element of ``pats`` is the destination; the preceding
    elements are source patterns.  Must be called with the repo lock
    held.  Returns True if any individual copy failed.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # expand one source pattern into (abs, rel, exact) tuples,
        # warning about unmanaged/removed files named explicitly
        srcs = []
        if after:
            badstates = '?'
        else:
            badstates = '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # perform one copy/move; returns True to report a failure
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # case-only rename: go through a temp name so the
                    # OS actually changes the on-disk case
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # score = number of sources that already exist under
                    # dest with this much of their prefix stripped
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
797
797
def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
            runargs=None, appendpid=False):
    '''Run a command as a service.

    Handles daemonization in two phases: the parent re-executes itself
    detached with --daemon-postexec and waits for a lock file to be
    removed; the child performs the postexec instructions (unlink/chdir),
    redirects stdio, then calls runfn().
    '''

    def writepid(pid):
        # record the pid if --pid-file was given (append vs truncate
        # depends on appendpid)
        if opts['pid_file']:
            if appendpid:
                mode = 'a'
            else:
                mode = 'w'
            fp = open(opts['pid_file'], mode)
            fp.write(str(pid) + '\n')
            fp.close()

    if opts['daemon'] and not opts['daemon_postexec']:
        # Signal child process startup with file removal
        lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
        os.close(lockfd)
        try:
            if not runargs:
                runargs = util.hgcmd() + sys.argv[1:]
            runargs.append('--daemon-postexec=unlink:%s' % lockpath)
            # Don't pass --cwd to the child process, because we've already
            # changed directory.
            for i in xrange(1, len(runargs)):
                if runargs[i].startswith('--cwd='):
                    del runargs[i]
                    break
                elif runargs[i].startswith('--cwd'):
                    # '--cwd VALUE' form: drop both tokens
                    del runargs[i:i + 2]
                    break
            def condfn():
                return not os.path.exists(lockpath)
            pid = util.rundetached(runargs, condfn)
            if pid < 0:
                raise error.Abort(_('child process failed to start'))
            writepid(pid)
        finally:
            try:
                os.unlink(lockpath)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
        if parentfn:
            return parentfn(pid)
        else:
            return

    if initfn:
        initfn()

    if not opts['daemon']:
        writepid(util.getpid())

    if opts['daemon_postexec']:
        try:
            os.setsid()
        except AttributeError:
            # platform without setsid (e.g. Windows)
            pass
        for inst in opts['daemon_postexec']:
            if inst.startswith('unlink:'):
                lockpath = inst[7:]
                os.unlink(lockpath)
            elif inst.startswith('chdir:'):
                os.chdir(inst[6:])
            elif inst != 'none':
                raise error.Abort(_('invalid value for --daemon-postexec: %s')
                                  % inst)
        util.hidewindow()
        sys.stdout.flush()
        sys.stderr.flush()

        # detach stdio: stdin from /dev/null, stdout/stderr to the log
        # file (or /dev/null when no logfile was given)
        nullfd = os.open(os.devnull, os.O_RDWR)
        logfilefd = nullfd
        if logfile:
            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
        os.dup2(nullfd, 0)
        os.dup2(logfilefd, 1)
        os.dup2(logfilefd, 2)
        if nullfd not in (0, 1, 2):
            os.close(nullfd)
        if logfile and logfilefd not in (0, 1, 2):
            os.close(logfilefd)

    if runfn:
        return runfn()
884
884
## facility to let extension process additional data into an import patch
# list of identifier to be executed in order
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# mapping from identifier to actual import function
#
# 'preimport' are run before the commit is made and are provided the following
# arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass an ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' are run after the commit is made and are provided the following
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
905
905
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it a simple code translation from
    the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a (message, node, rejects) tuple; node is None when nothing
    was committed and rejects is True when a partial apply left rejects.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        # nothing extracted: not a patch at all
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except patch.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                if partial:
                    rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
                extra = {}
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                try:
                    if partial:
                        repo.ui.setconfig('ui', 'allowemptycommit', True)
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                    for idfunc in extrapostimport:
                        extrapostimportmap[idfunc](repo[n])
                finally:
                    repo.ui.restoreconfig(allowemptyback)
        else:
            # --bypass: apply to a filestore and commit in-memory
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except patch.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.makememctx(repo, (p1.node(), p2.node()),
                                            message,
                                            user,
                                            date,
                                            branch, files, store,
                                            editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        os.unlink(tmpname)
1075
1075
# Facility for extensions to include additional data in an exported patch.
# 'extraexport' is an ordered list of identifiers, executed in order;
# 'extraexportmap' maps each identifier to a function that is given two
# arguments (sequencenumber, changectx) and has to return either a string
# to be added to the patch header or None.
extraexport = []
extraexportmap = {}
1083
1083
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches.

    repo -- repository to export from
    revs -- revisions to export, one patch per revision
    template -- filename template for per-revision output files; ignored
        when fp is given
    fp -- optional file object that all patches are written to; when
        neither fp nor template is usable, output goes to the ui
    switch_parent -- diff against the second parent instead of the first
    opts -- diff options forwarded to patch.diffui()
    match -- matcher restricting which files appear in each patch
    '''

    if not revs:
        # nothing to export; also avoids max() raising ValueError on an
        # empty sequence below
        return
    total = len(revs)
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    def single(rev, seqno, fp):
        # write one changeset as a patch, either to fp or to a file
        # derived from template
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()

        if parents:
            prev = parents[0]
        else:
            prev = nullid

        shouldclose = False
        if not fp and len(template) > 0:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='wb',
                             modemap=filemode)
            shouldclose = True
        if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
            repo.ui.note("%s\n" % fp.name)

        if not fp:
            write = repo.ui.write
        else:
            # adapt fp.write to the ui.write signature (labels dropped)
            def write(s, **kw):
                fp.write(s)

        write("# HG changeset patch\n")
        write("# User %s\n" % ctx.user())
        write("# Date %d %d\n" % ctx.date())
        write("# %s\n" % util.datestr(ctx.date()))
        if branch and branch != 'default':
            write("# Branch %s\n" % branch)
        write("# Node ID %s\n" % hex(node))
        write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            write("# Parent %s\n" % hex(parents[1]))

        # extension-provided extra header lines, in registration order
        for headerid in extraexport:
            header = extraexportmap[headerid](seqno, ctx)
            if header is not None:
                write('# %s\n' % header)
        write(ctx.description().rstrip())
        write("\n\n")

        for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
            write(chunk, label=label)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)
1148
1148
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.

    Writes the diff between node1 and node2 (or, with stat=True, a
    diffstat summary of it) to fp, or to the ui when fp is None.  A
    non-empty 'root' restricts output to paths under that directory;
    listsubrepos=True recurses into subrepositories as well.
    '''
    if fp is None:
        write = ui.write
    else:
        # adapt fp.write to the ui.write signature (labels are dropped)
        def write(s, **kw):
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        # warn about matched files that fall outside the relative root;
        # they will simply not appear in the output
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat needs no context lines; width follows the terminal
        # unless plain output was requested
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width,
                                             git=diffopts.git):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1206
1206
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        # When buffered is True, show() collects its output per revision
        # into self.header/self.hunk instead of writing immediately;
        # flush() later emits it (used for e.g. graph ordering).
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        self.header = {}        # rev -> buffered header text
        self.hunk = {}          # rev -> buffered changeset text
        self.lastheader = None  # last header written, to avoid repeats
        self.footer = None

    def flush(self, ctx):
        # Emit any buffered output for ctx.  Returns 1 if a hunk was
        # written, 0 otherwise.
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            # only write a header when it differs from the previous one
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        # write the accumulated footer, if any subclass produced one
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        # public entry point: render ctx now, or capture the rendering
        # into self.hunk when buffering is enabled
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()
        # full hashes in debug mode, short ones otherwise
        if self.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short
        # as of now, wctx.node() and wctx.rev() return None, but we want to
        # show the same values as {node} and {rev} templatekw
        revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % revnode, label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset: %d:%s\n") % revnode,
                      label='log.changeset changeset.%s' % ctx.phasestr())

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch: %s\n") % branch,
                          label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase: %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent: %d:%s\n")
                          % (pctx.rev(), hexfunc(pctx.node())),
                          label=label)

        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(mnode), hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user: %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date: %s\n") % date,
                      label='log.date')

        if self.ui.debugflag:
            # modified/added/removed relative to the first parent
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            # copies is a list of (dest, source) pairs
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies: %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # non-verbose: only the first line of the description
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn)

    def showpatch(self, ctx, matchfn):
        # Emit diffstat and/or the patch itself for ctx against its first
        # parent, as requested via self.diffopts ('stat'/'patch').
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    # blank line separating diffstat from the diff
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")
1376
1376
class jsonchangeset(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # _first tracks whether any changeset has been emitted yet, so
        # _show() knows when to open the JSON list vs. emit a separator
        self._first = True

    def close(self):
        # terminate the JSON document: close the open list, or emit an
        # empty list if no changeset was ever shown
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        if rev is None:
            # working-directory context: render rev/node as JSON null
            jrev = jnode = 'null'
        else:
            jrev = str(rev)
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            # quiet mode: only rev and node
            self.ui.write(('\n "rev": %s') % jrev)
            self.ui.write((',\n "node": %s') % jnode)
            self.ui.write('\n }')
            return

        self.ui.write(('\n "rev": %s') % jrev)
        self.ui.write((',\n "node": %s') % jnode)
        self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
        self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
        self.ui.write((',\n "user": "%s"') % j(ctx.user()))
        self.ui.write((',\n "date": [%d, %d]') % ctx.date())
        self.ui.write((',\n "desc": "%s"') % j(ctx.description()))

        self.ui.write((',\n "bookmarks": [%s]') %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write((',\n "tags": [%s]') %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write((',\n "parents": [%s]') %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            # debug mode adds manifest, extras and per-category file lists
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write((',\n "manifest": %s') % jmanifestnode)

            self.ui.write((',\n "extra": {%s}') %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            # status against the first parent: (modified, added, removed, ...)
            files = ctx.p1().status(ctx)
            self.ui.write((',\n "modified": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write((',\n "added": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write((',\n "removed": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write((',\n "files": [%s]') %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write((',\n "copies": {%s}') %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                # capture diffstat output in a buffer so it can be
                # embedded as a single JSON string
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write((',\n "diffstat": "%s"')
                              % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1475
1475
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        # full nodes in debug mode, 12-digit abbreviations otherwise
        formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
        filters = {'formatnode': formatnode}
        defaulttempl = {
            'parent': '{rev}:{node|formatnode} ',
            'manifest': '{rev}:{node|formatnode}',
            'file_copy': '{name} ({source})',
            'extra': '{key}={value|stringescape}'
        }
        # filecopy is preserved for compatibility reasons
        defaulttempl['filecopy'] = defaulttempl['file_copy']
        # callers pass either an inline template or a map file, never both
        assert not (tmpl and mapfile)
        if mapfile:
            self.t = templater.templater.frommapfile(mapfile, filters=filters,
                                                     cache=defaulttempl)
        else:
            self.t = formatter.maketemplater(ui, 'changeset', tmpl,
                                             filters=filters,
                                             cache=defaulttempl)

        self.cache = {}

        # find correct templates for current mode
        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        # map part name -> template key, preferring a mode-specific
        # variant (e.g. 'changeset_debug') when the template defines one;
        # later tmplmodes entries override earlier ones
        self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
                       'docheader': '', 'docfooter': ''}
        for mode, postfix in tmplmodes:
            for t in self._parts:
                cur = t
                if postfix:
                    cur += "_" + postfix
                if mode and cur in self.t:
                    self._parts[t] = cur

        # the document header is written once, immediately
        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        # append the document footer before the base class flushes it
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        # build the property dict the template engine evaluates against
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                # repeated identical headers are collapsed into one
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts['changeset']
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn)

        if self._parts['footer']:
            # only the first footer is kept; it is written by close()
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))
1560
1560
def gettemplate(ui, tmpl, style):
    """Resolve a template spec or style name to a (template, mapfile) pair.

    At most one element of the returned pair is set: either the template
    string itself, or the path of a style map file to load.
    """

    if not tmpl and not style:
        # neither given explicitly: fall back to ui configuration, where
        # a configured template takes precedence over a configured style
        configured = ui.config('ui', 'logtemplate')
        if configured:
            return templater.unquotestring(configured), None
        style = util.expandpath(ui.config('ui', 'style', ''))

    if not tmpl and style:
        # a bare style name (no directory part) refers to a bundled
        # map-cmdline.* file shipped with Mercurial
        candidate = style
        if not os.path.split(candidate)[0]:
            found = (templater.templatepath('map-cmdline.' + candidate)
                     or templater.templatepath(candidate))
            if found:
                candidate = found
        return None, candidate

    if not tmpl:
        return None, None

    return formatter.lookuptemplate(ui, 'changeset', tmpl)
1587
1587
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # --patch/--stat need a matcher to decide which files to render
    matchfn = None
    if opts.get('patch') or opts.get('stat'):
        matchfn = scmutil.matchall(repo)

    # JSON output bypasses the template machinery entirely
    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))

    if tmpl or mapfile:
        return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
                                   buffered)
    return changeset_printer(ui, repo, matchfn, opts, buffered)
1613
1613
def showmarker(ui, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    if index is not None:
        ui.write("%i " % index)
    ui.write(hex(marker.precnode()))
    for successor in marker.succnodes():
        ui.write(' ')
        ui.write(hex(successor))
    ui.write(' %X ' % marker.flags())
    parentnodes = marker.parentnodes()
    if parentnodes is not None:
        ui.write('{%s} ' % ', '.join(hex(p) for p in parentnodes))
    ui.write('(%s) ' % util.datestr(marker.date()))
    # metadata, minus the date which was already printed above
    meta = ['%r: %r' % item
            for item in sorted(marker.metadata().items())
            if item[0] != 'date']
    ui.write('{%s}' % ', '.join(meta))
    ui.write('\n')
1633
1633
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    dateok = util.matchdate(date)
    matcher = scmutil.matchall(repo)
    hits = {}

    def record(ctx, fns):
        # walkchangerevs() callback: remember every changeset whose
        # commit timestamp satisfies the date predicate
        when = ctx.date()
        if dateok(when[0]):
            hits[ctx.rev()] = when

    # walkchangerevs yields newest-first, so the first hit is tipmost
    for ctx in walkchangerevs(repo, matcher, {'rev': None}, record):
        rev = ctx.rev()
        if rev in hits:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(hits[rev])))
            return str(rev)

    raise error.Abort(_("revision matching date not found"))
1654
1654
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield revision-window sizes: start at windowsize, double on each
    step until sizelimit is reached, then repeat the final size forever."""
    size = windowsize
    while size < sizelimit:
        yield size
        size *= 2
    while True:
        yield size
1660
1660
class FileWalkError(Exception):
    """Raised when a file history cannot be walked via filelogs alone,
    signalling the caller to fall back to the slow changelog path."""
1663
1663
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.

    Side effect: fncache is populated, mapping each wanted rev to the
    list of matched files touched there.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        # Yield (filename, filenode) pairs: explicit matches first, then
        # any rename sources discovered while walking (appended to
        # "copies" by the main loop below).
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                # remember the rename source so iterfiles() visits it too
                copies.append(copied)

    return wanted
1760
1760
class _followfilter(object):
    """Stateful predicate selecting revisions related to a start point.

    The first revision passed to match() becomes the start revision;
    afterwards match() answers whether a revision is a descendant
    (when walking forward, rev > startrev) or an ancestor (walking
    backward) of it.  Revisions must be fed in a monotonic walk for the
    incremental "roots" bookkeeping to stay correct.
    """
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        # onlyfirst: follow only first parents (--follow-first)
        self.onlyfirst = onlyfirst

    def match(self, rev):
        """Return True if rev belongs to the followed line of history."""
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                # drop nullrev placeholders from the parent pair
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        if self.startrev == nullrev:
            # first call: anchor the walk on this revision
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    # rev descends from a known root; it becomes a root too
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                # rev is a pending ancestor; replace it by its parents
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1798
1798
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # patterns or --removed force the slow path (changelog scan)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns. Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    # first visit of this rev: test it once and cache
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                # smartset subtraction: drop pruned rev from wanted
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            # gather up to windowsize wanted revs from the iterator
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # prepare in forward (ascending) order ...
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        # lazily filter the changed files of this ctx
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # ... then yield in the requested (usually reverse) order
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
1936
1936
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "revfiles". "revfiles" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating linkrevs to file names (which is not "correct" but
    # good enough).
    revfiles = {}         # linkrev -> set of ancestor file paths
    populated = [False]   # one-cell mutable flag (no nonlocal in py2)
    pctx = repo['.']

    def _populate():
        for fn in files:
            fctx = pctx[fn]
            for chain in ((fctx,), fctx.ancestors(followfirst=followfirst)):
                for c in chain:
                    revfiles.setdefault(c.linkrev(), set()).add(c.path())

    def filematcher(rev):
        if not populated[0]:
            # populate lazily, on the first request only
            populated[0] = True
            _populate()
        return scmutil.matchfiles(repo, revfiles.get(rev, []))

    return filematcher
1963
1963
1964 def _makenofollowlogfilematcher(repo, pats, opts):
1964 def _makenofollowlogfilematcher(repo, pats, opts):
1965 '''hook for extensions to override the filematcher for non-follow cases'''
1965 '''hook for extensions to override the filematcher for non-follow cases'''
1966 return None
1966 return None
1967
1967
1968 def _makelogrevset(repo, pats, opts, revs):
1968 def _makelogrevset(repo, pats, opts, revs):
1969 """Return (expr, filematcher) where expr is a revset string built
1969 """Return (expr, filematcher) where expr is a revset string built
1970 from log options and file patterns or None. If --stat or --patch
1970 from log options and file patterns or None. If --stat or --patch
1971 are not passed filematcher is None. Otherwise it is a callable
1971 are not passed filematcher is None. Otherwise it is a callable
1972 taking a revision number and returning a match objects filtering
1972 taking a revision number and returning a match objects filtering
1973 the files to be detailed when displaying the revision.
1973 the files to be detailed when displaying the revision.
1974 """
1974 """
1975 opt2revset = {
1975 opt2revset = {
1976 'no_merges': ('not merge()', None),
1976 'no_merges': ('not merge()', None),
1977 'only_merges': ('merge()', None),
1977 'only_merges': ('merge()', None),
1978 '_ancestors': ('ancestors(%(val)s)', None),
1978 '_ancestors': ('ancestors(%(val)s)', None),
1979 '_fancestors': ('_firstancestors(%(val)s)', None),
1979 '_fancestors': ('_firstancestors(%(val)s)', None),
1980 '_descendants': ('descendants(%(val)s)', None),
1980 '_descendants': ('descendants(%(val)s)', None),
1981 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1981 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1982 '_matchfiles': ('_matchfiles(%(val)s)', None),
1982 '_matchfiles': ('_matchfiles(%(val)s)', None),
1983 'date': ('date(%(val)r)', None),
1983 'date': ('date(%(val)r)', None),
1984 'branch': ('branch(%(val)r)', ' or '),
1984 'branch': ('branch(%(val)r)', ' or '),
1985 '_patslog': ('filelog(%(val)r)', ' or '),
1985 '_patslog': ('filelog(%(val)r)', ' or '),
1986 '_patsfollow': ('follow(%(val)r)', ' or '),
1986 '_patsfollow': ('follow(%(val)r)', ' or '),
1987 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1987 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1988 'keyword': ('keyword(%(val)r)', ' or '),
1988 'keyword': ('keyword(%(val)r)', ' or '),
1989 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1989 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1990 'user': ('user(%(val)r)', ' or '),
1990 'user': ('user(%(val)r)', ' or '),
1991 }
1991 }
1992
1992
1993 opts = dict(opts)
1993 opts = dict(opts)
1994 # follow or not follow?
1994 # follow or not follow?
1995 follow = opts.get('follow') or opts.get('follow_first')
1995 follow = opts.get('follow') or opts.get('follow_first')
1996 if opts.get('follow_first'):
1996 if opts.get('follow_first'):
1997 followfirst = 1
1997 followfirst = 1
1998 else:
1998 else:
1999 followfirst = 0
1999 followfirst = 0
2000 # --follow with FILE behavior depends on revs...
2000 # --follow with FILE behavior depends on revs...
2001 it = iter(revs)
2001 it = iter(revs)
2002 startrev = next(it)
2002 startrev = next(it)
2003 followdescendants = startrev < next(it, startrev)
2003 followdescendants = startrev < next(it, startrev)
2004
2004
2005 # branch and only_branch are really aliases and must be handled at
2005 # branch and only_branch are really aliases and must be handled at
2006 # the same time
2006 # the same time
2007 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2007 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
2008 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2008 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
2009 # pats/include/exclude are passed to match.match() directly in
2009 # pats/include/exclude are passed to match.match() directly in
2010 # _matchfiles() revset but walkchangerevs() builds its matcher with
2010 # _matchfiles() revset but walkchangerevs() builds its matcher with
2011 # scmutil.match(). The difference is input pats are globbed on
2011 # scmutil.match(). The difference is input pats are globbed on
2012 # platforms without shell expansion (windows).
2012 # platforms without shell expansion (windows).
2013 wctx = repo[None]
2013 wctx = repo[None]
2014 match, pats = scmutil.matchandpats(wctx, pats, opts)
2014 match, pats = scmutil.matchandpats(wctx, pats, opts)
2015 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2015 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
2016 opts.get('removed'))
2016 opts.get('removed'))
2017 if not slowpath:
2017 if not slowpath:
2018 for f in match.files():
2018 for f in match.files():
2019 if follow and f not in wctx:
2019 if follow and f not in wctx:
2020 # If the file exists, it may be a directory, so let it
2020 # If the file exists, it may be a directory, so let it
2021 # take the slow path.
2021 # take the slow path.
2022 if os.path.exists(repo.wjoin(f)):
2022 if os.path.exists(repo.wjoin(f)):
2023 slowpath = True
2023 slowpath = True
2024 continue
2024 continue
2025 else:
2025 else:
2026 raise error.Abort(_('cannot follow file not in parent '
2026 raise error.Abort(_('cannot follow file not in parent '
2027 'revision: "%s"') % f)
2027 'revision: "%s"') % f)
2028 filelog = repo.file(f)
2028 filelog = repo.file(f)
2029 if not filelog:
2029 if not filelog:
2030 # A zero count may be a directory or deleted file, so
2030 # A zero count may be a directory or deleted file, so
2031 # try to find matching entries on the slow path.
2031 # try to find matching entries on the slow path.
2032 if follow:
2032 if follow:
2033 raise error.Abort(
2033 raise error.Abort(
2034 _('cannot follow nonexistent file: "%s"') % f)
2034 _('cannot follow nonexistent file: "%s"') % f)
2035 slowpath = True
2035 slowpath = True
2036
2036
2037 # We decided to fall back to the slowpath because at least one
2037 # We decided to fall back to the slowpath because at least one
2038 # of the paths was not a file. Check to see if at least one of them
2038 # of the paths was not a file. Check to see if at least one of them
2039 # existed in history - in that case, we'll continue down the
2039 # existed in history - in that case, we'll continue down the
2040 # slowpath; otherwise, we can turn off the slowpath
2040 # slowpath; otherwise, we can turn off the slowpath
2041 if slowpath:
2041 if slowpath:
2042 for path in match.files():
2042 for path in match.files():
2043 if path == '.' or path in repo.store:
2043 if path == '.' or path in repo.store:
2044 break
2044 break
2045 else:
2045 else:
2046 slowpath = False
2046 slowpath = False
2047
2047
2048 fpats = ('_patsfollow', '_patsfollowfirst')
2048 fpats = ('_patsfollow', '_patsfollowfirst')
2049 fnopats = (('_ancestors', '_fancestors'),
2049 fnopats = (('_ancestors', '_fancestors'),
2050 ('_descendants', '_fdescendants'))
2050 ('_descendants', '_fdescendants'))
2051 if slowpath:
2051 if slowpath:
2052 # See walkchangerevs() slow path.
2052 # See walkchangerevs() slow path.
2053 #
2053 #
2054 # pats/include/exclude cannot be represented as separate
2054 # pats/include/exclude cannot be represented as separate
2055 # revset expressions as their filtering logic applies at file
2055 # revset expressions as their filtering logic applies at file
2056 # level. For instance "-I a -X a" matches a revision touching
2056 # level. For instance "-I a -X a" matches a revision touching
2057 # "a" and "b" while "file(a) and not file(b)" does
2057 # "a" and "b" while "file(a) and not file(b)" does
2058 # not. Besides, filesets are evaluated against the working
2058 # not. Besides, filesets are evaluated against the working
2059 # directory.
2059 # directory.
2060 matchargs = ['r:', 'd:relpath']
2060 matchargs = ['r:', 'd:relpath']
2061 for p in pats:
2061 for p in pats:
2062 matchargs.append('p:' + p)
2062 matchargs.append('p:' + p)
2063 for p in opts.get('include', []):
2063 for p in opts.get('include', []):
2064 matchargs.append('i:' + p)
2064 matchargs.append('i:' + p)
2065 for p in opts.get('exclude', []):
2065 for p in opts.get('exclude', []):
2066 matchargs.append('x:' + p)
2066 matchargs.append('x:' + p)
2067 matchargs = ','.join(('%r' % p) for p in matchargs)
2067 matchargs = ','.join(('%r' % p) for p in matchargs)
2068 opts['_matchfiles'] = matchargs
2068 opts['_matchfiles'] = matchargs
2069 if follow:
2069 if follow:
2070 opts[fnopats[0][followfirst]] = '.'
2070 opts[fnopats[0][followfirst]] = '.'
2071 else:
2071 else:
2072 if follow:
2072 if follow:
2073 if pats:
2073 if pats:
2074 # follow() revset interprets its file argument as a
2074 # follow() revset interprets its file argument as a
2075 # manifest entry, so use match.files(), not pats.
2075 # manifest entry, so use match.files(), not pats.
2076 opts[fpats[followfirst]] = list(match.files())
2076 opts[fpats[followfirst]] = list(match.files())
2077 else:
2077 else:
2078 op = fnopats[followdescendants][followfirst]
2078 op = fnopats[followdescendants][followfirst]
2079 opts[op] = 'rev(%d)' % startrev
2079 opts[op] = 'rev(%d)' % startrev
2080 else:
2080 else:
2081 opts['_patslog'] = list(pats)
2081 opts['_patslog'] = list(pats)
2082
2082
2083 filematcher = None
2083 filematcher = None
2084 if opts.get('patch') or opts.get('stat'):
2084 if opts.get('patch') or opts.get('stat'):
2085 # When following files, track renames via a special matcher.
2085 # When following files, track renames via a special matcher.
2086 # If we're forced to take the slowpath it means we're following
2086 # If we're forced to take the slowpath it means we're following
2087 # at least one pattern/directory, so don't bother with rename tracking.
2087 # at least one pattern/directory, so don't bother with rename tracking.
2088 if follow and not match.always() and not slowpath:
2088 if follow and not match.always() and not slowpath:
2089 # _makefollowlogfilematcher expects its files argument to be
2089 # _makefollowlogfilematcher expects its files argument to be
2090 # relative to the repo root, so use match.files(), not pats.
2090 # relative to the repo root, so use match.files(), not pats.
2091 filematcher = _makefollowlogfilematcher(repo, match.files(),
2091 filematcher = _makefollowlogfilematcher(repo, match.files(),
2092 followfirst)
2092 followfirst)
2093 else:
2093 else:
2094 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2094 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2095 if filematcher is None:
2095 if filematcher is None:
2096 filematcher = lambda rev: match
2096 filematcher = lambda rev: match
2097
2097
2098 expr = []
2098 expr = []
2099 for op, val in sorted(opts.iteritems()):
2099 for op, val in sorted(opts.iteritems()):
2100 if not val:
2100 if not val:
2101 continue
2101 continue
2102 if op not in opt2revset:
2102 if op not in opt2revset:
2103 continue
2103 continue
2104 revop, andor = opt2revset[op]
2104 revop, andor = opt2revset[op]
2105 if '%(val)' not in revop:
2105 if '%(val)' not in revop:
2106 expr.append(revop)
2106 expr.append(revop)
2107 else:
2107 else:
2108 if not isinstance(val, list):
2108 if not isinstance(val, list):
2109 e = revop % {'val': val}
2109 e = revop % {'val': val}
2110 else:
2110 else:
2111 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2111 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2112 expr.append(e)
2112 expr.append(e)
2113
2113
2114 if expr:
2114 if expr:
2115 expr = '(' + ' and '.join(expr) + ')'
2115 expr = '(' + ' and '.join(expr) + ')'
2116 else:
2116 else:
2117 expr = None
2117 expr = None
2118 return expr, filematcher
2118 return expr, filematcher
2119
2119
2120 def _logrevs(repo, opts):
2120 def _logrevs(repo, opts):
2121 # Default --rev value depends on --follow but --follow behavior
2121 # Default --rev value depends on --follow but --follow behavior
2122 # depends on revisions resolved from --rev...
2122 # depends on revisions resolved from --rev...
2123 follow = opts.get('follow') or opts.get('follow_first')
2123 follow = opts.get('follow') or opts.get('follow_first')
2124 if opts.get('rev'):
2124 if opts.get('rev'):
2125 revs = scmutil.revrange(repo, opts['rev'])
2125 revs = scmutil.revrange(repo, opts['rev'])
2126 elif follow and repo.dirstate.p1() == nullid:
2126 elif follow and repo.dirstate.p1() == nullid:
2127 revs = revset.baseset()
2127 revs = revset.baseset()
2128 elif follow:
2128 elif follow:
2129 revs = repo.revs('reverse(:.)')
2129 revs = repo.revs('reverse(:.)')
2130 else:
2130 else:
2131 revs = revset.spanset(repo)
2131 revs = revset.spanset(repo)
2132 revs.reverse()
2132 revs.reverse()
2133 return revs
2133 return revs
2134
2134
2135 def getgraphlogrevs(repo, pats, opts):
2135 def getgraphlogrevs(repo, pats, opts):
2136 """Return (revs, expr, filematcher) where revs is an iterable of
2136 """Return (revs, expr, filematcher) where revs is an iterable of
2137 revision numbers, expr is a revset string built from log options
2137 revision numbers, expr is a revset string built from log options
2138 and file patterns or None, and used to filter 'revs'. If --stat or
2138 and file patterns or None, and used to filter 'revs'. If --stat or
2139 --patch are not passed filematcher is None. Otherwise it is a
2139 --patch are not passed filematcher is None. Otherwise it is a
2140 callable taking a revision number and returning a match objects
2140 callable taking a revision number and returning a match objects
2141 filtering the files to be detailed when displaying the revision.
2141 filtering the files to be detailed when displaying the revision.
2142 """
2142 """
2143 limit = loglimit(opts)
2143 limit = loglimit(opts)
2144 revs = _logrevs(repo, opts)
2144 revs = _logrevs(repo, opts)
2145 if not revs:
2145 if not revs:
2146 return revset.baseset(), None, None
2146 return revset.baseset(), None, None
2147 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2147 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2148 if opts.get('rev'):
2148 if opts.get('rev'):
2149 # User-specified revs might be unsorted, but don't sort before
2149 # User-specified revs might be unsorted, but don't sort before
2150 # _makelogrevset because it might depend on the order of revs
2150 # _makelogrevset because it might depend on the order of revs
2151 if not (revs.isdescending() or revs.istopo()):
2151 if not (revs.isdescending() or revs.istopo()):
2152 revs.sort(reverse=True)
2152 revs.sort(reverse=True)
2153 if expr:
2153 if expr:
2154 # Revset matchers often operate faster on revisions in changelog
2154 # Revset matchers often operate faster on revisions in changelog
2155 # order, because most filters deal with the changelog.
2155 # order, because most filters deal with the changelog.
2156 revs.reverse()
2156 revs.reverse()
2157 matcher = revset.match(repo.ui, expr)
2157 matcher = revset.match(repo.ui, expr)
2158 # Revset matches can reorder revisions. "A or B" typically returns
2158 # Revset matches can reorder revisions. "A or B" typically returns
2159 # returns the revision matching A then the revision matching B. Sort
2159 # returns the revision matching A then the revision matching B. Sort
2160 # again to fix that.
2160 # again to fix that.
2161 revs = matcher(repo, revs)
2161 revs = matcher(repo, revs)
2162 revs.sort(reverse=True)
2162 revs.sort(reverse=True)
2163 if limit is not None:
2163 if limit is not None:
2164 limitedrevs = []
2164 limitedrevs = []
2165 for idx, rev in enumerate(revs):
2165 for idx, rev in enumerate(revs):
2166 if idx >= limit:
2166 if idx >= limit:
2167 break
2167 break
2168 limitedrevs.append(rev)
2168 limitedrevs.append(rev)
2169 revs = revset.baseset(limitedrevs)
2169 revs = revset.baseset(limitedrevs)
2170
2170
2171 return revs, expr, filematcher
2171 return revs, expr, filematcher
2172
2172
2173 def getlogrevs(repo, pats, opts):
2173 def getlogrevs(repo, pats, opts):
2174 """Return (revs, expr, filematcher) where revs is an iterable of
2174 """Return (revs, expr, filematcher) where revs is an iterable of
2175 revision numbers, expr is a revset string built from log options
2175 revision numbers, expr is a revset string built from log options
2176 and file patterns or None, and used to filter 'revs'. If --stat or
2176 and file patterns or None, and used to filter 'revs'. If --stat or
2177 --patch are not passed filematcher is None. Otherwise it is a
2177 --patch are not passed filematcher is None. Otherwise it is a
2178 callable taking a revision number and returning a match objects
2178 callable taking a revision number and returning a match objects
2179 filtering the files to be detailed when displaying the revision.
2179 filtering the files to be detailed when displaying the revision.
2180 """
2180 """
2181 limit = loglimit(opts)
2181 limit = loglimit(opts)
2182 revs = _logrevs(repo, opts)
2182 revs = _logrevs(repo, opts)
2183 if not revs:
2183 if not revs:
2184 return revset.baseset([]), None, None
2184 return revset.baseset([]), None, None
2185 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2185 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2186 if expr:
2186 if expr:
2187 # Revset matchers often operate faster on revisions in changelog
2187 # Revset matchers often operate faster on revisions in changelog
2188 # order, because most filters deal with the changelog.
2188 # order, because most filters deal with the changelog.
2189 if not opts.get('rev'):
2189 if not opts.get('rev'):
2190 revs.reverse()
2190 revs.reverse()
2191 matcher = revset.match(repo.ui, expr)
2191 matcher = revset.match(repo.ui, expr)
2192 # Revset matches can reorder revisions. "A or B" typically returns
2192 # Revset matches can reorder revisions. "A or B" typically returns
2193 # returns the revision matching A then the revision matching B. Sort
2193 # returns the revision matching A then the revision matching B. Sort
2194 # again to fix that.
2194 # again to fix that.
2195 fixopts = ['branch', 'only_branch', 'keyword', 'user']
2195 fixopts = ['branch', 'only_branch', 'keyword', 'user']
2196 oldrevs = revs
2196 oldrevs = revs
2197 revs = matcher(repo, revs)
2197 revs = matcher(repo, revs)
2198 if not opts.get('rev'):
2198 if not opts.get('rev'):
2199 revs.sort(reverse=True)
2199 revs.sort(reverse=True)
2200 elif len(pats) > 1 or any(len(opts.get(op, [])) > 1 for op in fixopts):
2200 elif len(pats) > 1 or any(len(opts.get(op, [])) > 1 for op in fixopts):
2201 # XXX "A or B" is known to change the order; fix it by filtering
2201 # XXX "A or B" is known to change the order; fix it by filtering
2202 # matched set again (issue5100)
2202 # matched set again (issue5100)
2203 revs = oldrevs & revs
2203 revs = oldrevs & revs
2204 if limit is not None:
2204 if limit is not None:
2205 limitedrevs = []
2205 limitedrevs = []
2206 for idx, r in enumerate(revs):
2206 for idx, r in enumerate(revs):
2207 if limit <= idx:
2207 if limit <= idx:
2208 break
2208 break
2209 limitedrevs.append(r)
2209 limitedrevs.append(r)
2210 revs = revset.baseset(limitedrevs)
2210 revs = revset.baseset(limitedrevs)
2211
2211
2212 return revs, expr, filematcher
2212 return revs, expr, filematcher
2213
2213
2214 def _graphnodeformatter(ui, displayer):
2214 def _graphnodeformatter(ui, displayer):
2215 spec = ui.config('ui', 'graphnodetemplate')
2215 spec = ui.config('ui', 'graphnodetemplate')
2216 if not spec:
2216 if not spec:
2217 return templatekw.showgraphnode # fast path for "{graphnode}"
2217 return templatekw.showgraphnode # fast path for "{graphnode}"
2218
2218
2219 templ = formatter.gettemplater(ui, 'graphnode', spec)
2219 templ = formatter.gettemplater(ui, 'graphnode', spec)
2220 cache = {}
2220 cache = {}
2221 if isinstance(displayer, changeset_templater):
2221 if isinstance(displayer, changeset_templater):
2222 cache = displayer.cache # reuse cache of slow templates
2222 cache = displayer.cache # reuse cache of slow templates
2223 props = templatekw.keywords.copy()
2223 props = templatekw.keywords.copy()
2224 props['templ'] = templ
2224 props['templ'] = templ
2225 props['cache'] = cache
2225 props['cache'] = cache
2226 def formatnode(repo, ctx):
2226 def formatnode(repo, ctx):
2227 props['ctx'] = ctx
2227 props['ctx'] = ctx
2228 props['repo'] = repo
2228 props['repo'] = repo
2229 props['ui'] = repo.ui
2229 props['ui'] = repo.ui
2230 props['revcache'] = {}
2230 props['revcache'] = {}
2231 return templater.stringify(templ('graphnode', **props))
2231 return templater.stringify(templ('graphnode', **props))
2232 return formatnode
2232 return formatnode
2233
2233
2234 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2234 def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
2235 filematcher=None):
2235 filematcher=None):
2236 formatnode = _graphnodeformatter(ui, displayer)
2236 formatnode = _graphnodeformatter(ui, displayer)
2237 state = graphmod.asciistate()
2237 state = graphmod.asciistate()
2238 styles = state['styles']
2238 styles = state['styles']
2239
2239
2240 # only set graph styling if HGPLAIN is not set.
2240 # only set graph styling if HGPLAIN is not set.
2241 if ui.plain('graph'):
2241 if ui.plain('graph'):
2242 # set all edge styles to |, the default pre-3.8 behaviour
2242 # set all edge styles to |, the default pre-3.8 behaviour
2243 styles.update(dict.fromkeys(styles, '|'))
2243 styles.update(dict.fromkeys(styles, '|'))
2244 else:
2244 else:
2245 edgetypes = {
2245 edgetypes = {
2246 'parent': graphmod.PARENT,
2246 'parent': graphmod.PARENT,
2247 'grandparent': graphmod.GRANDPARENT,
2247 'grandparent': graphmod.GRANDPARENT,
2248 'missing': graphmod.MISSINGPARENT
2248 'missing': graphmod.MISSINGPARENT
2249 }
2249 }
2250 for name, key in edgetypes.items():
2250 for name, key in edgetypes.items():
2251 # experimental config: experimental.graphstyle.*
2251 # experimental config: experimental.graphstyle.*
2252 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2252 styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
2253 styles[key])
2253 styles[key])
2254 if not styles[key]:
2254 if not styles[key]:
2255 styles[key] = None
2255 styles[key] = None
2256
2256
2257 # experimental config: experimental.graphshorten
2257 # experimental config: experimental.graphshorten
2258 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2258 state['graphshorten'] = ui.configbool('experimental', 'graphshorten')
2259
2259
2260 for rev, type, ctx, parents in dag:
2260 for rev, type, ctx, parents in dag:
2261 char = formatnode(repo, ctx)
2261 char = formatnode(repo, ctx)
2262 copies = None
2262 copies = None
2263 if getrenamed and ctx.rev():
2263 if getrenamed and ctx.rev():
2264 copies = []
2264 copies = []
2265 for fn in ctx.files():
2265 for fn in ctx.files():
2266 rename = getrenamed(fn, ctx.rev())
2266 rename = getrenamed(fn, ctx.rev())
2267 if rename:
2267 if rename:
2268 copies.append((fn, rename[0]))
2268 copies.append((fn, rename[0]))
2269 revmatchfn = None
2269 revmatchfn = None
2270 if filematcher is not None:
2270 if filematcher is not None:
2271 revmatchfn = filematcher(ctx.rev())
2271 revmatchfn = filematcher(ctx.rev())
2272 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2272 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2273 lines = displayer.hunk.pop(rev).split('\n')
2273 lines = displayer.hunk.pop(rev).split('\n')
2274 if not lines[-1]:
2274 if not lines[-1]:
2275 del lines[-1]
2275 del lines[-1]
2276 displayer.flush(ctx)
2276 displayer.flush(ctx)
2277 edges = edgefn(type, char, lines, state, rev, parents)
2277 edges = edgefn(type, char, lines, state, rev, parents)
2278 for type, char, lines, coldata in edges:
2278 for type, char, lines, coldata in edges:
2279 graphmod.ascii(ui, state, type, char, lines, coldata)
2279 graphmod.ascii(ui, state, type, char, lines, coldata)
2280 displayer.close()
2280 displayer.close()
2281
2281
2282 def graphlog(ui, repo, *pats, **opts):
2282 def graphlog(ui, repo, *pats, **opts):
2283 # Parameters are identical to log command ones
2283 # Parameters are identical to log command ones
2284 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2284 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2285 revdag = graphmod.dagwalker(repo, revs)
2285 revdag = graphmod.dagwalker(repo, revs)
2286
2286
2287 getrenamed = None
2287 getrenamed = None
2288 if opts.get('copies'):
2288 if opts.get('copies'):
2289 endrev = None
2289 endrev = None
2290 if opts.get('rev'):
2290 if opts.get('rev'):
2291 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2291 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2292 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2292 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2293 displayer = show_changeset(ui, repo, opts, buffered=True)
2293 displayer = show_changeset(ui, repo, opts, buffered=True)
2294 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2294 displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
2295 filematcher)
2295 filematcher)
2296
2296
2297 def checkunsupportedgraphflags(pats, opts):
2297 def checkunsupportedgraphflags(pats, opts):
2298 for op in ["newest_first"]:
2298 for op in ["newest_first"]:
2299 if op in opts and opts[op]:
2299 if op in opts and opts[op]:
2300 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2300 raise error.Abort(_("-G/--graph option is incompatible with --%s")
2301 % op.replace("_", "-"))
2301 % op.replace("_", "-"))
2302
2302
2303 def graphrevs(repo, nodes, opts):
2303 def graphrevs(repo, nodes, opts):
2304 limit = loglimit(opts)
2304 limit = loglimit(opts)
2305 nodes.reverse()
2305 nodes.reverse()
2306 if limit is not None:
2306 if limit is not None:
2307 nodes = nodes[:limit]
2307 nodes = nodes[:limit]
2308 return graphmod.nodes(repo, nodes)
2308 return graphmod.nodes(repo, nodes)
2309
2309
2310 def add(ui, repo, match, prefix, explicitonly, **opts):
2310 def add(ui, repo, match, prefix, explicitonly, **opts):
2311 join = lambda f: os.path.join(prefix, f)
2311 join = lambda f: os.path.join(prefix, f)
2312 bad = []
2312 bad = []
2313
2313
2314 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2314 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2315 names = []
2315 names = []
2316 wctx = repo[None]
2316 wctx = repo[None]
2317 cca = None
2317 cca = None
2318 abort, warn = scmutil.checkportabilityalert(ui)
2318 abort, warn = scmutil.checkportabilityalert(ui)
2319 if abort or warn:
2319 if abort or warn:
2320 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2320 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2321
2321
2322 badmatch = matchmod.badmatch(match, badfn)
2322 badmatch = matchmod.badmatch(match, badfn)
2323 dirstate = repo.dirstate
2323 dirstate = repo.dirstate
2324 # We don't want to just call wctx.walk here, since it would return a lot of
2324 # We don't want to just call wctx.walk here, since it would return a lot of
2325 # clean files, which we aren't interested in and takes time.
2325 # clean files, which we aren't interested in and takes time.
2326 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2326 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2327 True, False, full=False)):
2327 True, False, full=False)):
2328 exact = match.exact(f)
2328 exact = match.exact(f)
2329 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2329 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2330 if cca:
2330 if cca:
2331 cca(f)
2331 cca(f)
2332 names.append(f)
2332 names.append(f)
2333 if ui.verbose or not exact:
2333 if ui.verbose or not exact:
2334 ui.status(_('adding %s\n') % match.rel(f))
2334 ui.status(_('adding %s\n') % match.rel(f))
2335
2335
2336 for subpath in sorted(wctx.substate):
2336 for subpath in sorted(wctx.substate):
2337 sub = wctx.sub(subpath)
2337 sub = wctx.sub(subpath)
2338 try:
2338 try:
2339 submatch = matchmod.subdirmatcher(subpath, match)
2339 submatch = matchmod.subdirmatcher(subpath, match)
2340 if opts.get('subrepos'):
2340 if opts.get('subrepos'):
2341 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2341 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2342 else:
2342 else:
2343 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2343 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2344 except error.LookupError:
2344 except error.LookupError:
2345 ui.status(_("skipping missing subrepository: %s\n")
2345 ui.status(_("skipping missing subrepository: %s\n")
2346 % join(subpath))
2346 % join(subpath))
2347
2347
2348 if not opts.get('dry_run'):
2348 if not opts.get('dry_run'):
2349 rejected = wctx.add(names, prefix)
2349 rejected = wctx.add(names, prefix)
2350 bad.extend(f for f in rejected if f in match.files())
2350 bad.extend(f for f in rejected if f in match.files())
2351 return bad
2351 return bad
2352
2352
2353 def forget(ui, repo, match, prefix, explicitonly):
2353 def forget(ui, repo, match, prefix, explicitonly):
2354 join = lambda f: os.path.join(prefix, f)
2354 join = lambda f: os.path.join(prefix, f)
2355 bad = []
2355 bad = []
2356 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2356 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2357 wctx = repo[None]
2357 wctx = repo[None]
2358 forgot = []
2358 forgot = []
2359
2359
2360 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2360 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2361 forget = sorted(s[0] + s[1] + s[3] + s[6])
2361 forget = sorted(s[0] + s[1] + s[3] + s[6])
2362 if explicitonly:
2362 if explicitonly:
2363 forget = [f for f in forget if match.exact(f)]
2363 forget = [f for f in forget if match.exact(f)]
2364
2364
2365 for subpath in sorted(wctx.substate):
2365 for subpath in sorted(wctx.substate):
2366 sub = wctx.sub(subpath)
2366 sub = wctx.sub(subpath)
2367 try:
2367 try:
2368 submatch = matchmod.subdirmatcher(subpath, match)
2368 submatch = matchmod.subdirmatcher(subpath, match)
2369 subbad, subforgot = sub.forget(submatch, prefix)
2369 subbad, subforgot = sub.forget(submatch, prefix)
2370 bad.extend([subpath + '/' + f for f in subbad])
2370 bad.extend([subpath + '/' + f for f in subbad])
2371 forgot.extend([subpath + '/' + f for f in subforgot])
2371 forgot.extend([subpath + '/' + f for f in subforgot])
2372 except error.LookupError:
2372 except error.LookupError:
2373 ui.status(_("skipping missing subrepository: %s\n")
2373 ui.status(_("skipping missing subrepository: %s\n")
2374 % join(subpath))
2374 % join(subpath))
2375
2375
2376 if not explicitonly:
2376 if not explicitonly:
2377 for f in match.files():
2377 for f in match.files():
2378 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2378 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2379 if f not in forgot:
2379 if f not in forgot:
2380 if repo.wvfs.exists(f):
2380 if repo.wvfs.exists(f):
2381 # Don't complain if the exact case match wasn't given.
2381 # Don't complain if the exact case match wasn't given.
2382 # But don't do this until after checking 'forgot', so
2382 # But don't do this until after checking 'forgot', so
2383 # that subrepo files aren't normalized, and this op is
2383 # that subrepo files aren't normalized, and this op is
2384 # purely from data cached by the status walk above.
2384 # purely from data cached by the status walk above.
2385 if repo.dirstate.normalize(f) in repo.dirstate:
2385 if repo.dirstate.normalize(f) in repo.dirstate:
2386 continue
2386 continue
2387 ui.warn(_('not removing %s: '
2387 ui.warn(_('not removing %s: '
2388 'file is already untracked\n')
2388 'file is already untracked\n')
2389 % match.rel(f))
2389 % match.rel(f))
2390 bad.append(f)
2390 bad.append(f)
2391
2391
2392 for f in forget:
2392 for f in forget:
2393 if ui.verbose or not match.exact(f):
2393 if ui.verbose or not match.exact(f):
2394 ui.status(_('removing %s\n') % match.rel(f))
2394 ui.status(_('removing %s\n') % match.rel(f))
2395
2395
2396 rejected = wctx.forget(forget, prefix)
2396 rejected = wctx.forget(forget, prefix)
2397 bad.extend(f for f in rejected if f in match.files())
2397 bad.extend(f for f in rejected if f in match.files())
2398 forgot.extend(f for f in forget if f not in rejected)
2398 forgot.extend(f for f in forget if f not in rejected)
2399 return bad, forgot
2399 return bad, forgot
2400
2400
2401 def files(ui, ctx, m, fm, fmt, subrepos):
2401 def files(ui, ctx, m, fm, fmt, subrepos):
2402 rev = ctx.rev()
2402 rev = ctx.rev()
2403 ret = 1
2403 ret = 1
2404 ds = ctx.repo().dirstate
2404 ds = ctx.repo().dirstate
2405
2405
2406 for f in ctx.matches(m):
2406 for f in ctx.matches(m):
2407 if rev is None and ds[f] == 'r':
2407 if rev is None and ds[f] == 'r':
2408 continue
2408 continue
2409 fm.startitem()
2409 fm.startitem()
2410 if ui.verbose:
2410 if ui.verbose:
2411 fc = ctx[f]
2411 fc = ctx[f]
2412 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2412 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2413 fm.data(abspath=f)
2413 fm.data(abspath=f)
2414 fm.write('path', fmt, m.rel(f))
2414 fm.write('path', fmt, m.rel(f))
2415 ret = 0
2415 ret = 0
2416
2416
2417 for subpath in sorted(ctx.substate):
2417 for subpath in sorted(ctx.substate):
2418 def matchessubrepo(subpath):
2418 if subrepos or m.matchessubrepo(subpath):
2419 return (m.exact(subpath)
2420 or any(f.startswith(subpath + '/') for f in m.files()))
2421
2422 if subrepos or matchessubrepo(subpath):
2423 sub = ctx.sub(subpath)
2419 sub = ctx.sub(subpath)
2424 try:
2420 try:
2425 submatch = matchmod.subdirmatcher(subpath, m)
2421 submatch = matchmod.subdirmatcher(subpath, m)
2426 recurse = m.exact(subpath) or subrepos
2422 recurse = m.exact(subpath) or subrepos
2427 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2423 if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
2428 ret = 0
2424 ret = 0
2429 except error.LookupError:
2425 except error.LookupError:
2430 ui.status(_("skipping missing subrepository: %s\n")
2426 ui.status(_("skipping missing subrepository: %s\n")
2431 % m.abs(subpath))
2427 % m.abs(subpath))
2432
2428
2433 return ret
2429 return ret
2434
2430
2435 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2431 def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
2436 join = lambda f: os.path.join(prefix, f)
2432 join = lambda f: os.path.join(prefix, f)
2437 ret = 0
2433 ret = 0
2438 s = repo.status(match=m, clean=True)
2434 s = repo.status(match=m, clean=True)
2439 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2435 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2440
2436
2441 wctx = repo[None]
2437 wctx = repo[None]
2442
2438
2443 if warnings is None:
2439 if warnings is None:
2444 warnings = []
2440 warnings = []
2445 warn = True
2441 warn = True
2446 else:
2442 else:
2447 warn = False
2443 warn = False
2448
2444
2449 subs = sorted(wctx.substate)
2445 subs = sorted(wctx.substate)
2450 total = len(subs)
2446 total = len(subs)
2451 count = 0
2447 count = 0
2452 for subpath in subs:
2448 for subpath in subs:
2453 def matchessubrepo(matcher, subpath):
2454 if matcher.exact(subpath):
2455 return True
2456 for f in matcher.files():
2457 if f.startswith(subpath):
2458 return True
2459 return False
2460
2461 count += 1
2449 count += 1
2462 if subrepos or matchessubrepo(m, subpath):
2450 if subrepos or m.matchessubrepo(subpath):
2463 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2451 ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
2464
2452
2465 sub = wctx.sub(subpath)
2453 sub = wctx.sub(subpath)
2466 try:
2454 try:
2467 submatch = matchmod.subdirmatcher(subpath, m)
2455 submatch = matchmod.subdirmatcher(subpath, m)
2468 if sub.removefiles(submatch, prefix, after, force, subrepos,
2456 if sub.removefiles(submatch, prefix, after, force, subrepos,
2469 warnings):
2457 warnings):
2470 ret = 1
2458 ret = 1
2471 except error.LookupError:
2459 except error.LookupError:
2472 warnings.append(_("skipping missing subrepository: %s\n")
2460 warnings.append(_("skipping missing subrepository: %s\n")
2473 % join(subpath))
2461 % join(subpath))
2474 ui.progress(_('searching'), None)
2462 ui.progress(_('searching'), None)
2475
2463
2476 # warn about failure to delete explicit files/dirs
2464 # warn about failure to delete explicit files/dirs
2477 deleteddirs = util.dirs(deleted)
2465 deleteddirs = util.dirs(deleted)
2478 files = m.files()
2466 files = m.files()
2479 total = len(files)
2467 total = len(files)
2480 count = 0
2468 count = 0
2481 for f in files:
2469 for f in files:
2482 def insubrepo():
2470 def insubrepo():
2483 for subpath in wctx.substate:
2471 for subpath in wctx.substate:
2484 if f.startswith(subpath + '/'):
2472 if f.startswith(subpath + '/'):
2485 return True
2473 return True
2486 return False
2474 return False
2487
2475
2488 count += 1
2476 count += 1
2489 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2477 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2490 isdir = f in deleteddirs or wctx.hasdir(f)
2478 isdir = f in deleteddirs or wctx.hasdir(f)
2491 if (f in repo.dirstate or isdir or f == '.'
2479 if (f in repo.dirstate or isdir or f == '.'
2492 or insubrepo() or f in subs):
2480 or insubrepo() or f in subs):
2493 continue
2481 continue
2494
2482
2495 if repo.wvfs.exists(f):
2483 if repo.wvfs.exists(f):
2496 if repo.wvfs.isdir(f):
2484 if repo.wvfs.isdir(f):
2497 warnings.append(_('not removing %s: no tracked files\n')
2485 warnings.append(_('not removing %s: no tracked files\n')
2498 % m.rel(f))
2486 % m.rel(f))
2499 else:
2487 else:
2500 warnings.append(_('not removing %s: file is untracked\n')
2488 warnings.append(_('not removing %s: file is untracked\n')
2501 % m.rel(f))
2489 % m.rel(f))
2502 # missing files will generate a warning elsewhere
2490 # missing files will generate a warning elsewhere
2503 ret = 1
2491 ret = 1
2504 ui.progress(_('deleting'), None)
2492 ui.progress(_('deleting'), None)
2505
2493
2506 if force:
2494 if force:
2507 list = modified + deleted + clean + added
2495 list = modified + deleted + clean + added
2508 elif after:
2496 elif after:
2509 list = deleted
2497 list = deleted
2510 remaining = modified + added + clean
2498 remaining = modified + added + clean
2511 total = len(remaining)
2499 total = len(remaining)
2512 count = 0
2500 count = 0
2513 for f in remaining:
2501 for f in remaining:
2514 count += 1
2502 count += 1
2515 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2503 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2516 warnings.append(_('not removing %s: file still exists\n')
2504 warnings.append(_('not removing %s: file still exists\n')
2517 % m.rel(f))
2505 % m.rel(f))
2518 ret = 1
2506 ret = 1
2519 ui.progress(_('skipping'), None)
2507 ui.progress(_('skipping'), None)
2520 else:
2508 else:
2521 list = deleted + clean
2509 list = deleted + clean
2522 total = len(modified) + len(added)
2510 total = len(modified) + len(added)
2523 count = 0
2511 count = 0
2524 for f in modified:
2512 for f in modified:
2525 count += 1
2513 count += 1
2526 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2514 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2527 warnings.append(_('not removing %s: file is modified (use -f'
2515 warnings.append(_('not removing %s: file is modified (use -f'
2528 ' to force removal)\n') % m.rel(f))
2516 ' to force removal)\n') % m.rel(f))
2529 ret = 1
2517 ret = 1
2530 for f in added:
2518 for f in added:
2531 count += 1
2519 count += 1
2532 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2520 ui.progress(_('skipping'), count, total=total, unit=_('files'))
2533 warnings.append(_('not removing %s: file has been marked for add'
2521 warnings.append(_('not removing %s: file has been marked for add'
2534 ' (use forget to undo)\n') % m.rel(f))
2522 ' (use forget to undo)\n') % m.rel(f))
2535 ret = 1
2523 ret = 1
2536 ui.progress(_('skipping'), None)
2524 ui.progress(_('skipping'), None)
2537
2525
2538 list = sorted(list)
2526 list = sorted(list)
2539 total = len(list)
2527 total = len(list)
2540 count = 0
2528 count = 0
2541 for f in list:
2529 for f in list:
2542 count += 1
2530 count += 1
2543 if ui.verbose or not m.exact(f):
2531 if ui.verbose or not m.exact(f):
2544 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2532 ui.progress(_('deleting'), count, total=total, unit=_('files'))
2545 ui.status(_('removing %s\n') % m.rel(f))
2533 ui.status(_('removing %s\n') % m.rel(f))
2546 ui.progress(_('deleting'), None)
2534 ui.progress(_('deleting'), None)
2547
2535
2548 with repo.wlock():
2536 with repo.wlock():
2549 if not after:
2537 if not after:
2550 for f in list:
2538 for f in list:
2551 if f in added:
2539 if f in added:
2552 continue # we never unlink added files on remove
2540 continue # we never unlink added files on remove
2553 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2541 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2554 repo[None].forget(list)
2542 repo[None].forget(list)
2555
2543
2556 if warn:
2544 if warn:
2557 for warning in warnings:
2545 for warning in warnings:
2558 ui.warn(warning)
2546 ui.warn(warning)
2559
2547
2560 return ret
2548 return ret
2561
2549
def cat(ui, repo, ctx, matcher, prefix, **opts):
    """Write out the data of the files in ctx selected by matcher.

    Output is routed through makefileobj() (so --output patterns are
    honored) with 'prefix' prepended to each pathname.  Recurses into the
    subrepositories recorded in ctx.substate.  Returns 0 if at least one
    file was written, 1 otherwise.
    """
    err = 1

    def emit(path):
        # open the output sink for this single file
        fp = makefileobj(repo, opts.get('output'), ctx.node(),
                         pathname=os.path.join(prefix, path))
        data = ctx[path].data()
        if opts.get('decode'):
            data = repo.wwritedata(path, data)
        fp.write(data)
        fp.close()

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        fname = matcher.files()[0]
        mfnode = ctx.manifestnode()
        if mfnode and repo.manifest.find(mfnode, fname)[0]:
            emit(fname)
            return 0

    for abs in ctx.walk(matcher):
        emit(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)
            if not sub.cat(submatch, os.path.join(prefix, sub._path),
                           **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2601
2589
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    when = opts.get('date')
    if when:
        opts['date'] = util.parsedate(when)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove') and scmutil.addremove(repo, matcher, "",
                                                   opts) != 0:
        raise error.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2618
2606
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Replace changeset 'old' with a new one folding in working-dir changes.

    Commits any pending working-directory changes as a temporary
    intermediate changeset, then rebuilds 'old' on its first parent with
    those changes (and any -m/-l/-u/-d overrides from opts) folded in.
    Returns the node of the amended changeset, or old.node() when nothing
    changed.  With obsolescence markers enabled the rewritten changesets
    are obsoleted; otherwise they are stripped.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                # deactivate the bookmark so the temporary commit does not
                # advance it
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            #          |   from working dir to go into amending commit
            #          |   (or a workingctx if there were no changes)
            #          |
            # old      o - changeset to amend
            #          |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                # BUG FIX: the original tested 'if old.p2:' -- a bound method
                # object, which is always truthy -- so the second-parent copy
                # scan ran even for non-merge changesets.  Call it and rely on
                # changectx truthiness (false for the null revision).
                if old.p2():
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())

                def samefile(f):
                    if f in ctx.manifest():
                        a = ctx.filectx(f)
                        if f in base.manifest():
                            b = base.filectx(f)
                            return (not a.cmp(b)
                                    and a.flags() == b.flags())
                        else:
                            return False
                    else:
                        return f not in base.manifest()
                files = [f for f in files if not samefile(f)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        # path removed in the amended revision
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

            user = opts.get('user') or old.user()
            date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                # no -m/-l: reuse the old description, but open the editor
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
            #commit the whole amend process
            if createmarkers:
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs)
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, wlock)
    return newid
2813
2801
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's existing description, or obtain one via the editor."""
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2819
2807
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Run the user's editor and return the resulting commit message.

    The initial editor text comes from the most specific matching
    [committemplate] entry (looked up by progressively shortening
    'editform'), or from buildcommittext() when none is configured.
    'finishdesc', if given, post-processes the edited text.  Raises
    error.Abort on an empty message, or -- when
    'unchangedmessagedetection' is set -- when the user saved the
    template text unmodified.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # Find the most specific [committemplate] entry for editform, dropping
    # one dotted component per iteration; the loop's else-clause (no break)
    # means no template matched at all.
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = os.getcwd()
    os.chdir(repo.root)
    try:
        # make in-memory changes visible to external process
        tr = repo.currenttransaction()
        repo.dirstate.write(tr)
        pending = tr and tr.writepending() and repo.root

        editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                                  editform=editform, pending=pending)
        text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
    finally:
        # BUG FIX: restore the previous working directory even when the
        # editor invocation (or the pending write) raises; the original
        # chdir'ed back only on the success path, leaving the process in
        # repo.root after an error.
        os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2860
2848
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the commit-editor template 'tmpl' for ctx and return the text."""
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)

    # expose every other [committemplate] key to the template engine
    for key, value in repo.ui.configitems('committemplate'):
        if key != 'changeset':
            t.t.cache[key] = value

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2877
2865
def hgprefix(msg):
    """Prefix every non-empty line of msg with 'HG: ', dropping empty lines."""
    prefixed = []
    for line in msg.split("\n"):
        if line:
            prefixed.append("HG: %s" % line)
    return "\n".join(prefixed)
2880
2868
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default (untemplated) commit editor text for ctx."""
    lines = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        lines.append(ctx.description())
    lines.append("")
    lines.append("") # Empty line between message and comments.
    lines.append(hgprefix(_("Enter commit message."
                            " Lines beginning with 'HG:' are removed.")))
    lines.append(hgprefix(extramsg))
    lines.append("HG: --")
    lines.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        lines.append(hgprefix(_("branch merge")))
    if ctx.branch():
        lines.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        lines.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    lines.extend(hgprefix(_("subrepo %s") % s) for s in subs)
    lines.extend(hgprefix(_("added %s") % f) for f in added)
    lines.extend(hgprefix(_("changed %s") % f) for f in modified)
    lines.extend(hgprefix(_("removed %s") % f) for f in removed)
    if not (added or modified or removed):
        lines.append(hgprefix(_("no files changed")))
    lines.append("")

    return "\n".join(lines)
2908
2896
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print post-commit status messages (new head / reopened branch / hash)."""
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    headparents = [p for p in parents
                   if p.node() in (bheads or []) and p.branch() == branch]
    if (not opts.get('amend') and bheads and node not in bheads
            and not headparents):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for p in parents:
            if p.closesbranch() and p.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % p)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2956
2944
def postcommitstatus(repo, pats, opts):
    """Return working-directory status restricted to what a commit matched."""
    matcher = scmutil.match(repo[None], pats, opts)
    return repo.status(match=matcher)
2959
2947
2960 def revert(ui, repo, ctx, parents, *pats, **opts):
2948 def revert(ui, repo, ctx, parents, *pats, **opts):
2961 parent, p2 = parents
2949 parent, p2 = parents
2962 node = ctx.node()
2950 node = ctx.node()
2963
2951
2964 mf = ctx.manifest()
2952 mf = ctx.manifest()
2965 if node == p2:
2953 if node == p2:
2966 parent = p2
2954 parent = p2
2967
2955
2968 # need all matching names in dirstate and manifest of target rev,
2956 # need all matching names in dirstate and manifest of target rev,
2969 # so have to walk both. do not print errors if files exist in one
2957 # so have to walk both. do not print errors if files exist in one
2970 # but not other. in both cases, filesets should be evaluated against
2958 # but not other. in both cases, filesets should be evaluated against
2971 # workingctx to get consistent result (issue4497). this means 'set:**'
2959 # workingctx to get consistent result (issue4497). this means 'set:**'
2972 # cannot be used to select missing files from target rev.
2960 # cannot be used to select missing files from target rev.
2973
2961
2974 # `names` is a mapping for all elements in working copy and target revision
2962 # `names` is a mapping for all elements in working copy and target revision
2975 # The mapping is in the form:
2963 # The mapping is in the form:
2976 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2964 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2977 names = {}
2965 names = {}
2978
2966
2979 with repo.wlock():
2967 with repo.wlock():
2980 ## filling of the `names` mapping
2968 ## filling of the `names` mapping
2981 # walk dirstate to fill `names`
2969 # walk dirstate to fill `names`
2982
2970
2983 interactive = opts.get('interactive', False)
2971 interactive = opts.get('interactive', False)
2984 wctx = repo[None]
2972 wctx = repo[None]
2985 m = scmutil.match(wctx, pats, opts)
2973 m = scmutil.match(wctx, pats, opts)
2986
2974
2987 # we'll need this later
2975 # we'll need this later
2988 targetsubs = sorted(s for s in wctx.substate if m(s))
2976 targetsubs = sorted(s for s in wctx.substate if m(s))
2989
2977
2990 if not m.always():
2978 if not m.always():
2991 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2979 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2992 names[abs] = m.rel(abs), m.exact(abs)
2980 names[abs] = m.rel(abs), m.exact(abs)
2993
2981
2994 # walk target manifest to fill `names`
2982 # walk target manifest to fill `names`
2995
2983
2996 def badfn(path, msg):
2984 def badfn(path, msg):
2997 if path in names:
2985 if path in names:
2998 return
2986 return
2999 if path in ctx.substate:
2987 if path in ctx.substate:
3000 return
2988 return
3001 path_ = path + '/'
2989 path_ = path + '/'
3002 for f in names:
2990 for f in names:
3003 if f.startswith(path_):
2991 if f.startswith(path_):
3004 return
2992 return
3005 ui.warn("%s: %s\n" % (m.rel(path), msg))
2993 ui.warn("%s: %s\n" % (m.rel(path), msg))
3006
2994
3007 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2995 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
3008 if abs not in names:
2996 if abs not in names:
3009 names[abs] = m.rel(abs), m.exact(abs)
2997 names[abs] = m.rel(abs), m.exact(abs)
3010
2998
3011 # Find status of all file in `names`.
2999 # Find status of all file in `names`.
3012 m = scmutil.matchfiles(repo, names)
3000 m = scmutil.matchfiles(repo, names)
3013
3001
3014 changes = repo.status(node1=node, match=m,
3002 changes = repo.status(node1=node, match=m,
3015 unknown=True, ignored=True, clean=True)
3003 unknown=True, ignored=True, clean=True)
3016 else:
3004 else:
3017 changes = repo.status(node1=node, match=m)
3005 changes = repo.status(node1=node, match=m)
3018 for kind in changes:
3006 for kind in changes:
3019 for abs in kind:
3007 for abs in kind:
3020 names[abs] = m.rel(abs), m.exact(abs)
3008 names[abs] = m.rel(abs), m.exact(abs)
3021
3009
3022 m = scmutil.matchfiles(repo, names)
3010 m = scmutil.matchfiles(repo, names)
3023
3011
3024 modified = set(changes.modified)
3012 modified = set(changes.modified)
3025 added = set(changes.added)
3013 added = set(changes.added)
3026 removed = set(changes.removed)
3014 removed = set(changes.removed)
3027 _deleted = set(changes.deleted)
3015 _deleted = set(changes.deleted)
3028 unknown = set(changes.unknown)
3016 unknown = set(changes.unknown)
3029 unknown.update(changes.ignored)
3017 unknown.update(changes.ignored)
3030 clean = set(changes.clean)
3018 clean = set(changes.clean)
3031 modadded = set()
3019 modadded = set()
3032
3020
3033 # split between files known in target manifest and the others
3021 # split between files known in target manifest and the others
3034 smf = set(mf)
3022 smf = set(mf)
3035
3023
3036 # determine the exact nature of the deleted changesets
3024 # determine the exact nature of the deleted changesets
3037 deladded = _deleted - smf
3025 deladded = _deleted - smf
3038 deleted = _deleted - deladded
3026 deleted = _deleted - deladded
3039
3027
3040 # We need to account for the state of the file in the dirstate,
3028 # We need to account for the state of the file in the dirstate,
3041 # even when we revert against something else than parent. This will
3029 # even when we revert against something else than parent. This will
3042 # slightly alter the behavior of revert (doing back up or not, delete
3030 # slightly alter the behavior of revert (doing back up or not, delete
3043 # or just forget etc).
3031 # or just forget etc).
3044 if parent == node:
3032 if parent == node:
3045 dsmodified = modified
3033 dsmodified = modified
3046 dsadded = added
3034 dsadded = added
3047 dsremoved = removed
3035 dsremoved = removed
3048 # store all local modifications, useful later for rename detection
3036 # store all local modifications, useful later for rename detection
3049 localchanges = dsmodified | dsadded
3037 localchanges = dsmodified | dsadded
3050 modified, added, removed = set(), set(), set()
3038 modified, added, removed = set(), set(), set()
3051 else:
3039 else:
3052 changes = repo.status(node1=parent, match=m)
3040 changes = repo.status(node1=parent, match=m)
3053 dsmodified = set(changes.modified)
3041 dsmodified = set(changes.modified)
3054 dsadded = set(changes.added)
3042 dsadded = set(changes.added)
3055 dsremoved = set(changes.removed)
3043 dsremoved = set(changes.removed)
3056 # store all local modifications, useful later for rename detection
3044 # store all local modifications, useful later for rename detection
3057 localchanges = dsmodified | dsadded
3045 localchanges = dsmodified | dsadded
3058
3046
3059 # only take into account for removes between wc and target
3047 # only take into account for removes between wc and target
3060 clean |= dsremoved - removed
3048 clean |= dsremoved - removed
3061 dsremoved &= removed
3049 dsremoved &= removed
3062 # distinct between dirstate remove and other
3050 # distinct between dirstate remove and other
3063 removed -= dsremoved
3051 removed -= dsremoved
3064
3052
3065 modadded = added & dsmodified
3053 modadded = added & dsmodified
3066 added -= modadded
3054 added -= modadded
3067
3055
3068 # tell newly modified apart.
3056 # tell newly modified apart.
3069 dsmodified &= modified
3057 dsmodified &= modified
3070 dsmodified |= modified & dsadded # dirstate added may need backup
3058 dsmodified |= modified & dsadded # dirstate added may need backup
3071 modified -= dsmodified
3059 modified -= dsmodified
3072
3060
3073 # We need to wait for some post-processing to update this set
3061 # We need to wait for some post-processing to update this set
3074 # before making the distinction. The dirstate will be used for
3062 # before making the distinction. The dirstate will be used for
3075 # that purpose.
3063 # that purpose.
3076 dsadded = added
3064 dsadded = added
3077
3065
3078 # in case of merge, files that are actually added can be reported as
3066 # in case of merge, files that are actually added can be reported as
3079 # modified, we need to post process the result
3067 # modified, we need to post process the result
3080 if p2 != nullid:
3068 if p2 != nullid:
3081 mergeadd = dsmodified - smf
3069 mergeadd = dsmodified - smf
3082 dsadded |= mergeadd
3070 dsadded |= mergeadd
3083 dsmodified -= mergeadd
3071 dsmodified -= mergeadd
3084
3072
3085 # if f is a rename, update `names` to also revert the source
3073 # if f is a rename, update `names` to also revert the source
3086 cwd = repo.getcwd()
3074 cwd = repo.getcwd()
3087 for f in localchanges:
3075 for f in localchanges:
3088 src = repo.dirstate.copied(f)
3076 src = repo.dirstate.copied(f)
3089 # XXX should we check for rename down to target node?
3077 # XXX should we check for rename down to target node?
3090 if src and src not in names and repo.dirstate[src] == 'r':
3078 if src and src not in names and repo.dirstate[src] == 'r':
3091 dsremoved.add(src)
3079 dsremoved.add(src)
3092 names[src] = (repo.pathto(src, cwd), True)
3080 names[src] = (repo.pathto(src, cwd), True)
3093
3081
3094 # distinguish between file to forget and the other
3082 # distinguish between file to forget and the other
3095 added = set()
3083 added = set()
3096 for abs in dsadded:
3084 for abs in dsadded:
3097 if repo.dirstate[abs] != 'a':
3085 if repo.dirstate[abs] != 'a':
3098 added.add(abs)
3086 added.add(abs)
3099 dsadded -= added
3087 dsadded -= added
3100
3088
3101 for abs in deladded:
3089 for abs in deladded:
3102 if repo.dirstate[abs] == 'a':
3090 if repo.dirstate[abs] == 'a':
3103 dsadded.add(abs)
3091 dsadded.add(abs)
3104 deladded -= dsadded
3092 deladded -= dsadded
3105
3093
3106 # For files marked as removed, we check if an unknown file is present at
3094 # For files marked as removed, we check if an unknown file is present at
3107 # the same path. If a such file exists it may need to be backed up.
3095 # the same path. If a such file exists it may need to be backed up.
3108 # Making the distinction at this stage helps have simpler backup
3096 # Making the distinction at this stage helps have simpler backup
3109 # logic.
3097 # logic.
3110 removunk = set()
3098 removunk = set()
3111 for abs in removed:
3099 for abs in removed:
3112 target = repo.wjoin(abs)
3100 target = repo.wjoin(abs)
3113 if os.path.lexists(target):
3101 if os.path.lexists(target):
3114 removunk.add(abs)
3102 removunk.add(abs)
3115 removed -= removunk
3103 removed -= removunk
3116
3104
3117 dsremovunk = set()
3105 dsremovunk = set()
3118 for abs in dsremoved:
3106 for abs in dsremoved:
3119 target = repo.wjoin(abs)
3107 target = repo.wjoin(abs)
3120 if os.path.lexists(target):
3108 if os.path.lexists(target):
3121 dsremovunk.add(abs)
3109 dsremovunk.add(abs)
3122 dsremoved -= dsremovunk
3110 dsremoved -= dsremovunk
3123
3111
3124 # action to be actually performed by revert
3112 # action to be actually performed by revert
3125 # (<list of file>, message>) tuple
3113 # (<list of file>, message>) tuple
3126 actions = {'revert': ([], _('reverting %s\n')),
3114 actions = {'revert': ([], _('reverting %s\n')),
3127 'add': ([], _('adding %s\n')),
3115 'add': ([], _('adding %s\n')),
3128 'remove': ([], _('removing %s\n')),
3116 'remove': ([], _('removing %s\n')),
3129 'drop': ([], _('removing %s\n')),
3117 'drop': ([], _('removing %s\n')),
3130 'forget': ([], _('forgetting %s\n')),
3118 'forget': ([], _('forgetting %s\n')),
3131 'undelete': ([], _('undeleting %s\n')),
3119 'undelete': ([], _('undeleting %s\n')),
3132 'noop': (None, _('no changes needed to %s\n')),
3120 'noop': (None, _('no changes needed to %s\n')),
3133 'unknown': (None, _('file not managed: %s\n')),
3121 'unknown': (None, _('file not managed: %s\n')),
3134 }
3122 }
3135
3123
3136 # "constant" that convey the backup strategy.
3124 # "constant" that convey the backup strategy.
3137 # All set to `discard` if `no-backup` is set do avoid checking
3125 # All set to `discard` if `no-backup` is set do avoid checking
3138 # no_backup lower in the code.
3126 # no_backup lower in the code.
3139 # These values are ordered for comparison purposes
3127 # These values are ordered for comparison purposes
3140 backupinteractive = 3 # do backup if interactively modified
3128 backupinteractive = 3 # do backup if interactively modified
3141 backup = 2 # unconditionally do backup
3129 backup = 2 # unconditionally do backup
3142 check = 1 # check if the existing file differs from target
3130 check = 1 # check if the existing file differs from target
3143 discard = 0 # never do backup
3131 discard = 0 # never do backup
3144 if opts.get('no_backup'):
3132 if opts.get('no_backup'):
3145 backupinteractive = backup = check = discard
3133 backupinteractive = backup = check = discard
3146 if interactive:
3134 if interactive:
3147 dsmodifiedbackup = backupinteractive
3135 dsmodifiedbackup = backupinteractive
3148 else:
3136 else:
3149 dsmodifiedbackup = backup
3137 dsmodifiedbackup = backup
3150 tobackup = set()
3138 tobackup = set()
3151
3139
3152 backupanddel = actions['remove']
3140 backupanddel = actions['remove']
3153 if not opts.get('no_backup'):
3141 if not opts.get('no_backup'):
3154 backupanddel = actions['drop']
3142 backupanddel = actions['drop']
3155
3143
3156 disptable = (
3144 disptable = (
3157 # dispatch table:
3145 # dispatch table:
3158 # file state
3146 # file state
3159 # action
3147 # action
3160 # make backup
3148 # make backup
3161
3149
3162 ## Sets that results that will change file on disk
3150 ## Sets that results that will change file on disk
3163 # Modified compared to target, no local change
3151 # Modified compared to target, no local change
3164 (modified, actions['revert'], discard),
3152 (modified, actions['revert'], discard),
3165 # Modified compared to target, but local file is deleted
3153 # Modified compared to target, but local file is deleted
3166 (deleted, actions['revert'], discard),
3154 (deleted, actions['revert'], discard),
3167 # Modified compared to target, local change
3155 # Modified compared to target, local change
3168 (dsmodified, actions['revert'], dsmodifiedbackup),
3156 (dsmodified, actions['revert'], dsmodifiedbackup),
3169 # Added since target
3157 # Added since target
3170 (added, actions['remove'], discard),
3158 (added, actions['remove'], discard),
3171 # Added in working directory
3159 # Added in working directory
3172 (dsadded, actions['forget'], discard),
3160 (dsadded, actions['forget'], discard),
3173 # Added since target, have local modification
3161 # Added since target, have local modification
3174 (modadded, backupanddel, backup),
3162 (modadded, backupanddel, backup),
3175 # Added since target but file is missing in working directory
3163 # Added since target but file is missing in working directory
3176 (deladded, actions['drop'], discard),
3164 (deladded, actions['drop'], discard),
3177 # Removed since target, before working copy parent
3165 # Removed since target, before working copy parent
3178 (removed, actions['add'], discard),
3166 (removed, actions['add'], discard),
3179 # Same as `removed` but an unknown file exists at the same path
3167 # Same as `removed` but an unknown file exists at the same path
3180 (removunk, actions['add'], check),
3168 (removunk, actions['add'], check),
3181 # Removed since targe, marked as such in working copy parent
3169 # Removed since targe, marked as such in working copy parent
3182 (dsremoved, actions['undelete'], discard),
3170 (dsremoved, actions['undelete'], discard),
3183 # Same as `dsremoved` but an unknown file exists at the same path
3171 # Same as `dsremoved` but an unknown file exists at the same path
3184 (dsremovunk, actions['undelete'], check),
3172 (dsremovunk, actions['undelete'], check),
3185 ## the following sets does not result in any file changes
3173 ## the following sets does not result in any file changes
3186 # File with no modification
3174 # File with no modification
3187 (clean, actions['noop'], discard),
3175 (clean, actions['noop'], discard),
3188 # Existing file, not tracked anywhere
3176 # Existing file, not tracked anywhere
3189 (unknown, actions['unknown'], discard),
3177 (unknown, actions['unknown'], discard),
3190 )
3178 )
3191
3179
3192 for abs, (rel, exact) in sorted(names.items()):
3180 for abs, (rel, exact) in sorted(names.items()):
3193 # target file to be touch on disk (relative to cwd)
3181 # target file to be touch on disk (relative to cwd)
3194 target = repo.wjoin(abs)
3182 target = repo.wjoin(abs)
3195 # search the entry in the dispatch table.
3183 # search the entry in the dispatch table.
3196 # if the file is in any of these sets, it was touched in the working
3184 # if the file is in any of these sets, it was touched in the working
3197 # directory parent and we are sure it needs to be reverted.
3185 # directory parent and we are sure it needs to be reverted.
3198 for table, (xlist, msg), dobackup in disptable:
3186 for table, (xlist, msg), dobackup in disptable:
3199 if abs not in table:
3187 if abs not in table:
3200 continue
3188 continue
3201 if xlist is not None:
3189 if xlist is not None:
3202 xlist.append(abs)
3190 xlist.append(abs)
3203 if dobackup:
3191 if dobackup:
3204 # If in interactive mode, don't automatically create
3192 # If in interactive mode, don't automatically create
3205 # .orig files (issue4793)
3193 # .orig files (issue4793)
3206 if dobackup == backupinteractive:
3194 if dobackup == backupinteractive:
3207 tobackup.add(abs)
3195 tobackup.add(abs)
3208 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3196 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3209 bakname = scmutil.origpath(ui, repo, rel)
3197 bakname = scmutil.origpath(ui, repo, rel)
3210 ui.note(_('saving current version of %s as %s\n') %
3198 ui.note(_('saving current version of %s as %s\n') %
3211 (rel, bakname))
3199 (rel, bakname))
3212 if not opts.get('dry_run'):
3200 if not opts.get('dry_run'):
3213 if interactive:
3201 if interactive:
3214 util.copyfile(target, bakname)
3202 util.copyfile(target, bakname)
3215 else:
3203 else:
3216 util.rename(target, bakname)
3204 util.rename(target, bakname)
3217 if ui.verbose or not exact:
3205 if ui.verbose or not exact:
3218 if not isinstance(msg, basestring):
3206 if not isinstance(msg, basestring):
3219 msg = msg(abs)
3207 msg = msg(abs)
3220 ui.status(msg % rel)
3208 ui.status(msg % rel)
3221 elif exact:
3209 elif exact:
3222 ui.warn(msg % rel)
3210 ui.warn(msg % rel)
3223 break
3211 break
3224
3212
3225 if not opts.get('dry_run'):
3213 if not opts.get('dry_run'):
3226 needdata = ('revert', 'add', 'undelete')
3214 needdata = ('revert', 'add', 'undelete')
3227 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3215 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3228 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3216 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3229
3217
3230 if targetsubs:
3218 if targetsubs:
3231 # Revert the subrepos on the revert list
3219 # Revert the subrepos on the revert list
3232 for sub in targetsubs:
3220 for sub in targetsubs:
3233 try:
3221 try:
3234 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3222 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3235 except KeyError:
3223 except KeyError:
3236 raise error.Abort("subrepository '%s' does not exist in %s!"
3224 raise error.Abort("subrepository '%s' does not exist in %s!"
3237 % (sub, short(ctx.node())))
3225 % (sub, short(ctx.node())))
3238
3226
3239 def _revertprefetch(repo, ctx, *files):
3227 def _revertprefetch(repo, ctx, *files):
3240 """Let extension changing the storage layer prefetch content"""
3228 """Let extension changing the storage layer prefetch content"""
3241 pass
3229 pass
3242
3230
3243 def _performrevert(repo, parents, ctx, actions, interactive=False,
3231 def _performrevert(repo, parents, ctx, actions, interactive=False,
3244 tobackup=None):
3232 tobackup=None):
3245 """function that actually perform all the actions computed for revert
3233 """function that actually perform all the actions computed for revert
3246
3234
3247 This is an independent function to let extension to plug in and react to
3235 This is an independent function to let extension to plug in and react to
3248 the imminent revert.
3236 the imminent revert.
3249
3237
3250 Make sure you have the working directory locked when calling this function.
3238 Make sure you have the working directory locked when calling this function.
3251 """
3239 """
3252 parent, p2 = parents
3240 parent, p2 = parents
3253 node = ctx.node()
3241 node = ctx.node()
3254 excluded_files = []
3242 excluded_files = []
3255 matcher_opts = {"exclude": excluded_files}
3243 matcher_opts = {"exclude": excluded_files}
3256
3244
3257 def checkout(f):
3245 def checkout(f):
3258 fc = ctx[f]
3246 fc = ctx[f]
3259 repo.wwrite(f, fc.data(), fc.flags())
3247 repo.wwrite(f, fc.data(), fc.flags())
3260
3248
3261 audit_path = pathutil.pathauditor(repo.root)
3249 audit_path = pathutil.pathauditor(repo.root)
3262 for f in actions['forget'][0]:
3250 for f in actions['forget'][0]:
3263 if interactive:
3251 if interactive:
3264 choice = \
3252 choice = \
3265 repo.ui.promptchoice(
3253 repo.ui.promptchoice(
3266 _("forget added file %s (yn)?$$ &Yes $$ &No")
3254 _("forget added file %s (yn)?$$ &Yes $$ &No")
3267 % f)
3255 % f)
3268 if choice == 0:
3256 if choice == 0:
3269 repo.dirstate.drop(f)
3257 repo.dirstate.drop(f)
3270 else:
3258 else:
3271 excluded_files.append(repo.wjoin(f))
3259 excluded_files.append(repo.wjoin(f))
3272 else:
3260 else:
3273 repo.dirstate.drop(f)
3261 repo.dirstate.drop(f)
3274 for f in actions['remove'][0]:
3262 for f in actions['remove'][0]:
3275 audit_path(f)
3263 audit_path(f)
3276 try:
3264 try:
3277 util.unlinkpath(repo.wjoin(f))
3265 util.unlinkpath(repo.wjoin(f))
3278 except OSError:
3266 except OSError:
3279 pass
3267 pass
3280 repo.dirstate.remove(f)
3268 repo.dirstate.remove(f)
3281 for f in actions['drop'][0]:
3269 for f in actions['drop'][0]:
3282 audit_path(f)
3270 audit_path(f)
3283 repo.dirstate.remove(f)
3271 repo.dirstate.remove(f)
3284
3272
3285 normal = None
3273 normal = None
3286 if node == parent:
3274 if node == parent:
3287 # We're reverting to our parent. If possible, we'd like status
3275 # We're reverting to our parent. If possible, we'd like status
3288 # to report the file as clean. We have to use normallookup for
3276 # to report the file as clean. We have to use normallookup for
3289 # merges to avoid losing information about merged/dirty files.
3277 # merges to avoid losing information about merged/dirty files.
3290 if p2 != nullid:
3278 if p2 != nullid:
3291 normal = repo.dirstate.normallookup
3279 normal = repo.dirstate.normallookup
3292 else:
3280 else:
3293 normal = repo.dirstate.normal
3281 normal = repo.dirstate.normal
3294
3282
3295 newlyaddedandmodifiedfiles = set()
3283 newlyaddedandmodifiedfiles = set()
3296 if interactive:
3284 if interactive:
3297 # Prompt the user for changes to revert
3285 # Prompt the user for changes to revert
3298 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3286 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3299 m = scmutil.match(ctx, torevert, matcher_opts)
3287 m = scmutil.match(ctx, torevert, matcher_opts)
3300 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3288 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3301 diffopts.nodates = True
3289 diffopts.nodates = True
3302 diffopts.git = True
3290 diffopts.git = True
3303 reversehunks = repo.ui.configbool('experimental',
3291 reversehunks = repo.ui.configbool('experimental',
3304 'revertalternateinteractivemode',
3292 'revertalternateinteractivemode',
3305 True)
3293 True)
3306 if reversehunks:
3294 if reversehunks:
3307 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3295 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3308 else:
3296 else:
3309 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3297 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3310 originalchunks = patch.parsepatch(diff)
3298 originalchunks = patch.parsepatch(diff)
3311 operation = 'discard' if node == parent else 'revert'
3299 operation = 'discard' if node == parent else 'revert'
3312
3300
3313 try:
3301 try:
3314
3302
3315 chunks, opts = recordfilter(repo.ui, originalchunks,
3303 chunks, opts = recordfilter(repo.ui, originalchunks,
3316 operation=operation)
3304 operation=operation)
3317 if reversehunks:
3305 if reversehunks:
3318 chunks = patch.reversehunks(chunks)
3306 chunks = patch.reversehunks(chunks)
3319
3307
3320 except patch.PatchError as err:
3308 except patch.PatchError as err:
3321 raise error.Abort(_('error parsing patch: %s') % err)
3309 raise error.Abort(_('error parsing patch: %s') % err)
3322
3310
3323 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3311 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3324 if tobackup is None:
3312 if tobackup is None:
3325 tobackup = set()
3313 tobackup = set()
3326 # Apply changes
3314 # Apply changes
3327 fp = stringio()
3315 fp = stringio()
3328 for c in chunks:
3316 for c in chunks:
3329 # Create a backup file only if this hunk should be backed up
3317 # Create a backup file only if this hunk should be backed up
3330 if ishunk(c) and c.header.filename() in tobackup:
3318 if ishunk(c) and c.header.filename() in tobackup:
3331 abs = c.header.filename()
3319 abs = c.header.filename()
3332 target = repo.wjoin(abs)
3320 target = repo.wjoin(abs)
3333 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3321 bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
3334 util.copyfile(target, bakname)
3322 util.copyfile(target, bakname)
3335 tobackup.remove(abs)
3323 tobackup.remove(abs)
3336 c.write(fp)
3324 c.write(fp)
3337 dopatch = fp.tell()
3325 dopatch = fp.tell()
3338 fp.seek(0)
3326 fp.seek(0)
3339 if dopatch:
3327 if dopatch:
3340 try:
3328 try:
3341 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3329 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3342 except patch.PatchError as err:
3330 except patch.PatchError as err:
3343 raise error.Abort(str(err))
3331 raise error.Abort(str(err))
3344 del fp
3332 del fp
3345 else:
3333 else:
3346 for f in actions['revert'][0]:
3334 for f in actions['revert'][0]:
3347 checkout(f)
3335 checkout(f)
3348 if normal:
3336 if normal:
3349 normal(f)
3337 normal(f)
3350
3338
3351 for f in actions['add'][0]:
3339 for f in actions['add'][0]:
3352 # Don't checkout modified files, they are already created by the diff
3340 # Don't checkout modified files, they are already created by the diff
3353 if f not in newlyaddedandmodifiedfiles:
3341 if f not in newlyaddedandmodifiedfiles:
3354 checkout(f)
3342 checkout(f)
3355 repo.dirstate.add(f)
3343 repo.dirstate.add(f)
3356
3344
3357 normal = repo.dirstate.normallookup
3345 normal = repo.dirstate.normallookup
3358 if node == parent and p2 == nullid:
3346 if node == parent and p2 == nullid:
3359 normal = repo.dirstate.normal
3347 normal = repo.dirstate.normal
3360 for f in actions['undelete'][0]:
3348 for f in actions['undelete'][0]:
3361 checkout(f)
3349 checkout(f)
3362 normal(f)
3350 normal(f)
3363
3351
3364 copied = copies.pathcopies(repo[parent], ctx)
3352 copied = copies.pathcopies(repo[parent], ctx)
3365
3353
3366 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3354 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3367 if f in copied:
3355 if f in copied:
3368 repo.dirstate.copy(copied[f], f)
3356 repo.dirstate.copy(copied[f], f)
3369
3357
3370 def command(table):
3358 def command(table):
3371 """Returns a function object to be used as a decorator for making commands.
3359 """Returns a function object to be used as a decorator for making commands.
3372
3360
3373 This function receives a command table as its argument. The table should
3361 This function receives a command table as its argument. The table should
3374 be a dict.
3362 be a dict.
3375
3363
3376 The returned function can be used as a decorator for adding commands
3364 The returned function can be used as a decorator for adding commands
3377 to that command table. This function accepts multiple arguments to define
3365 to that command table. This function accepts multiple arguments to define
3378 a command.
3366 a command.
3379
3367
3380 The first argument is the command name.
3368 The first argument is the command name.
3381
3369
3382 The options argument is an iterable of tuples defining command arguments.
3370 The options argument is an iterable of tuples defining command arguments.
3383 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3371 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3384
3372
3385 The synopsis argument defines a short, one line summary of how to use the
3373 The synopsis argument defines a short, one line summary of how to use the
3386 command. This shows up in the help output.
3374 command. This shows up in the help output.
3387
3375
3388 The norepo argument defines whether the command does not require a
3376 The norepo argument defines whether the command does not require a
3389 local repository. Most commands operate against a repository, thus the
3377 local repository. Most commands operate against a repository, thus the
3390 default is False.
3378 default is False.
3391
3379
3392 The optionalrepo argument defines whether the command optionally requires
3380 The optionalrepo argument defines whether the command optionally requires
3393 a local repository.
3381 a local repository.
3394
3382
3395 The inferrepo argument defines whether to try to find a repository from the
3383 The inferrepo argument defines whether to try to find a repository from the
3396 command line arguments. If True, arguments will be examined for potential
3384 command line arguments. If True, arguments will be examined for potential
3397 repository locations. See ``findrepo()``. If a repository is found, it
3385 repository locations. See ``findrepo()``. If a repository is found, it
3398 will be used.
3386 will be used.
3399 """
3387 """
3400 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3388 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3401 inferrepo=False):
3389 inferrepo=False):
3402 def decorator(func):
3390 def decorator(func):
3403 func.norepo = norepo
3391 func.norepo = norepo
3404 func.optionalrepo = optionalrepo
3392 func.optionalrepo = optionalrepo
3405 func.inferrepo = inferrepo
3393 func.inferrepo = inferrepo
3406 if synopsis:
3394 if synopsis:
3407 table[name] = func, list(options), synopsis
3395 table[name] = func, list(options), synopsis
3408 else:
3396 else:
3409 table[name] = func, list(options)
3397 table[name] = func, list(options)
3410 return func
3398 return func
3411 return decorator
3399 return decorator
3412
3400
3413 return cmd
3401 return cmd
3414
3402
3415 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3403 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3416 # commands.outgoing. "missing" is "missing" of the result of
3404 # commands.outgoing. "missing" is "missing" of the result of
3417 # "findcommonoutgoing()"
3405 # "findcommonoutgoing()"
3418 outgoinghooks = util.hooks()
3406 outgoinghooks = util.hooks()
3419
3407
3420 # a list of (ui, repo) functions called by commands.summary
3408 # a list of (ui, repo) functions called by commands.summary
3421 summaryhooks = util.hooks()
3409 summaryhooks = util.hooks()
3422
3410
3423 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3411 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3424 #
3412 #
3425 # functions should return tuple of booleans below, if 'changes' is None:
3413 # functions should return tuple of booleans below, if 'changes' is None:
3426 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3414 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3427 #
3415 #
3428 # otherwise, 'changes' is a tuple of tuples below:
3416 # otherwise, 'changes' is a tuple of tuples below:
3429 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3417 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3430 # - (desturl, destbranch, destpeer, outgoing)
3418 # - (desturl, destbranch, destpeer, outgoing)
3431 summaryremotehooks = util.hooks()
3419 summaryremotehooks = util.hooks()
3432
3420
3433 # A list of state files kept by multistep operations like graft.
3421 # A list of state files kept by multistep operations like graft.
3434 # Since graft cannot be aborted, it is considered 'clearable' by update.
3422 # Since graft cannot be aborted, it is considered 'clearable' by update.
3435 # note: bisect is intentionally excluded
3423 # note: bisect is intentionally excluded
3436 # (state file, clearable, allowcommit, error, hint)
3424 # (state file, clearable, allowcommit, error, hint)
3437 unfinishedstates = [
3425 unfinishedstates = [
3438 ('graftstate', True, False, _('graft in progress'),
3426 ('graftstate', True, False, _('graft in progress'),
3439 _("use 'hg graft --continue' or 'hg update' to abort")),
3427 _("use 'hg graft --continue' or 'hg update' to abort")),
3440 ('updatestate', True, False, _('last update was interrupted'),
3428 ('updatestate', True, False, _('last update was interrupted'),
3441 _("use 'hg update' to get a consistent checkout"))
3429 _("use 'hg update' to get a consistent checkout"))
3442 ]
3430 ]
3443
3431
3444 def checkunfinished(repo, commit=False):
3432 def checkunfinished(repo, commit=False):
3445 '''Look for an unfinished multistep operation, like graft, and abort
3433 '''Look for an unfinished multistep operation, like graft, and abort
3446 if found. It's probably good to check this right before
3434 if found. It's probably good to check this right before
3447 bailifchanged().
3435 bailifchanged().
3448 '''
3436 '''
3449 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3437 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3450 if commit and allowcommit:
3438 if commit and allowcommit:
3451 continue
3439 continue
3452 if repo.vfs.exists(f):
3440 if repo.vfs.exists(f):
3453 raise error.Abort(msg, hint=hint)
3441 raise error.Abort(msg, hint=hint)
3454
3442
3455 def clearunfinished(repo):
3443 def clearunfinished(repo):
3456 '''Check for unfinished operations (as above), and clear the ones
3444 '''Check for unfinished operations (as above), and clear the ones
3457 that are clearable.
3445 that are clearable.
3458 '''
3446 '''
3459 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3447 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3460 if not clearable and repo.vfs.exists(f):
3448 if not clearable and repo.vfs.exists(f):
3461 raise error.Abort(msg, hint=hint)
3449 raise error.Abort(msg, hint=hint)
3462 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3450 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3463 if clearable and repo.vfs.exists(f):
3451 if clearable and repo.vfs.exists(f):
3464 util.unlink(repo.join(f))
3452 util.unlink(repo.join(f))
3465
3453
3466 afterresolvedstates = [
3454 afterresolvedstates = [
3467 ('graftstate',
3455 ('graftstate',
3468 _('hg graft --continue')),
3456 _('hg graft --continue')),
3469 ]
3457 ]
3470
3458
3471 def howtocontinue(repo):
3459 def howtocontinue(repo):
3472 '''Check for an unfinished operation and return the command to finish
3460 '''Check for an unfinished operation and return the command to finish
3473 it.
3461 it.
3474
3462
3475 afterresolvedstates tupples define a .hg/{file} and the corresponding
3463 afterresolvedstates tupples define a .hg/{file} and the corresponding
3476 command needed to finish it.
3464 command needed to finish it.
3477
3465
3478 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3466 Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
3479 a boolean.
3467 a boolean.
3480 '''
3468 '''
3481 contmsg = _("continue: %s")
3469 contmsg = _("continue: %s")
3482 for f, msg in afterresolvedstates:
3470 for f, msg in afterresolvedstates:
3483 if repo.vfs.exists(f):
3471 if repo.vfs.exists(f):
3484 return contmsg % msg, True
3472 return contmsg % msg, True
3485 workingctx = repo[None]
3473 workingctx = repo[None]
3486 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3474 dirty = any(repo.status()) or any(workingctx.sub(s).dirty()
3487 for s in workingctx.substate)
3475 for s in workingctx.substate)
3488 if dirty:
3476 if dirty:
3489 return contmsg % _("hg commit"), False
3477 return contmsg % _("hg commit"), False
3490 return None, None
3478 return None, None
3491
3479
3492 def checkafterresolved(repo):
3480 def checkafterresolved(repo):
3493 '''Inform the user about the next action after completing hg resolve
3481 '''Inform the user about the next action after completing hg resolve
3494
3482
3495 If there's a matching afterresolvedstates, howtocontinue will yield
3483 If there's a matching afterresolvedstates, howtocontinue will yield
3496 repo.ui.warn as the reporter.
3484 repo.ui.warn as the reporter.
3497
3485
3498 Otherwise, it will yield repo.ui.note.
3486 Otherwise, it will yield repo.ui.note.
3499 '''
3487 '''
3500 msg, warning = howtocontinue(repo)
3488 msg, warning = howtocontinue(repo)
3501 if msg is not None:
3489 if msg is not None:
3502 if warning:
3490 if warning:
3503 repo.ui.warn("%s\n" % msg)
3491 repo.ui.warn("%s\n" % msg)
3504 else:
3492 else:
3505 repo.ui.note("%s\n" % msg)
3493 repo.ui.note("%s\n" % msg)
3506
3494
3507 def wrongtooltocontinue(repo, task):
3495 def wrongtooltocontinue(repo, task):
3508 '''Raise an abort suggesting how to properly continue if there is an
3496 '''Raise an abort suggesting how to properly continue if there is an
3509 active task.
3497 active task.
3510
3498
3511 Uses howtocontinue() to find the active task.
3499 Uses howtocontinue() to find the active task.
3512
3500
3513 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3501 If there's no task (repo.ui.note for 'hg commit'), it does not offer
3514 a hint.
3502 a hint.
3515 '''
3503 '''
3516 after = howtocontinue(repo)
3504 after = howtocontinue(repo)
3517 hint = None
3505 hint = None
3518 if after[1]:
3506 if after[1]:
3519 hint = after[0]
3507 hint = after[0]
3520 raise error.Abort(_('no %s in progress') % task, hint=hint)
3508 raise error.Abort(_('no %s in progress') % task, hint=hint)
3521
3509
3522 class dirstateguard(object):
3510 class dirstateguard(object):
3523 '''Restore dirstate at unexpected failure.
3511 '''Restore dirstate at unexpected failure.
3524
3512
3525 At the construction, this class does:
3513 At the construction, this class does:
3526
3514
3527 - write current ``repo.dirstate`` out, and
3515 - write current ``repo.dirstate`` out, and
3528 - save ``.hg/dirstate`` into the backup file
3516 - save ``.hg/dirstate`` into the backup file
3529
3517
3530 This restores ``.hg/dirstate`` from backup file, if ``release()``
3518 This restores ``.hg/dirstate`` from backup file, if ``release()``
3531 is invoked before ``close()``.
3519 is invoked before ``close()``.
3532
3520
3533 This just removes the backup file at ``close()`` before ``release()``.
3521 This just removes the backup file at ``close()`` before ``release()``.
3534 '''
3522 '''
3535
3523
3536 def __init__(self, repo, name):
3524 def __init__(self, repo, name):
3537 self._repo = repo
3525 self._repo = repo
3538 self._suffix = '.backup.%s.%d' % (name, id(self))
3526 self._suffix = '.backup.%s.%d' % (name, id(self))
3539 repo.dirstate.savebackup(repo.currenttransaction(), self._suffix)
3527 repo.dirstate.savebackup(repo.currenttransaction(), self._suffix)
3540 self._active = True
3528 self._active = True
3541 self._closed = False
3529 self._closed = False
3542
3530
3543 def __del__(self):
3531 def __del__(self):
3544 if self._active: # still active
3532 if self._active: # still active
3545 # this may occur, even if this class is used correctly:
3533 # this may occur, even if this class is used correctly:
3546 # for example, releasing other resources like transaction
3534 # for example, releasing other resources like transaction
3547 # may raise exception before ``dirstateguard.release`` in
3535 # may raise exception before ``dirstateguard.release`` in
3548 # ``release(tr, ....)``.
3536 # ``release(tr, ....)``.
3549 self._abort()
3537 self._abort()
3550
3538
3551 def close(self):
3539 def close(self):
3552 if not self._active: # already inactivated
3540 if not self._active: # already inactivated
3553 msg = (_("can't close already inactivated backup: dirstate%s")
3541 msg = (_("can't close already inactivated backup: dirstate%s")
3554 % self._suffix)
3542 % self._suffix)
3555 raise error.Abort(msg)
3543 raise error.Abort(msg)
3556
3544
3557 self._repo.dirstate.clearbackup(self._repo.currenttransaction(),
3545 self._repo.dirstate.clearbackup(self._repo.currenttransaction(),
3558 self._suffix)
3546 self._suffix)
3559 self._active = False
3547 self._active = False
3560 self._closed = True
3548 self._closed = True
3561
3549
3562 def _abort(self):
3550 def _abort(self):
3563 self._repo.dirstate.restorebackup(self._repo.currenttransaction(),
3551 self._repo.dirstate.restorebackup(self._repo.currenttransaction(),
3564 self._suffix)
3552 self._suffix)
3565 self._active = False
3553 self._active = False
3566
3554
3567 def release(self):
3555 def release(self):
3568 if not self._closed:
3556 if not self._closed:
3569 if not self._active: # already inactivated
3557 if not self._active: # already inactivated
3570 msg = (_("can't release already inactivated backup:"
3558 msg = (_("can't release already inactivated backup:"
3571 " dirstate%s")
3559 " dirstate%s")
3572 % self._suffix)
3560 % self._suffix)
3573 raise error.Abort(msg)
3561 raise error.Abort(msg)
3574 self._abort()
3562 self._abort()
@@ -1,712 +1,716 b''
1 # match.py - filename matching
1 # match.py - filename matching
2 #
2 #
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy
10 import copy
11 import os
11 import os
12 import re
12 import re
13
13
14 from .i18n import _
14 from .i18n import _
15 from . import (
15 from . import (
16 error,
16 error,
17 pathutil,
17 pathutil,
18 util,
18 util,
19 )
19 )
20
20
21 propertycache = util.propertycache
21 propertycache = util.propertycache
22
22
23 def _rematcher(regex):
23 def _rematcher(regex):
24 '''compile the regexp with the best available regexp engine and return a
24 '''compile the regexp with the best available regexp engine and return a
25 matcher function'''
25 matcher function'''
26 m = util.re.compile(regex)
26 m = util.re.compile(regex)
27 try:
27 try:
28 # slightly faster, provided by facebook's re2 bindings
28 # slightly faster, provided by facebook's re2 bindings
29 return m.test_match
29 return m.test_match
30 except AttributeError:
30 except AttributeError:
31 return m.match
31 return m.match
32
32
33 def _expandsets(kindpats, ctx, listsubrepos):
33 def _expandsets(kindpats, ctx, listsubrepos):
34 '''Returns the kindpats list with the 'set' patterns expanded.'''
34 '''Returns the kindpats list with the 'set' patterns expanded.'''
35 fset = set()
35 fset = set()
36 other = []
36 other = []
37
37
38 for kind, pat, source in kindpats:
38 for kind, pat, source in kindpats:
39 if kind == 'set':
39 if kind == 'set':
40 if not ctx:
40 if not ctx:
41 raise error.Abort(_("fileset expression with no context"))
41 raise error.Abort(_("fileset expression with no context"))
42 s = ctx.getfileset(pat)
42 s = ctx.getfileset(pat)
43 fset.update(s)
43 fset.update(s)
44
44
45 if listsubrepos:
45 if listsubrepos:
46 for subpath in ctx.substate:
46 for subpath in ctx.substate:
47 s = ctx.sub(subpath).getfileset(pat)
47 s = ctx.sub(subpath).getfileset(pat)
48 fset.update(subpath + '/' + f for f in s)
48 fset.update(subpath + '/' + f for f in s)
49
49
50 continue
50 continue
51 other.append((kind, pat, source))
51 other.append((kind, pat, source))
52 return fset, other
52 return fset, other
53
53
54 def _expandsubinclude(kindpats, root):
54 def _expandsubinclude(kindpats, root):
55 '''Returns the list of subinclude matchers and the kindpats without the
55 '''Returns the list of subinclude matchers and the kindpats without the
56 subincludes in it.'''
56 subincludes in it.'''
57 relmatchers = []
57 relmatchers = []
58 other = []
58 other = []
59
59
60 for kind, pat, source in kindpats:
60 for kind, pat, source in kindpats:
61 if kind == 'subinclude':
61 if kind == 'subinclude':
62 sourceroot = pathutil.dirname(util.normpath(source))
62 sourceroot = pathutil.dirname(util.normpath(source))
63 pat = util.pconvert(pat)
63 pat = util.pconvert(pat)
64 path = pathutil.join(sourceroot, pat)
64 path = pathutil.join(sourceroot, pat)
65
65
66 newroot = pathutil.dirname(path)
66 newroot = pathutil.dirname(path)
67 relmatcher = match(newroot, '', [], ['include:%s' % path])
67 relmatcher = match(newroot, '', [], ['include:%s' % path])
68
68
69 prefix = pathutil.canonpath(root, root, newroot)
69 prefix = pathutil.canonpath(root, root, newroot)
70 if prefix:
70 if prefix:
71 prefix += '/'
71 prefix += '/'
72 relmatchers.append((prefix, relmatcher))
72 relmatchers.append((prefix, relmatcher))
73 else:
73 else:
74 other.append((kind, pat, source))
74 other.append((kind, pat, source))
75
75
76 return relmatchers, other
76 return relmatchers, other
77
77
78 def _kindpatsalwaysmatch(kindpats):
78 def _kindpatsalwaysmatch(kindpats):
79 """"Checks whether the kindspats match everything, as e.g.
79 """"Checks whether the kindspats match everything, as e.g.
80 'relpath:.' does.
80 'relpath:.' does.
81 """
81 """
82 for kind, pat, source in kindpats:
82 for kind, pat, source in kindpats:
83 if pat != '' or kind not in ['relpath', 'glob']:
83 if pat != '' or kind not in ['relpath', 'glob']:
84 return False
84 return False
85 return True
85 return True
86
86
87 class match(object):
87 class match(object):
88 def __init__(self, root, cwd, patterns, include=[], exclude=[],
88 def __init__(self, root, cwd, patterns, include=[], exclude=[],
89 default='glob', exact=False, auditor=None, ctx=None,
89 default='glob', exact=False, auditor=None, ctx=None,
90 listsubrepos=False, warn=None, badfn=None):
90 listsubrepos=False, warn=None, badfn=None):
91 """build an object to match a set of file patterns
91 """build an object to match a set of file patterns
92
92
93 arguments:
93 arguments:
94 root - the canonical root of the tree you're matching against
94 root - the canonical root of the tree you're matching against
95 cwd - the current working directory, if relevant
95 cwd - the current working directory, if relevant
96 patterns - patterns to find
96 patterns - patterns to find
97 include - patterns to include (unless they are excluded)
97 include - patterns to include (unless they are excluded)
98 exclude - patterns to exclude (even if they are included)
98 exclude - patterns to exclude (even if they are included)
99 default - if a pattern in patterns has no explicit type, assume this one
99 default - if a pattern in patterns has no explicit type, assume this one
100 exact - patterns are actually filenames (include/exclude still apply)
100 exact - patterns are actually filenames (include/exclude still apply)
101 warn - optional function used for printing warnings
101 warn - optional function used for printing warnings
102 badfn - optional bad() callback for this matcher instead of the default
102 badfn - optional bad() callback for this matcher instead of the default
103
103
104 a pattern is one of:
104 a pattern is one of:
105 'glob:<glob>' - a glob relative to cwd
105 'glob:<glob>' - a glob relative to cwd
106 're:<regexp>' - a regular expression
106 're:<regexp>' - a regular expression
107 'path:<path>' - a path relative to repository root
107 'path:<path>' - a path relative to repository root
108 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
108 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
109 'relpath:<path>' - a path relative to cwd
109 'relpath:<path>' - a path relative to cwd
110 'relre:<regexp>' - a regexp that needn't match the start of a name
110 'relre:<regexp>' - a regexp that needn't match the start of a name
111 'set:<fileset>' - a fileset expression
111 'set:<fileset>' - a fileset expression
112 'include:<path>' - a file of patterns to read and include
112 'include:<path>' - a file of patterns to read and include
113 'subinclude:<path>' - a file of patterns to match against files under
113 'subinclude:<path>' - a file of patterns to match against files under
114 the same directory
114 the same directory
115 '<something>' - a pattern of the specified default type
115 '<something>' - a pattern of the specified default type
116 """
116 """
117
117
118 self._root = root
118 self._root = root
119 self._cwd = cwd
119 self._cwd = cwd
120 self._files = [] # exact files and roots of patterns
120 self._files = [] # exact files and roots of patterns
121 self._anypats = bool(include or exclude)
121 self._anypats = bool(include or exclude)
122 self._always = False
122 self._always = False
123 self._pathrestricted = bool(include or exclude or patterns)
123 self._pathrestricted = bool(include or exclude or patterns)
124 self._warn = warn
124 self._warn = warn
125 self._includeroots = set()
125 self._includeroots = set()
126 self._includedirs = set(['.'])
126 self._includedirs = set(['.'])
127 self._excluderoots = set()
127 self._excluderoots = set()
128
128
129 if badfn is not None:
129 if badfn is not None:
130 self.bad = badfn
130 self.bad = badfn
131
131
132 matchfns = []
132 matchfns = []
133 if include:
133 if include:
134 kindpats = self._normalize(include, 'glob', root, cwd, auditor)
134 kindpats = self._normalize(include, 'glob', root, cwd, auditor)
135 self.includepat, im = _buildmatch(ctx, kindpats, '(?:/|$)',
135 self.includepat, im = _buildmatch(ctx, kindpats, '(?:/|$)',
136 listsubrepos, root)
136 listsubrepos, root)
137 self._includeroots.update(_roots(kindpats))
137 self._includeroots.update(_roots(kindpats))
138 self._includedirs.update(util.dirs(self._includeroots))
138 self._includedirs.update(util.dirs(self._includeroots))
139 matchfns.append(im)
139 matchfns.append(im)
140 if exclude:
140 if exclude:
141 kindpats = self._normalize(exclude, 'glob', root, cwd, auditor)
141 kindpats = self._normalize(exclude, 'glob', root, cwd, auditor)
142 self.excludepat, em = _buildmatch(ctx, kindpats, '(?:/|$)',
142 self.excludepat, em = _buildmatch(ctx, kindpats, '(?:/|$)',
143 listsubrepos, root)
143 listsubrepos, root)
144 if not _anypats(kindpats):
144 if not _anypats(kindpats):
145 self._excluderoots.update(_roots(kindpats))
145 self._excluderoots.update(_roots(kindpats))
146 matchfns.append(lambda f: not em(f))
146 matchfns.append(lambda f: not em(f))
147 if exact:
147 if exact:
148 if isinstance(patterns, list):
148 if isinstance(patterns, list):
149 self._files = patterns
149 self._files = patterns
150 else:
150 else:
151 self._files = list(patterns)
151 self._files = list(patterns)
152 matchfns.append(self.exact)
152 matchfns.append(self.exact)
153 elif patterns:
153 elif patterns:
154 kindpats = self._normalize(patterns, default, root, cwd, auditor)
154 kindpats = self._normalize(patterns, default, root, cwd, auditor)
155 if not _kindpatsalwaysmatch(kindpats):
155 if not _kindpatsalwaysmatch(kindpats):
156 self._files = _roots(kindpats)
156 self._files = _roots(kindpats)
157 self._anypats = self._anypats or _anypats(kindpats)
157 self._anypats = self._anypats or _anypats(kindpats)
158 self.patternspat, pm = _buildmatch(ctx, kindpats, '$',
158 self.patternspat, pm = _buildmatch(ctx, kindpats, '$',
159 listsubrepos, root)
159 listsubrepos, root)
160 matchfns.append(pm)
160 matchfns.append(pm)
161
161
162 if not matchfns:
162 if not matchfns:
163 m = util.always
163 m = util.always
164 self._always = True
164 self._always = True
165 elif len(matchfns) == 1:
165 elif len(matchfns) == 1:
166 m = matchfns[0]
166 m = matchfns[0]
167 else:
167 else:
168 def m(f):
168 def m(f):
169 for matchfn in matchfns:
169 for matchfn in matchfns:
170 if not matchfn(f):
170 if not matchfn(f):
171 return False
171 return False
172 return True
172 return True
173
173
174 self.matchfn = m
174 self.matchfn = m
175 self._fileroots = set(self._files)
175 self._fileroots = set(self._files)
176
176
177 def __call__(self, fn):
177 def __call__(self, fn):
178 return self.matchfn(fn)
178 return self.matchfn(fn)
179 def __iter__(self):
179 def __iter__(self):
180 for f in self._files:
180 for f in self._files:
181 yield f
181 yield f
182
182
183 # Callbacks related to how the matcher is used by dirstate.walk.
183 # Callbacks related to how the matcher is used by dirstate.walk.
184 # Subscribers to these events must monkeypatch the matcher object.
184 # Subscribers to these events must monkeypatch the matcher object.
185 def bad(self, f, msg):
185 def bad(self, f, msg):
186 '''Callback from dirstate.walk for each explicit file that can't be
186 '''Callback from dirstate.walk for each explicit file that can't be
187 found/accessed, with an error message.'''
187 found/accessed, with an error message.'''
188 pass
188 pass
189
189
190 # If an explicitdir is set, it will be called when an explicitly listed
190 # If an explicitdir is set, it will be called when an explicitly listed
191 # directory is visited.
191 # directory is visited.
192 explicitdir = None
192 explicitdir = None
193
193
194 # If an traversedir is set, it will be called when a directory discovered
194 # If an traversedir is set, it will be called when a directory discovered
195 # by recursive traversal is visited.
195 # by recursive traversal is visited.
196 traversedir = None
196 traversedir = None
197
197
198 def abs(self, f):
198 def abs(self, f):
199 '''Convert a repo path back to path that is relative to the root of the
199 '''Convert a repo path back to path that is relative to the root of the
200 matcher.'''
200 matcher.'''
201 return f
201 return f
202
202
203 def rel(self, f):
203 def rel(self, f):
204 '''Convert repo path back to path that is relative to cwd of matcher.'''
204 '''Convert repo path back to path that is relative to cwd of matcher.'''
205 return util.pathto(self._root, self._cwd, f)
205 return util.pathto(self._root, self._cwd, f)
206
206
207 def uipath(self, f):
207 def uipath(self, f):
208 '''Convert repo path to a display path. If patterns or -I/-X were used
208 '''Convert repo path to a display path. If patterns or -I/-X were used
209 to create this matcher, the display path will be relative to cwd.
209 to create this matcher, the display path will be relative to cwd.
210 Otherwise it is relative to the root of the repo.'''
210 Otherwise it is relative to the root of the repo.'''
211 return (self._pathrestricted and self.rel(f)) or self.abs(f)
211 return (self._pathrestricted and self.rel(f)) or self.abs(f)
212
212
213 def files(self):
213 def files(self):
214 '''Explicitly listed files or patterns or roots:
214 '''Explicitly listed files or patterns or roots:
215 if no patterns or .always(): empty list,
215 if no patterns or .always(): empty list,
216 if exact: list exact files,
216 if exact: list exact files,
217 if not .anypats(): list all files and dirs,
217 if not .anypats(): list all files and dirs,
218 else: optimal roots'''
218 else: optimal roots'''
219 return self._files
219 return self._files
220
220
221 @propertycache
221 @propertycache
222 def _dirs(self):
222 def _dirs(self):
223 return set(util.dirs(self._fileroots)) | set(['.'])
223 return set(util.dirs(self._fileroots)) | set(['.'])
224
224
225 def visitdir(self, dir):
225 def visitdir(self, dir):
226 '''Decides whether a directory should be visited based on whether it
226 '''Decides whether a directory should be visited based on whether it
227 has potential matches in it or one of its subdirectories. This is
227 has potential matches in it or one of its subdirectories. This is
228 based on the match's primary, included, and excluded patterns.
228 based on the match's primary, included, and excluded patterns.
229
229
230 Returns the string 'all' if the given directory and all subdirectories
230 Returns the string 'all' if the given directory and all subdirectories
231 should be visited. Otherwise returns True or False indicating whether
231 should be visited. Otherwise returns True or False indicating whether
232 the given directory should be visited.
232 the given directory should be visited.
233
233
234 This function's behavior is undefined if it has returned False for
234 This function's behavior is undefined if it has returned False for
235 one of the dir's parent directories.
235 one of the dir's parent directories.
236 '''
236 '''
237 if self.prefix() and dir in self._fileroots:
237 if self.prefix() and dir in self._fileroots:
238 return 'all'
238 return 'all'
239 if dir in self._excluderoots:
239 if dir in self._excluderoots:
240 return False
240 return False
241 if (self._includeroots and
241 if (self._includeroots and
242 '.' not in self._includeroots and
242 '.' not in self._includeroots and
243 dir not in self._includeroots and
243 dir not in self._includeroots and
244 dir not in self._includedirs and
244 dir not in self._includedirs and
245 not any(parent in self._includeroots
245 not any(parent in self._includeroots
246 for parent in util.finddirs(dir))):
246 for parent in util.finddirs(dir))):
247 return False
247 return False
248 return (not self._fileroots or
248 return (not self._fileroots or
249 '.' in self._fileroots or
249 '.' in self._fileroots or
250 dir in self._fileroots or
250 dir in self._fileroots or
251 dir in self._dirs or
251 dir in self._dirs or
252 any(parentdir in self._fileroots
252 any(parentdir in self._fileroots
253 for parentdir in util.finddirs(dir)))
253 for parentdir in util.finddirs(dir)))
254
254
255 def exact(self, f):
255 def exact(self, f):
256 '''Returns True if f is in .files().'''
256 '''Returns True if f is in .files().'''
257 return f in self._fileroots
257 return f in self._fileroots
258
258
259 def anypats(self):
259 def anypats(self):
260 '''Matcher uses patterns or include/exclude.'''
260 '''Matcher uses patterns or include/exclude.'''
261 return self._anypats
261 return self._anypats
262
262
263 def always(self):
263 def always(self):
264 '''Matcher will match everything and .files() will be empty
264 '''Matcher will match everything and .files() will be empty
265 - optimization might be possible and necessary.'''
265 - optimization might be possible and necessary.'''
266 return self._always
266 return self._always
267
267
268 def ispartial(self):
268 def ispartial(self):
269 '''True if the matcher won't always match.
269 '''True if the matcher won't always match.
270
270
271 Although it's just the inverse of _always in this implementation,
271 Although it's just the inverse of _always in this implementation,
272 an extension such as narrowhg might make it return something
272 an extension such as narrowhg might make it return something
273 slightly different.'''
273 slightly different.'''
274 return not self._always
274 return not self._always
275
275
276 def isexact(self):
276 def isexact(self):
277 return self.matchfn == self.exact
277 return self.matchfn == self.exact
278
278
279 def prefix(self):
279 def prefix(self):
280 return not self.always() and not self.isexact() and not self.anypats()
280 return not self.always() and not self.isexact() and not self.anypats()
281
281
282 def _normalize(self, patterns, default, root, cwd, auditor):
282 def _normalize(self, patterns, default, root, cwd, auditor):
283 '''Convert 'kind:pat' from the patterns list to tuples with kind and
283 '''Convert 'kind:pat' from the patterns list to tuples with kind and
284 normalized and rooted patterns and with listfiles expanded.'''
284 normalized and rooted patterns and with listfiles expanded.'''
285 kindpats = []
285 kindpats = []
286 for kind, pat in [_patsplit(p, default) for p in patterns]:
286 for kind, pat in [_patsplit(p, default) for p in patterns]:
287 if kind in ('glob', 'relpath'):
287 if kind in ('glob', 'relpath'):
288 pat = pathutil.canonpath(root, cwd, pat, auditor)
288 pat = pathutil.canonpath(root, cwd, pat, auditor)
289 elif kind in ('relglob', 'path'):
289 elif kind in ('relglob', 'path'):
290 pat = util.normpath(pat)
290 pat = util.normpath(pat)
291 elif kind in ('listfile', 'listfile0'):
291 elif kind in ('listfile', 'listfile0'):
292 try:
292 try:
293 files = util.readfile(pat)
293 files = util.readfile(pat)
294 if kind == 'listfile0':
294 if kind == 'listfile0':
295 files = files.split('\0')
295 files = files.split('\0')
296 else:
296 else:
297 files = files.splitlines()
297 files = files.splitlines()
298 files = [f for f in files if f]
298 files = [f for f in files if f]
299 except EnvironmentError:
299 except EnvironmentError:
300 raise error.Abort(_("unable to read file list (%s)") % pat)
300 raise error.Abort(_("unable to read file list (%s)") % pat)
301 for k, p, source in self._normalize(files, default, root, cwd,
301 for k, p, source in self._normalize(files, default, root, cwd,
302 auditor):
302 auditor):
303 kindpats.append((k, p, pat))
303 kindpats.append((k, p, pat))
304 continue
304 continue
305 elif kind == 'include':
305 elif kind == 'include':
306 try:
306 try:
307 fullpath = os.path.join(root, util.localpath(pat))
307 fullpath = os.path.join(root, util.localpath(pat))
308 includepats = readpatternfile(fullpath, self._warn)
308 includepats = readpatternfile(fullpath, self._warn)
309 for k, p, source in self._normalize(includepats, default,
309 for k, p, source in self._normalize(includepats, default,
310 root, cwd, auditor):
310 root, cwd, auditor):
311 kindpats.append((k, p, source or pat))
311 kindpats.append((k, p, source or pat))
312 except error.Abort as inst:
312 except error.Abort as inst:
313 raise error.Abort('%s: %s' % (pat, inst[0]))
313 raise error.Abort('%s: %s' % (pat, inst[0]))
314 except IOError as inst:
314 except IOError as inst:
315 if self._warn:
315 if self._warn:
316 self._warn(_("skipping unreadable pattern file "
316 self._warn(_("skipping unreadable pattern file "
317 "'%s': %s\n") % (pat, inst.strerror))
317 "'%s': %s\n") % (pat, inst.strerror))
318 continue
318 continue
319 # else: re or relre - which cannot be normalized
319 # else: re or relre - which cannot be normalized
320 kindpats.append((kind, pat, ''))
320 kindpats.append((kind, pat, ''))
321 return kindpats
321 return kindpats
322
322
323 def matchessubrepo(self, subpath):
324 return (self.exact(subpath)
325 or any(f.startswith(subpath + '/') for f in self.files()))
326
323 def exact(root, cwd, files, badfn=None):
327 def exact(root, cwd, files, badfn=None):
324 return match(root, cwd, files, exact=True, badfn=badfn)
328 return match(root, cwd, files, exact=True, badfn=badfn)
325
329
326 def always(root, cwd):
330 def always(root, cwd):
327 return match(root, cwd, [])
331 return match(root, cwd, [])
328
332
329 def badmatch(match, badfn):
333 def badmatch(match, badfn):
330 """Make a copy of the given matcher, replacing its bad method with the given
334 """Make a copy of the given matcher, replacing its bad method with the given
331 one.
335 one.
332 """
336 """
333 m = copy.copy(match)
337 m = copy.copy(match)
334 m.bad = badfn
338 m.bad = badfn
335 return m
339 return m
336
340
337 class subdirmatcher(match):
341 class subdirmatcher(match):
338 """Adapt a matcher to work on a subdirectory only.
342 """Adapt a matcher to work on a subdirectory only.
339
343
340 The paths are remapped to remove/insert the path as needed:
344 The paths are remapped to remove/insert the path as needed:
341
345
342 >>> m1 = match('root', '', ['a.txt', 'sub/b.txt'])
346 >>> m1 = match('root', '', ['a.txt', 'sub/b.txt'])
343 >>> m2 = subdirmatcher('sub', m1)
347 >>> m2 = subdirmatcher('sub', m1)
344 >>> bool(m2('a.txt'))
348 >>> bool(m2('a.txt'))
345 False
349 False
346 >>> bool(m2('b.txt'))
350 >>> bool(m2('b.txt'))
347 True
351 True
348 >>> bool(m2.matchfn('a.txt'))
352 >>> bool(m2.matchfn('a.txt'))
349 False
353 False
350 >>> bool(m2.matchfn('b.txt'))
354 >>> bool(m2.matchfn('b.txt'))
351 True
355 True
352 >>> m2.files()
356 >>> m2.files()
353 ['b.txt']
357 ['b.txt']
354 >>> m2.exact('b.txt')
358 >>> m2.exact('b.txt')
355 True
359 True
356 >>> util.pconvert(m2.rel('b.txt'))
360 >>> util.pconvert(m2.rel('b.txt'))
357 'sub/b.txt'
361 'sub/b.txt'
358 >>> def bad(f, msg):
362 >>> def bad(f, msg):
359 ... print "%s: %s" % (f, msg)
363 ... print "%s: %s" % (f, msg)
360 >>> m1.bad = bad
364 >>> m1.bad = bad
361 >>> m2.bad('x.txt', 'No such file')
365 >>> m2.bad('x.txt', 'No such file')
362 sub/x.txt: No such file
366 sub/x.txt: No such file
363 >>> m2.abs('c.txt')
367 >>> m2.abs('c.txt')
364 'sub/c.txt'
368 'sub/c.txt'
365 """
369 """
366
370
    def __init__(self, path, matcher):
        """Wrap ``matcher`` so it operates inside subdirectory ``path``.

        ``path`` is the subrepo path relative to the parent repo root.
        Filenames seen by this matcher are relative to ``path``; they are
        translated back to parent-repo paths before being handed to the
        wrapped ``matcher``.
        """
        self._root = matcher._root
        self._cwd = matcher._cwd
        self._path = path
        self._matcher = matcher
        self._always = matcher._always
        self._pathrestricted = matcher._pathrestricted

        # keep only the explicitly listed files under 'path/', with the
        # 'path/' prefix stripped off
        self._files = [f[len(path) + 1:] for f in matcher._files
                       if f.startswith(path + "/")]

        # If the parent repo had a path to this subrepo and no patterns are
        # specified, this submatcher always matches.
        if not self._always and not matcher._anypats:
            self._always = any(f == path for f in matcher._files)

        self._anypats = matcher._anypats
        # Some information is lost in the superclass's constructor, so we
        # can not accurately create the matching function for the subdirectory
        # from the inputs. Instead, we override matchfn() and visitdir() to
        # call the original matcher with the subdirectory path prepended.
        self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn)
        def visitdir(dir):
            if dir == '.':
                # '.' denotes the subrepo root itself
                return matcher.visitdir(self._path)
            return matcher.visitdir(self._path + "/" + dir)
        self.visitdir = visitdir
        self._fileroots = set(self._files)
399
396 def abs(self, f):
400 def abs(self, f):
397 return self._matcher.abs(self._path + "/" + f)
401 return self._matcher.abs(self._path + "/" + f)
398
402
399 def bad(self, f, msg):
403 def bad(self, f, msg):
400 self._matcher.bad(self._path + "/" + f, msg)
404 self._matcher.bad(self._path + "/" + f, msg)
401
405
402 def rel(self, f):
406 def rel(self, f):
403 return self._matcher.rel(self._path + "/" + f)
407 return self._matcher.rel(self._path + "/" + f)
404
408
class icasefsmatcher(match):
    """A matcher for wdir on case insensitive filesystems, which normalizes the
    given patterns to the case in the filesystem.
    """

    def __init__(self, root, cwd, patterns, include, exclude, default, auditor,
                 ctx, listsubrepos=False, badfn=None):
        init = super(icasefsmatcher, self).__init__
        # the dirstate knows the canonical (on-disk) case of tracked files
        self._dirstate = ctx.repo().dirstate
        self._dsnormalize = self._dirstate.normalize

        init(root, cwd, patterns, include, exclude, default, auditor=auditor,
             ctx=ctx, listsubrepos=listsubrepos, badfn=badfn)

        # m.exact(file) must be based off of the actual user input, otherwise
        # inexact case matches are treated as exact, and not noted without -v.
        if self._files:
            self._fileroots = set(_roots(self._kp))

    def _normalize(self, patterns, default, root, cwd, auditor):
        # remember the pre-normalization kindpats (self._kp) so __init__ can
        # rebuild _fileroots from the user's original spelling
        self._kp = super(icasefsmatcher, self)._normalize(patterns, default,
                                                          root, cwd, auditor)
        kindpats = []
        for kind, pats, source in self._kp:
            if kind not in ('re', 'relre'):  # regex can't be normalized
                p = pats
                pats = self._dsnormalize(pats)

                # Preserve the original to handle a case only rename.
                if p != pats and p in self._dirstate:
                    kindpats.append((kind, p, source))

            kindpats.append((kind, pats, source))
        return kindpats
439
443
def patkind(pattern, default=None):
    '''If pattern is 'kind:pat' with a known kind, return kind.'''
    kind, _pat = _patsplit(pattern, default)
    return kind
443
447
444 def _patsplit(pattern, default):
448 def _patsplit(pattern, default):
445 """Split a string into the optional pattern kind prefix and the actual
449 """Split a string into the optional pattern kind prefix and the actual
446 pattern."""
450 pattern."""
447 if ':' in pattern:
451 if ':' in pattern:
448 kind, pat = pattern.split(':', 1)
452 kind, pat = pattern.split(':', 1)
449 if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
453 if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
450 'listfile', 'listfile0', 'set', 'include', 'subinclude'):
454 'listfile', 'listfile0', 'set', 'include', 'subinclude'):
451 return kind, pat
455 return kind, pat
452 return default, pattern
456 return default, pattern
453
457
def _globre(pat):
    r'''Convert an extended glob string to a regexp string.

    >>> print _globre(r'?')
    .
    >>> print _globre(r'*')
    [^/]*
    >>> print _globre(r'**')
    .*
    >>> print _globre(r'**/a')
    (?:.*/)?a
    >>> print _globre(r'a/**/b')
    a\/(?:.*/)?b
    >>> print _globre(r'[a*?!^][^b][!c]')
    [a*?!^][\^b][^c]
    >>> print _globre(r'{a,b}')
    (?:a|b)
    >>> print _globre(r'.\*\?')
    \.\*\?
    '''
    i, n = 0, len(pat)
    res = ''
    group = 0
    escape = util.re.escape
    def peek():
        # next character, or False at end of pattern
        return i < n and pat[i]
    while i < n:
        c = pat[i]
        i += 1
        if c not in '*?[{},\\':
            # ordinary character: escape it for the regexp engine
            res += escape(c)
        elif c == '*':
            if peek() == '*':
                i += 1
                if peek() == '/':
                    # '**/' crosses directory boundaries and may match
                    # nothing at all
                    i += 1
                    res += '(?:.*/)?'
                else:
                    # bare '**' matches anything, including '/'
                    res += '.*'
            else:
                # single '*' never crosses a '/'
                res += '[^/]*'
        elif c == '?':
            res += '.'
        elif c == '[':
            # character class: find the closing ']' (a leading '!' or ']'
            # belongs to the class content)
            j = i
            if j < n and pat[j] in '!]':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                # unterminated class: treat '[' as a literal
                res += '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j + 1
                if stuff[0] == '!':
                    # glob negation becomes regexp class negation
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    # a literal leading '^' must be escaped inside the class
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        elif c == '{':
            # '{a,b}' alternation opens a non-capturing group
            group += 1
            res += '(?:'
        elif c == '}' and group:
            res += ')'
            group -= 1
        elif c == ',' and group:
            res += '|'
        elif c == '\\':
            # backslash escapes the next character, if any
            p = peek()
            if p:
                i += 1
                res += escape(p)
            else:
                res += escape(c)
        else:
            # '}' or ',' outside any group: literal
            res += escape(c)
    return res
531
535
def _regex(kind, pat, globsuffix):
    '''Convert a (normalized) pattern of any kind into a regular expression.
    globsuffix is appended to the regexp of globs.'''
    if not pat:
        return ''
    if kind == 're':
        # already a regexp: pass through untouched
        return pat
    if kind == 'path':
        if pat == '.':
            return ''
        return '^' + util.re.escape(pat) + '(?:/|$)'
    if kind == 'relglob':
        # may match at any directory level
        return '(?:|.*/)' + _globre(pat) + globsuffix
    if kind == 'relpath':
        return util.re.escape(pat) + '(?:/|$)'
    if kind == 'relre':
        return pat if pat.startswith('^') else '.*' + pat
    # anything else is a rooted glob
    return _globre(pat) + globsuffix
552
556
def _buildmatch(ctx, kindpats, globsuffix, listsubrepos, root):
    '''Return regexp string and a matcher function for kindpats.
    globsuffix is appended to the regexp of globs.'''
    matchfuncs = []

    subincludes, kindpats = _expandsubinclude(kindpats, root)
    if subincludes:
        def matchsubinclude(f):
            # delegate files under a subinclude prefix to that sub-matcher,
            # with the prefix stripped
            for prefix, mf in subincludes:
                if f.startswith(prefix) and mf(f[len(prefix):]):
                    return True
            return False
        matchfuncs.append(matchsubinclude)

    fset, kindpats = _expandsets(kindpats, ctx, listsubrepos)
    if fset:
        # fileset patterns were pre-evaluated into an explicit set of files
        matchfuncs.append(fset.__contains__)

    regex = ''
    if kindpats:
        # everything remaining compiles down to a single regexp
        regex, mf = _buildregexmatch(kindpats, globsuffix)
        matchfuncs.append(mf)

    if len(matchfuncs) == 1:
        return regex, matchfuncs[0]
    else:
        # a file matches when any of the individual matchers accepts it
        return regex, lambda f: any(mf(f) for mf in matchfuncs)
580
584
def _buildregexmatch(kindpats, globsuffix):
    """Build a match function from a list of kinds and kindpats,
    return regexp string and a matcher function."""
    try:
        regex = '(?:%s)' % '|'.join([_regex(k, p, globsuffix)
                                     for (k, p, s) in kindpats])
        if len(regex) > 20000:
            raise OverflowError
        return regex, _rematcher(regex)
    except OverflowError:
        # We're using a Python with a tiny regex engine and we
        # made it explode, so we'll divide the pattern list in two
        # until it works
        l = len(kindpats)
        if l < 2:
            raise
        regexa, a = _buildregexmatch(kindpats[:l//2], globsuffix)
        regexb, b = _buildregexmatch(kindpats[l//2:], globsuffix)
        # NOTE: the returned regex string is the combined (over-long) one,
        # while the matcher itself is the split pair
        return regex, lambda s: a(s) or b(s)
    except re.error:
        # recompile each pattern individually to report which one is broken
        for k, p, s in kindpats:
            try:
                _rematcher('(?:%s)' % _regex(k, p, globsuffix))
            except re.error:
                if s:
                    raise error.Abort(_("%s: invalid pattern (%s): %s") %
                                      (s, k, p))
                else:
                    raise error.Abort(_("invalid pattern (%s): %s") % (k, p))
        raise error.Abort(_("invalid pattern"))
611
615
612 def _roots(kindpats):
616 def _roots(kindpats):
613 '''return roots and exact explicitly listed files from patterns
617 '''return roots and exact explicitly listed files from patterns
614
618
615 >>> _roots([('glob', 'g/*', ''), ('glob', 'g', ''), ('glob', 'g*', '')])
619 >>> _roots([('glob', 'g/*', ''), ('glob', 'g', ''), ('glob', 'g*', '')])
616 ['g', 'g', '.']
620 ['g', 'g', '.']
617 >>> _roots([('relpath', 'r', ''), ('path', 'p/p', ''), ('path', '', '')])
621 >>> _roots([('relpath', 'r', ''), ('path', 'p/p', ''), ('path', '', '')])
618 ['r', 'p/p', '.']
622 ['r', 'p/p', '.']
619 >>> _roots([('relglob', 'rg*', ''), ('re', 're/', ''), ('relre', 'rr', '')])
623 >>> _roots([('relglob', 'rg*', ''), ('re', 're/', ''), ('relre', 'rr', '')])
620 ['.', '.', '.']
624 ['.', '.', '.']
621 '''
625 '''
622 r = []
626 r = []
623 for kind, pat, source in kindpats:
627 for kind, pat, source in kindpats:
624 if kind == 'glob': # find the non-glob prefix
628 if kind == 'glob': # find the non-glob prefix
625 root = []
629 root = []
626 for p in pat.split('/'):
630 for p in pat.split('/'):
627 if '[' in p or '{' in p or '*' in p or '?' in p:
631 if '[' in p or '{' in p or '*' in p or '?' in p:
628 break
632 break
629 root.append(p)
633 root.append(p)
630 r.append('/'.join(root) or '.')
634 r.append('/'.join(root) or '.')
631 elif kind in ('relpath', 'path'):
635 elif kind in ('relpath', 'path'):
632 r.append(pat or '.')
636 r.append(pat or '.')
633 else: # relglob, re, relre
637 else: # relglob, re, relre
634 r.append('.')
638 r.append('.')
635 return r
639 return r
636
640
637 def _anypats(kindpats):
641 def _anypats(kindpats):
638 for kind, pat, source in kindpats:
642 for kind, pat, source in kindpats:
639 if kind in ('glob', 're', 'relglob', 'relre', 'set'):
643 if kind in ('glob', 're', 'relglob', 'relre', 'set'):
640 return True
644 return True
641
645
642 _commentre = None
646 _commentre = None
643
647
def readpatternfile(filepath, warn, sourceinfo=False):
    '''parse a pattern file, returning a list of
    patterns. These patterns should be given to compile()
    to be validated and converted into a match function.

    trailing white space is dropped.
    the escape character is backslash.
    comments start with #.
    empty lines are skipped.

    lines can be of the following formats:

    syntax: regexp # defaults following lines to non-rooted regexps
    syntax: glob # defaults following lines to non-rooted globs
    re:pattern # non-rooted regular expression
    glob:pattern # non-rooted glob
    pattern # pattern of the current default type

    if sourceinfo is set, returns a list of tuples:
    (pattern, lineno, originalline). This is useful to debug ignore patterns.
    '''

    syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:',
                'include': 'include', 'subinclude': 'subinclude'}
    syntax = 'relre:'
    patterns = []

    # use a context manager so the file is closed even if parsing raises
    # (the previous open()/close() pair leaked the handle on error)
    with open(filepath) as fp:
        for lineno, line in enumerate(fp, start=1):
            if "#" in line:
                global _commentre
                if not _commentre:
                    _commentre = util.re.compile(r'((?:^|[^\\])(?:\\\\)*)#.*')
                # remove comments prefixed by an even number of escapes
                m = _commentre.search(line)
                if m:
                    line = line[:m.end(1)]
                # fixup properly escaped comments that survived the above
                line = line.replace("\\#", "#")
            line = line.rstrip()
            if not line:
                continue

            if line.startswith('syntax:'):
                s = line[7:].strip()
                try:
                    syntax = syntaxes[s]
                except KeyError:
                    if warn:
                        warn(_("%s: ignoring invalid syntax '%s'\n") %
                             (filepath, s))
                continue

            linesyntax = syntax
            # .items() instead of .iteritems(): identical behavior on
            # Python 2, and keeps this importable on Python 3
            for s, rels in syntaxes.items():
                if line.startswith(rels):
                    linesyntax = rels
                    line = line[len(rels):]
                    break
                elif line.startswith(s + ':'):
                    linesyntax = rels
                    line = line[len(s) + 1:]
                    break
            if sourceinfo:
                patterns.append((linesyntax + line, lineno, line))
            else:
                patterns.append(linesyntax + line)
    return patterns
@@ -1,1429 +1,1421 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import contextlib
10 import contextlib
11 import errno
11 import errno
12 import glob
12 import glob
13 import hashlib
13 import hashlib
14 import os
14 import os
15 import re
15 import re
16 import shutil
16 import shutil
17 import stat
17 import stat
18 import tempfile
18 import tempfile
19 import threading
19 import threading
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import wdirrev
22 from .node import wdirrev
23 from . import (
23 from . import (
24 encoding,
24 encoding,
25 error,
25 error,
26 match as matchmod,
26 match as matchmod,
27 osutil,
27 osutil,
28 pathutil,
28 pathutil,
29 phases,
29 phases,
30 revset,
30 revset,
31 similar,
31 similar,
32 util,
32 util,
33 )
33 )
34
34
35 if os.name == 'nt':
35 if os.name == 'nt':
36 from . import scmwindows as scmplatform
36 from . import scmwindows as scmplatform
37 else:
37 else:
38 from . import scmposix as scmplatform
38 from . import scmposix as scmplatform
39
39
40 systemrcpath = scmplatform.systemrcpath
40 systemrcpath = scmplatform.systemrcpath
41 userrcpath = scmplatform.userrcpath
41 userrcpath = scmplatform.userrcpath
42
42
class status(tuple):
    '''Tuple subclass exposing one list of files per status category.

    Layout: (modified, added, removed, deleted, unknown, ignored, clean).
    The 'deleted', 'unknown' and 'ignored' entries are only meaningful for
    the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
95
95
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2

    Yields (subpath, subrepo) pairs sorted by subpath; paths present only
    in ctx2 are yielded last, as empty subrepos based on ctx1.
    """
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # subpaths that exist only in ctx2 are handled separately below
    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
120
120
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secret = []
    for n in (excluded or []):
        if n not in repo:
            # discovery should not have included the filtered revision,
            # we have to explicitly exclude it until discovery is cleanup.
            continue
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secret.append(n)

    if secret:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secret))
    else:
        ui.status(_("no changes found\n"))
141
141
def checknewlabel(repo, lbl, kind):
    """Abort if ``lbl`` is not a usable label name."""
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise error.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise error.Abort(_("cannot use an integer as a name"))
155
155
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for badchar in ('\r', '\n'):
        if badchar in f:
            raise error.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
160
160
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
172
172
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # Windows always aborts; elsewhere only when explicitly requested
    abort = lval == 'abort' or os.name == 'nt'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
185
185
class casecollisionauditor(object):
    """Detect new filenames that differ only by case from tracked files.

    Calling the instance with a filename warns (or aborts, if configured)
    when the lower-cased name collides with an already-tracked file.
    """

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort  # True: raise on collision; False: only warn
        # lower-case every tracked filename once up front for O(1) lookups
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        # f: filename about to be added
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        # a collision only matters if f itself is not already tracked
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
209
209
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        # nothing filtered: no key needed
        return None
    key = None
    # only consider filtered revs at or below maxrev so the key is stable
    # for a given cache tip
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if revs:
        s = hashlib.sha1()
        for rev in revs:
            s.update('%s;' % rev)
        key = s.digest()
    return key
233
233
class abstractvfs(object):
    """Abstract base class; cannot be instantiated"""

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError as inst:
            # Only swallow "file not found"; propagate real I/O errors.
            if inst.errno != errno.ENOENT:
                raise
        return ""

    def tryreadlines(self, path, mode='rb'):
        '''gracefully return an empty array for missing files'''
        try:
            return self.readlines(path, mode=mode)
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
        return []

    @util.propertycache
    def open(self):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        # ``vfs.open(...)`` is just an alias for calling the vfs object
        # itself; propertycache makes the attribute lookup happen once.
        return self.__call__

    def read(self, path):
        with self(path, 'rb') as fp:
            return fp.read()

    def readlines(self, path, mode='rb'):
        with self(path, mode=mode) as fp:
            return fp.readlines()

    def write(self, path, data, backgroundclose=False):
        with self(path, 'wb', backgroundclose=backgroundclose) as fp:
            return fp.write(data)

    def writelines(self, path, data, mode='wb', notindexed=False):
        with self(path, mode=mode, notindexed=notindexed) as fp:
            return fp.writelines(data)

    def append(self, path, data):
        with self(path, 'ab') as fp:
            return fp.write(data)

    def basename(self, path):
        """return base element of a path (as os.path.basename would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.basename(path)

    def chmod(self, path, mode):
        return os.chmod(self.join(path), mode)

    def dirname(self, path):
        """return dirname element of a path (as os.path.dirname would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.dirname(path)

    def exists(self, path=None):
        return os.path.exists(self.join(path))

    def fstat(self, fp):
        return util.fstat(fp)

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

    def isfile(self, path=None):
        return os.path.isfile(self.join(path))

    def islink(self, path=None):
        return os.path.islink(self.join(path))

    def isfileorlink(self, path=None):
        '''return whether path is a regular file or a symlink

        Unlike isfile, this doesn't follow symlinks.'''
        try:
            st = self.lstat(path)
        except OSError:
            return False
        mode = st.st_mode
        return stat.S_ISREG(mode) or stat.S_ISLNK(mode)

    def reljoin(self, *paths):
        """join various elements of a path together (as os.path.join would do)

        The vfs base is not injected so that path stay relative. This exists
        to allow handling of strange encoding if needed."""
        return os.path.join(*paths)

    def split(self, path):
        """split top-most element of a path (as os.path.split would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.split(path)

    def lexists(self, path=None):
        return os.path.lexists(self.join(path))

    def lstat(self, path=None):
        return os.lstat(self.join(path))

    def listdir(self, path=None):
        return os.listdir(self.join(path))

    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)

    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)

    def makelock(self, info, path):
        return util.makelock(info, self.join(path))

    def mkdir(self, path=None):
        return os.mkdir(self.join(path))

    def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
        fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
                                    dir=self.join(dir), text=text)
        dname, fname = util.split(name)
        if dir:
            # Report the temp file name relative to the vfs, not absolute.
            return fd, os.path.join(dir, fname)
        else:
            return fd, fname

    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)

    def readlock(self, path):
        return util.readlock(self.join(path))

    def rename(self, src, dst, checkambig=False):
        """Rename from src to dst

        checkambig argument is used with util.filestat, and is useful
        only if destination file is guarded by any lock
        (e.g. repo.lock or repo.wlock).
        """
        dstpath = self.join(dst)
        oldstat = checkambig and util.filestat(dstpath)
        if oldstat and oldstat.stat:
            ret = util.rename(self.join(src), dstpath)
            newstat = util.filestat(dstpath)
            if newstat.isambig(oldstat):
                # stat of renamed file is ambiguous to original one
                advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                os.utime(dstpath, (advanced, advanced))
            return ret
        return util.rename(self.join(src), dstpath)

    def readlink(self, path):
        return os.readlink(self.join(path))

    def removedirs(self, path=None):
        """Remove a leaf directory and all empty intermediate ones
        """
        return util.removedirs(self.join(path))

    def rmtree(self, path=None, ignore_errors=False, forcibly=False):
        """Remove a directory tree recursively

        If ``forcibly``, this tries to remove READ-ONLY files, too.
        """
        if forcibly:
            def onerror(function, path, excinfo):
                # Only retry failed unlinks; re-raise anything else.
                if function is not os.remove:
                    raise
                # read-only files cannot be unlinked under Windows
                s = os.stat(path)
                if (s.st_mode & stat.S_IWRITE) != 0:
                    raise
                os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
                os.remove(path)
        else:
            onerror = None
        return shutil.rmtree(self.join(path),
                             ignore_errors=ignore_errors, onerror=onerror)

    def setflags(self, path, l, x):
        return util.setflags(self.join(path), l, x)

    def stat(self, path=None):
        return os.stat(self.join(path))

    def unlink(self, path=None):
        return util.unlink(self.join(path))

    def unlinkpath(self, path=None, ignoremissing=False):
        return util.unlinkpath(self.join(path), ignoremissing)

    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)

    def walk(self, path=None, onerror=None):
        """Yield (dirpath, dirs, files) tuple for each directories under path

        ``dirpath`` is relative one from the root of this vfs. This
        uses ``os.sep`` as path separator, even you specify POSIX
        style ``path``.

        "The root of this vfs" is represented as empty ``dirpath``.
        """
        root = os.path.normpath(self.join(None))
        # when dirpath == root, dirpath[prefixlen:] becomes empty
        # because len(dirpath) < prefixlen.
        prefixlen = len(pathutil.normasprefix(root))
        for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
            yield (dirpath[prefixlen:], dirs, files)

    @contextlib.contextmanager
    def backgroundclosing(self, ui, expectedcount=-1):
        """Allow files to be closed asynchronously.

        When this context manager is active, ``backgroundclose`` can be passed
        to ``__call__``/``open`` to result in the file possibly being closed
        asynchronously, on a background thread.
        """
        # This is an arbitrary restriction and could be changed if we ever
        # have a use case.
        vfs = getattr(self, 'vfs', self)
        if getattr(vfs, '_backgroundfilecloser', None):
            raise error.Abort(
                _('can only have 1 active background file closer'))

        with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc:
            try:
                vfs._backgroundfilecloser = bfc
                yield bfc
            finally:
                # Always clear the marker, even if the body raised.
                vfs._backgroundfilecloser = None
478
478
class vfs(abstractvfs):
    '''Operate files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        if expandpath:
            base = util.expandpath(base)
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        # Assigning through the property below installs the path auditor.
        self.mustaudit = audit
        self.createmode = None
        self._trustnlink = None

    @property
    def mustaudit(self):
        return self._audit

    @mustaudit.setter
    def mustaudit(self, onoff):
        self._audit = onoff
        if onoff:
            self.audit = pathutil.pathauditor(self.base)
        else:
            # Auditing disabled: accept every path unconditionally.
            self.audit = util.always

    @util.propertycache
    def _cansymlink(self):
        return util.checklink(self.base)

    @util.propertycache
    def _chmod(self):
        return util.checkexec(self.base)

    def _fixfilemode(self, name):
        if self.createmode is None or not self._chmod:
            return
        os.chmod(name, self.createmode & 0o666)

    def __call__(self, path, mode="r", text=False, atomictemp=False,
                 notindexed=False, backgroundclose=False, checkambig=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.

        If ``backgroundclose`` is passed, the file may be closed asynchronously.
        It can only be used if the ``self.backgroundclosing()`` context manager
        is active. This should only be specified if the following criteria hold:

        1. There is a potential for writing thousands of files. Unless you
           are writing thousands of files, the performance benefits of
           asynchronously closing files is not realized.
        2. Files are opened exactly once for the ``backgroundclosing``
           active duration and are therefore free of race conditions between
           closing a file on a background thread and reopening it. (If the
           file were opened multiple times, there could be unflushed data
           because the original file handle hasn't been flushed/closed yet.)

        ``checkambig`` argument is passed to atomictemplfile (valid
        only for writing), and is useful only if target file is
        guarded by any lock (e.g. repo.lock or repo.wlock).
        '''
        if self._audit:
            r = util.checkosfilename(path)
            if r:
                raise error.Abort("%s: %r" % (r, path))
            self.audit(path)
        f = self.join(path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.makedirs(dirname, self.createmode, notindexed)
                    return util.atomictempfile(f, mode, self.createmode,
                                               checkambig=checkambig)
                try:
                    if 'w' in mode:
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        with util.posixfile(f):
                            nlink = util.nlinks(f)
                            if nlink < 1:
                                nlink = 2 # force mktempcopy (issue1922)
                except (OSError, IOError) as e:
                    if e.errno != errno.ENOENT:
                        raise
                    nlink = 0
                    util.makedirs(dirname, self.createmode, notindexed)
                if nlink > 0:
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    if nlink > 1 or not self._trustnlink:
                        # Break hardlinks (copy-on-write) before modifying.
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)

        if backgroundclose:
            if not self._backgroundfilecloser:
                raise error.Abort(_('backgroundclose can only be used when a '
                                    'backgroundclosing context manager is active')
                                  )

            fp = delayclosedfile(fp, self._backgroundfilecloser)

        return fp

    def symlink(self, src, dst):
        self.audit(dst)
        linkname = self.join(dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        util.makedirs(os.path.dirname(linkname), self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError as err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            # Filesystem can't symlink: store the link target as file content.
            self.write(dst, src)

    def join(self, path, *insidef):
        if path:
            return os.path.join(self.base, path, *insidef)
        else:
            return self.base

# Legacy alias kept for callers that still use the old name.
opener = vfs
626
626
class auditvfs(object):
    """Mixin base for vfs wrappers: forwards audit/options to an inner vfs."""

    def __init__(self, vfs):
        self.vfs = vfs

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, onoff):
        self.vfs.mustaudit = onoff

    # The audit toggle is delegated straight to the wrapped vfs.
    mustaudit = property(_getmustaudit, _setmustaudit)

    def _getoptions(self):
        return self.vfs.options

    def _setoptions(self, value):
        self.vfs.options = value

    # Likewise the options mapping lives on the wrapped vfs.
    options = property(_getoptions, _setoptions)
646
646
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # Rewrite the filename before handing it to the wrapped vfs.
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path, *insidef):
        if not path:
            # Empty path: nothing to filter, delegate as-is.
            return self.vfs.join(path)
        return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))

filteropener = filtervfs
664
664
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # Only plain read modes may pass through this wrapper.
        if mode != 'r' and mode != 'rb':
            raise error.Abort(_('this vfs is read only'))
        return self.vfs(path, mode, *args, **kw)

    def join(self, path, *insidef):
        return self.vfs.join(path, *insidef)
678
678
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only errors on the starting path itself are fatal.
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record ``dirname``'s stat unless an identical directory was
            # already seen; returns True when it is new (symlink-loop guard).
            dirstat = os.stat(dirname)
            if any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst):
                return False
            dirlst.append(dirstat)
            return True
    else:
        # Without samestat we cannot detect loops, so don't follow links.
        followsym = False

    if followsym and seen_dirs is None:
        seen_dirs = []
        adddir(seen_dirs, path)

    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' not in dirs:
            if followsym:
                # Keep only unseen, non-symlink children; recurse into
                # symlinked directories explicitly so loops are caught.
                newdirs = []
                for d in dirs:
                    fname = os.path.join(root, d)
                    if adddir(seen_dirs, fname):
                        if os.path.islink(fname):
                            for hgname in walkrepos(fname, True, seen_dirs):
                                yield hgname
                        else:
                            newdirs.append(d)
                dirs[:] = newdirs
            continue
        yield root  # found a repository
        qroot = os.path.join(root, '.hg', 'patches')
        if os.path.isdir(os.path.join(qroot, '.hg')):
            yield qroot  # we have a patch queue repo here
        if recurse:
            # avoid recursing inside the .hg directory
            dirs.remove('.hg')
        else:
            dirs[:] = []  # don't descend further
726
726
def osrcpath():
    '''return default os-specific hgrc search path'''
    paths = []
    defaultpath = os.path.join(util.datapath, 'default.d')
    if os.path.isdir(defaultpath):
        # Bundled default config snippets come first.
        paths.extend(os.path.join(defaultpath, f)
                     for f, kind in osutil.listdir(defaultpath)
                     if f.endswith('.rc'))
    # System-wide settings, then the user's own, override the defaults.
    paths.extend(systemrcpath())
    paths.extend(userrcpath())
    return [os.path.normpath(f) for f in paths]
739
739
# memoized result of rcpath(); stays None until first computed
_rcpath = None

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = osrcpath()
        return _rcpath
    found = []
    for item in os.environ['HGRCPATH'].split(os.pathsep):
        if not item:
            # empty entries are skipped; an entirely empty HGRCPATH
            # therefore yields an empty search path
            continue
        expanded = util.expandpath(item)
        if os.path.isdir(expanded):
            for name, kind in osutil.listdir(expanded):
                if name.endswith('.rc'):
                    found.append(os.path.join(expanded, name))
        else:
            found.append(expanded)
    _rcpath = found
    return _rcpath
765
765
def intrev(rev):
    """Return integer for a given revision that can be used in comparison or
    arithmetic operation.

    The working directory is conventionally represented by ``None``; it is
    mapped to the ``wdirrev`` sentinel so callers can compare numerically.
    """
    return wdirrev if rev is None else rev
772
772
def revsingle(repo, revspec, default='.'):
    """Resolve ``revspec`` to a single changectx (the last of the set).

    Falls back to ``default`` when the spec is empty; raises Abort when the
    revset evaluates to nothing.
    """
    # the explicit "!= 0" keeps revision number 0 from being mistaken
    # for an empty spec
    if not revspec and revspec != 0:
        return repo[default]

    revs = revrange(repo, [revspec])
    if not revs:
        raise error.Abort(_('empty revision set'))
    return repo[revs.last()]
781
781
def _pairspec(revspec):
    """Return True if the top-level revset expression is a range form.

    A range spec always denotes a pair, even when both ends resolve to the
    same revision.
    """
    tree = revset.optimize(revset.parse(revspec))  # fix up "x^:y" -> "(x^):y"
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
786
786
def revpair(repo, revs):
    """Resolve user revision specs into a ``(first, second)`` node pair.

    With no specs, returns (working dir first parent, None).  ``second`` is
    None when the specs name a single revision that is not spelled as a
    range expression.  Raises Abort on empty ranges.
    """
    if not revs:
        return repo.dirstate.p1(), None

    resolved = revrange(repo, revs)

    if not resolved:
        first = second = None
    elif resolved.isascending():
        first, second = resolved.min(), resolved.max()
    elif resolved.isdescending():
        first, second = resolved.max(), resolved.min()
    else:
        first, second = resolved.first(), resolved.last()

    if first is None:
        raise error.Abort(_('empty revision range'))
    # "a..b" where one side resolves to nothing is an error, but only when
    # at least one of the individual specs really is empty
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
816
816
def revrange(repo, specs):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using
    user-specified config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revset.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # wrap bare integers so they survive revset parsing
    allspecs = [revset.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    matcher = revset.matchany(repo.ui, allspecs, repo)
    return matcher(repo)
845
845
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        return parents  # merge: both sides matter
    if repo.ui.debugflag:
        # in debug mode always show the (possibly null) second parent too
        return [parents[0], repo['null']]
    if parents[0].rev() >= intrev(ctx.rev()) - 1:
        # linear history: the immediately preceding revision carries no
        # extra information
        return []
    return parents
861
861
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by the shell.'''
    # defect fixed: docstring read "we assume it already has already been
    # done by sh" (duplicated word / garbled sentence)
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            # bare pattern (no "glob:", "re:", ... prefix): expand it
            try:
                globbed = glob.glob(pat)
            except re.error:
                # invalid pattern; fall back to the literal name
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        # keep prefixed patterns and non-matching bare names unchanged
        ret.append(kindpat)
    return ret
880
880
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.

    The matcher will warn about bad matches, unless an alternate badfn
    callback is provided.'''
    if pats == ("",):
        pats = []  # a lone empty pattern means "no patterns at all"
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # on windows the shell does not expand globs for us
        pats = expandpats(pats or [])

    def warnbad(f, msg):
        # default bad-file handler: report through the repo ui
        ctx.repo().ui.warn("%s: %s\n" % (matcher.rel(f), msg))

    if badfn is None:
        badfn = warnbad

    matcher = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                        default, listsubrepos=opts.get('subrepos'),
                        badfn=badfn)

    if matcher.always():
        pats = []  # patterns were redundant; the matcher matches everything
    return matcher, pats
905
905
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    matcher, ignoredpats = matchandpats(ctx, pats, opts, globbed, default,
                                        badfn=badfn)
    return matcher
910
910
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    return matchmod.always(repo.root, repo.getcwd())
914
914
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
918
918
def origpath(ui, repo, filepath):
    '''customize where .orig files are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath) if not specified
    '''
    backupdir = ui.config('ui', 'origbackuppath', None)
    if backupdir is None:
        # default behavior: .orig file next to the original
        return filepath + ".orig"

    # mirror the file's in-repo path underneath the configured backup dir
    relpath = os.path.relpath(filepath, start=repo.root)
    fullorigpath = repo.wjoin(backupdir, relpath)

    parent = repo.vfs.dirname(fullorigpath)
    if not repo.vfs.exists(parent):
        ui.note(_('creating directory: %s\n') % parent)
        util.makedirs(parent)

    return fullorigpath + ".orig"
938
938
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    """Schedule new files for addition and missing files for removal,
    recording sufficiently similar removed/added pairs as renames.

    Recurses into subrepos whose path is covered by the matcher (or into
    all of them with --subrepos).  Returns 1 if anything was rejected or a
    subrepo reported failure, 0 otherwise.
    """
    if opts is None:
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        if opts.get('subrepos') or m.matchessubrepo(subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.subdirmatcher(subpath, m)
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # collect explicitly-named files that do not exist, but keep going
    rejected = []
    def badfn(f, msg):
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        # stay quiet about exact matches unless --verbose
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # an explicitly-requested file that was rejected is a hard failure
    for f in rejected:
        if f in m.files():
            return 1
    return ret
1002
994
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []
    # the lambda closes over `rejected`, collecting paths the matcher rejects
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # signal failure if any of the requested files was rejected
    for f in rejected:
        if f in m.files():
            return 1
    return 0
1031
1023
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed, forgotten) lists of paths.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for abs, st in walkresults.iteritems():
        # classify by dirstate entry vs. on-disk presence (st is the stat
        # result, falsy when the file is missing on disk).  Branch order is
        # significant here.
        entry = dirstate[abs]
        if entry == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif entry != 'r' and not st:
            deleted.append(abs)
        elif entry == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif entry == 'r' and not st:
            removed.append(abs)
        elif entry == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
1060
1052
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a dict mapping new name -> old name for pairs whose similarity
    score meets the threshold; empty when similarity is disabled.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if (repo.ui.verbose or not matcher.exact(old)
            or not matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
1075
1067
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    with repo.wlock():
        wctx.forget(deleted)
        wctx.add(unknown)
        # renames maps new name -> old name
        for new, old in renames.iteritems():
            wctx.copy(old, new)
1085
1077
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow a chain: if src itself was copied, track from the origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:
        # copying back a copy? just make sure dst is tracked normally
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return
    if repo.dirstate[origsrc] == 'a' and origsrc == src:
        # source was only added in the working copy: no copy data to store
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
1104
1096
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the full requirement set; raises RequirementError when the file
    is corrupt or names a feature this Mercurial does not know.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for feature in requirements:
        if feature in supported:
            continue
        # an empty or non-alphanumeric-leading entry means the file is bogus
        if not feature or not feature[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(feature)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
                   " for more information"))
    return requirements
1123
1115
def writerequires(opener, requirements):
    """Write the requirements file via opener, one sorted entry per line."""
    with opener('requires', 'w') as fp:
        for name in sorted(requirements):
            fp.write("%s\n" % name)
1128
1120
class filecachesubentry(object):
    """Stat-based change tracker for a single path.

    # path: the tracked file path
    # cachestat: last recorded stat info (util.cachestat) or None
    # _cacheable: whether the filesystem can reliably report replacement;
    #             None means "not determined yet"
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-stat only when caching can work at all
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable
        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """Return True when the file changed (or change cannot be ruled out)."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        # returns None for a missing file; re-raises other OS errors
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1183
1175
class filecacheentry(object):
    """A bundle of filecachesubentry objects, one per tracked path."""

    def __init__(self, paths, stat=True):
        # one sub-entry per path; `stat` controls whether to stat eagerly
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        # any() short-circuits on the first changed entry, matching the
        # behavior of an explicit early-return loop
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1200
1192
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self
        # fast path: a value cached on the instance is always paired with
        # a _filecache entry
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            # recompute only when one of the tracked files changed
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name in obj._filecache:
            entry = obj._filecache[self.name]
        else:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            entry = filecacheentry(paths, False)
            obj._filecache[self.name] = entry

        entry.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1279
1271
1280 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1272 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1281 if lock is None:
1273 if lock is None:
1282 raise error.LockInheritanceContractViolation(
1274 raise error.LockInheritanceContractViolation(
1283 'lock can only be inherited while held')
1275 'lock can only be inherited while held')
1284 if environ is None:
1276 if environ is None:
1285 environ = {}
1277 environ = {}
1286 with lock.inherit() as locker:
1278 with lock.inherit() as locker:
1287 environ[envvar] = locker
1279 environ[envvar] = locker
1288 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1280 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1289
1281
1290 def wlocksub(repo, cmd, *args, **kwargs):
1282 def wlocksub(repo, cmd, *args, **kwargs):
1291 """run cmd as a subprocess that allows inheriting repo's wlock
1283 """run cmd as a subprocess that allows inheriting repo's wlock
1292
1284
1293 This can only be called while the wlock is held. This takes all the
1285 This can only be called while the wlock is held. This takes all the
1294 arguments that ui.system does, and returns the exit code of the
1286 arguments that ui.system does, and returns the exit code of the
1295 subprocess."""
1287 subprocess."""
1296 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1288 return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
1297 **kwargs)
1289 **kwargs)
1298
1290
1299 def gdinitconfig(ui):
1291 def gdinitconfig(ui):
1300 """helper function to know if a repo should be created as general delta
1292 """helper function to know if a repo should be created as general delta
1301 """
1293 """
1302 # experimental config: format.generaldelta
1294 # experimental config: format.generaldelta
1303 return (ui.configbool('format', 'generaldelta', False)
1295 return (ui.configbool('format', 'generaldelta', False)
1304 or ui.configbool('format', 'usegeneraldelta', True))
1296 or ui.configbool('format', 'usegeneraldelta', True))
1305
1297
1306 def gddeltaconfig(ui):
1298 def gddeltaconfig(ui):
1307 """helper function to know if incoming delta should be optimised
1299 """helper function to know if incoming delta should be optimised
1308 """
1300 """
1309 # experimental config: format.generaldelta
1301 # experimental config: format.generaldelta
1310 return ui.configbool('format', 'generaldelta', False)
1302 return ui.configbool('format', 'generaldelta', False)
1311
1303
1312 class delayclosedfile(object):
1304 class delayclosedfile(object):
1313 """Proxy for a file object whose close is delayed.
1305 """Proxy for a file object whose close is delayed.
1314
1306
1315 Do not instantiate outside of the vfs layer.
1307 Do not instantiate outside of the vfs layer.
1316 """
1308 """
1317
1309
1318 def __init__(self, fh, closer):
1310 def __init__(self, fh, closer):
1319 object.__setattr__(self, '_origfh', fh)
1311 object.__setattr__(self, '_origfh', fh)
1320 object.__setattr__(self, '_closer', closer)
1312 object.__setattr__(self, '_closer', closer)
1321
1313
1322 def __getattr__(self, attr):
1314 def __getattr__(self, attr):
1323 return getattr(self._origfh, attr)
1315 return getattr(self._origfh, attr)
1324
1316
1325 def __setattr__(self, attr, value):
1317 def __setattr__(self, attr, value):
1326 return setattr(self._origfh, attr, value)
1318 return setattr(self._origfh, attr, value)
1327
1319
1328 def __delattr__(self, attr):
1320 def __delattr__(self, attr):
1329 return delattr(self._origfh, attr)
1321 return delattr(self._origfh, attr)
1330
1322
1331 def __enter__(self):
1323 def __enter__(self):
1332 return self._origfh.__enter__()
1324 return self._origfh.__enter__()
1333
1325
1334 def __exit__(self, exc_type, exc_value, exc_tb):
1326 def __exit__(self, exc_type, exc_value, exc_tb):
1335 self._closer.close(self._origfh)
1327 self._closer.close(self._origfh)
1336
1328
1337 def close(self):
1329 def close(self):
1338 self._closer.close(self._origfh)
1330 self._closer.close(self._origfh)
1339
1331
1340 class backgroundfilecloser(object):
1332 class backgroundfilecloser(object):
1341 """Coordinates background closing of file handles on multiple threads."""
1333 """Coordinates background closing of file handles on multiple threads."""
1342 def __init__(self, ui, expectedcount=-1):
1334 def __init__(self, ui, expectedcount=-1):
1343 self._running = False
1335 self._running = False
1344 self._entered = False
1336 self._entered = False
1345 self._threads = []
1337 self._threads = []
1346 self._threadexception = None
1338 self._threadexception = None
1347
1339
1348 # Only Windows/NTFS has slow file closing. So only enable by default
1340 # Only Windows/NTFS has slow file closing. So only enable by default
1349 # on that platform. But allow to be enabled elsewhere for testing.
1341 # on that platform. But allow to be enabled elsewhere for testing.
1350 defaultenabled = os.name == 'nt'
1342 defaultenabled = os.name == 'nt'
1351 enabled = ui.configbool('worker', 'backgroundclose', defaultenabled)
1343 enabled = ui.configbool('worker', 'backgroundclose', defaultenabled)
1352
1344
1353 if not enabled:
1345 if not enabled:
1354 return
1346 return
1355
1347
1356 # There is overhead to starting and stopping the background threads.
1348 # There is overhead to starting and stopping the background threads.
1357 # Don't do background processing unless the file count is large enough
1349 # Don't do background processing unless the file count is large enough
1358 # to justify it.
1350 # to justify it.
1359 minfilecount = ui.configint('worker', 'backgroundcloseminfilecount',
1351 minfilecount = ui.configint('worker', 'backgroundcloseminfilecount',
1360 2048)
1352 2048)
1361 # FUTURE dynamically start background threads after minfilecount closes.
1353 # FUTURE dynamically start background threads after minfilecount closes.
1362 # (We don't currently have any callers that don't know their file count)
1354 # (We don't currently have any callers that don't know their file count)
1363 if expectedcount > 0 and expectedcount < minfilecount:
1355 if expectedcount > 0 and expectedcount < minfilecount:
1364 return
1356 return
1365
1357
1366 # Windows defaults to a limit of 512 open files. A buffer of 128
1358 # Windows defaults to a limit of 512 open files. A buffer of 128
1367 # should give us enough headway.
1359 # should give us enough headway.
1368 maxqueue = ui.configint('worker', 'backgroundclosemaxqueue', 384)
1360 maxqueue = ui.configint('worker', 'backgroundclosemaxqueue', 384)
1369 threadcount = ui.configint('worker', 'backgroundclosethreadcount', 4)
1361 threadcount = ui.configint('worker', 'backgroundclosethreadcount', 4)
1370
1362
1371 ui.debug('starting %d threads for background file closing\n' %
1363 ui.debug('starting %d threads for background file closing\n' %
1372 threadcount)
1364 threadcount)
1373
1365
1374 self._queue = util.queue(maxsize=maxqueue)
1366 self._queue = util.queue(maxsize=maxqueue)
1375 self._running = True
1367 self._running = True
1376
1368
1377 for i in range(threadcount):
1369 for i in range(threadcount):
1378 t = threading.Thread(target=self._worker, name='backgroundcloser')
1370 t = threading.Thread(target=self._worker, name='backgroundcloser')
1379 self._threads.append(t)
1371 self._threads.append(t)
1380 t.start()
1372 t.start()
1381
1373
1382 def __enter__(self):
1374 def __enter__(self):
1383 self._entered = True
1375 self._entered = True
1384 return self
1376 return self
1385
1377
1386 def __exit__(self, exc_type, exc_value, exc_tb):
1378 def __exit__(self, exc_type, exc_value, exc_tb):
1387 self._running = False
1379 self._running = False
1388
1380
1389 # Wait for threads to finish closing so open files don't linger for
1381 # Wait for threads to finish closing so open files don't linger for
1390 # longer than lifetime of context manager.
1382 # longer than lifetime of context manager.
1391 for t in self._threads:
1383 for t in self._threads:
1392 t.join()
1384 t.join()
1393
1385
1394 def _worker(self):
1386 def _worker(self):
1395 """Main routine for worker thread."""
1387 """Main routine for worker thread."""
1396 while True:
1388 while True:
1397 try:
1389 try:
1398 fh = self._queue.get(block=True, timeout=0.100)
1390 fh = self._queue.get(block=True, timeout=0.100)
1399 # Need to catch or the thread will terminate and
1391 # Need to catch or the thread will terminate and
1400 # we could orphan file descriptors.
1392 # we could orphan file descriptors.
1401 try:
1393 try:
1402 fh.close()
1394 fh.close()
1403 except Exception as e:
1395 except Exception as e:
1404 # Stash so can re-raise from main thread later.
1396 # Stash so can re-raise from main thread later.
1405 self._threadexception = e
1397 self._threadexception = e
1406 except util.empty:
1398 except util.empty:
1407 if not self._running:
1399 if not self._running:
1408 break
1400 break
1409
1401
1410 def close(self, fh):
1402 def close(self, fh):
1411 """Schedule a file for closing."""
1403 """Schedule a file for closing."""
1412 if not self._entered:
1404 if not self._entered:
1413 raise error.Abort(_('can only call close() when context manager '
1405 raise error.Abort(_('can only call close() when context manager '
1414 'active'))
1406 'active'))
1415
1407
1416 # If a background thread encountered an exception, raise now so we fail
1408 # If a background thread encountered an exception, raise now so we fail
1417 # fast. Otherwise we may potentially go on for minutes until the error
1409 # fast. Otherwise we may potentially go on for minutes until the error
1418 # is acted on.
1410 # is acted on.
1419 if self._threadexception:
1411 if self._threadexception:
1420 e = self._threadexception
1412 e = self._threadexception
1421 self._threadexception = None
1413 self._threadexception = None
1422 raise e
1414 raise e
1423
1415
1424 # If we're not actively running, close synchronously.
1416 # If we're not actively running, close synchronously.
1425 if not self._running:
1417 if not self._running:
1426 fh.close()
1418 fh.close()
1427 return
1419 return
1428
1420
1429 self._queue.put(fh, block=True, timeout=None)
1421 self._queue.put(fh, block=True, timeout=None)
General Comments 0
You need to be logged in to leave comments. Login now