changeset_printer: move _meaningful_parentrevs() to scmutil...
Yuya Nishihara
r26433:3ad41638 default
@@ -1,3364 +1,3348 @@
# cmdutil.py - help for command processing in mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from node import hex, bin, nullid, nullrev, short
from i18n import _
import os, sys, errno, re, tempfile, cStringIO, shutil
import util, scmutil, templater, patch, error, templatekw, revlog, copies
import match as matchmod
import repair, graphmod, revset, phases, obsolete, pathutil
import changelog
import bookmarks
import encoding
import formatter
import crecord as crecordmod
import lock as lockmod

def ishunk(x):
    hunkclasses = (crecordmod.uihunk, patch.recordhunk)
    return isinstance(x, hunkclasses)

def newandmodified(chunks, originalchunks):
    newlyaddedandmodifiedfiles = set()
    for chunk in chunks:
        if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \
            originalchunks:
            newlyaddedandmodifiedfiles.add(chunk.header.filename())
    return newlyaddedandmodifiedfiles

def parsealiases(cmd):
    return cmd.lstrip("^").split("|")

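# For example, a command table key of the form "^log|history" gives
#     parsealiases("^log|history") == ['log', 'history']
# (the leading "^" only marks a command for the short help listing).
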
def setupwrapcolorwrite(ui):
    # wrap ui.write so diff output can be labeled/colorized
    def wrapwrite(orig, *args, **kw):
        label = kw.pop('label', '')
        for chunk, l in patch.difflabel(lambda: args):
            orig(chunk, label=label + l)

    oldwrite = ui.write
    def wrap(*args, **kwargs):
        return wrapwrite(oldwrite, *args, **kwargs)
    setattr(ui, 'write', wrap)
    return oldwrite

def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    if usecurses:
        if testfile:
            recordfn = crecordmod.testdecorator(testfile,
                                                crecordmod.testchunkselector)
        else:
            recordfn = crecordmod.chunkselector

        return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)

    else:
        return patch.filterpatch(ui, originalhunks, operation)

def recordfilter(ui, originalhunks, operation=None):
    """ Prompts the user to filter the originalhunks and return a list of
    selected hunks.
    *operation* is used for ui purposes to indicate to the user
    what kind of filtering they are doing: reverting, committing, shelving, etc.
    *operation* has to be a translated string.
    """
    usecurses = ui.configbool('experimental', 'crecord', False)
    testfile = ui.config('experimental', 'crecordtest', None)
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks = filterchunks(ui, originalhunks, usecurses, testfile,
                                 operation)
    finally:
        ui.write = oldwrite
    return newchunks

def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    import merge as mergemod

    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise util.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is the generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare the working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by the non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        merge = len(repo[None].parents()) > 1
        if merge:
            raise util.Abort(_('cannot partially commit a merge '
                               '(use "hg commit" instead)'))

        status = repo.status(match=match)
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, so we have the intending-to-apply subset of it
        try:
            chunks = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise util.Abort(_('error parsing patch: %s') % err)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles if f in modified or f in \
                    newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname)
                shutil.copystat(repo.wjoin(f), tmpname)
                backups[f] = tmpname

            fp = cStringIO.StringIO()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles]
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                choices = lambda key: key in backups
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, choices)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise util.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    util.copyfile(tmpname, repo.wjoin(realname))
                    # Our calls to copystat() here and above are a
                    # hack to trick any editors that have f open that
                    # we haven't modified them.
                    #
                    # Also note that this is racy as an editor could
                    # notice the file's mtime before we've finished
                    # writing it.
                    shutil.copystat(tmpname, repo.wjoin(realname))
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                pass

    def recordinwlock(ui, repo, message, match, opts):
        wlock = repo.wlock()
        try:
            return recordfunc(ui, repo, message, match, opts)
        finally:
            wlock.release()

    return commit(ui, repo, recordinwlock, pats, opts)

def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for e in keys:
        aliases = parsealiases(e)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds

def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = choice.keys()
        clist.sort()
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        return choice.values()[0]

    raise error.UnknownCommand(cmd, allcmds)

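# For example, with strict=False an unambiguous prefix resolves to the full
# command name, so with a typical command table something like
#     findcmd("stat", table, strict=False)
# would resolve to "status"; a prefix matching several commands raises
# error.AmbiguousCommand, and an unknown name raises error.UnknownCommand.
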
def findrepo(p):
    while not os.path.isdir(os.path.join(p, ".hg")):
        oldp, p = p, os.path.dirname(p)
        if p == oldp:
            return None

    return p

def bailifchanged(repo, merge=True):
    if merge and repo.dirstate.p2() != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_('uncommitted changes'))
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged()

def logmessage(ui, opts):
    """ get the log message according to the -m and -l options """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError as inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message

def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    a merge is being committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        if ctxorbool:
            return baseformname + ".merge"
    elif 1 < len(ctxorbool.parents()):
        return baseformname + ".merge"

    return baseformname + ".normal"

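# For example, committing a non-merge working directory gives
#     mergeeditform(repo[None], 'import.normal') == 'import.normal.normal'
# while a working directory with two parents (or ctxorbool=True) yields
# 'import.normal.merge'; the result selects the matching committemplate entry.
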
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with the edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return the actual text to be
    stored into history. This allows the description to be changed before
    storing.

    'extramsg' is an extra message to be shown in the editor instead of
    the 'Leave message empty to abort commit' line. The 'HG: ' prefix and EOL
    are automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific to usage in MQ.
    """
    if edit or finishdesc or extramsg:
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor

def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0:
            raise util.Abort(_('limit must be positive'))
    else:
        limit = None
    return limit

def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', str(desc))
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise util.Abort(_("invalid format spec '%%%s' in output filename") %
                         inst.args[0])

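# For example (illustrative values): if short(node) were "1234567890ab" and the
# repository root were ".../mercurial", then
#     makefilename(repo, 'hg-%h-%b.patch', node)
# would return 'hg-1234567890ab-mercurial.patch'; an unknown spec such as '%q'
# aborts with "invalid format spec '%q' in output filename".
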
443 def makefileobj(repo, pat, node=None, desc=None, total=None,
443 def makefileobj(repo, pat, node=None, desc=None, total=None,
444 seqno=None, revwidth=None, mode='wb', modemap=None,
444 seqno=None, revwidth=None, mode='wb', modemap=None,
445 pathname=None):
445 pathname=None):
446
446
447 writable = mode not in ('r', 'rb')
447 writable = mode not in ('r', 'rb')
448
448
449 if not pat or pat == '-':
449 if not pat or pat == '-':
450 if writable:
450 if writable:
451 fp = repo.ui.fout
451 fp = repo.ui.fout
452 else:
452 else:
453 fp = repo.ui.fin
453 fp = repo.ui.fin
454 if util.safehasattr(fp, 'fileno'):
454 if util.safehasattr(fp, 'fileno'):
455 return os.fdopen(os.dup(fp.fileno()), mode)
455 return os.fdopen(os.dup(fp.fileno()), mode)
456 else:
456 else:
457 # if this fp can't be duped properly, return
457 # if this fp can't be duped properly, return
458 # a dummy object that can be closed
458 # a dummy object that can be closed
459 class wrappedfileobj(object):
459 class wrappedfileobj(object):
460 noop = lambda x: None
460 noop = lambda x: None
461 def __init__(self, f):
461 def __init__(self, f):
462 self.f = f
462 self.f = f
463 def __getattr__(self, attr):
463 def __getattr__(self, attr):
464 if attr == 'close':
464 if attr == 'close':
465 return self.noop
465 return self.noop
466 else:
466 else:
467 return getattr(self.f, attr)
467 return getattr(self.f, attr)
468
468
469 return wrappedfileobj(fp)
469 return wrappedfileobj(fp)
470 if util.safehasattr(pat, 'write') and writable:
470 if util.safehasattr(pat, 'write') and writable:
471 return pat
471 return pat
472 if util.safehasattr(pat, 'read') and 'r' in mode:
472 if util.safehasattr(pat, 'read') and 'r' in mode:
473 return pat
473 return pat
474 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
474 fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
475 if modemap is not None:
475 if modemap is not None:
476 mode = modemap.get(fn, mode)
476 mode = modemap.get(fn, mode)
477 if mode == 'wb':
477 if mode == 'wb':
478 modemap[fn] = 'ab'
478 modemap[fn] = 'ab'
479 return open(fn, mode)
479 return open(fn, mode)
480
480
481 def openrevlog(repo, cmd, file_, opts):
481 def openrevlog(repo, cmd, file_, opts):
482 """opens the changelog, manifest, a filelog or a given revlog"""
482 """opens the changelog, manifest, a filelog or a given revlog"""
483 cl = opts['changelog']
483 cl = opts['changelog']
484 mf = opts['manifest']
484 mf = opts['manifest']
485 dir = opts['dir']
485 dir = opts['dir']
486 msg = None
486 msg = None
487 if cl and mf:
487 if cl and mf:
488 msg = _('cannot specify --changelog and --manifest at the same time')
488 msg = _('cannot specify --changelog and --manifest at the same time')
489 elif cl and dir:
489 elif cl and dir:
490 msg = _('cannot specify --changelog and --dir at the same time')
490 msg = _('cannot specify --changelog and --dir at the same time')
491 elif cl or mf:
491 elif cl or mf:
492 if file_:
492 if file_:
493 msg = _('cannot specify filename with --changelog or --manifest')
493 msg = _('cannot specify filename with --changelog or --manifest')
494 elif not repo:
494 elif not repo:
495 msg = _('cannot specify --changelog or --manifest or --dir '
495 msg = _('cannot specify --changelog or --manifest or --dir '
496 'without a repository')
496 'without a repository')
497 if msg:
497 if msg:
498 raise util.Abort(msg)
498 raise util.Abort(msg)
499
499
500 r = None
500 r = None
501 if repo:
501 if repo:
502 if cl:
502 if cl:
503 r = repo.unfiltered().changelog
503 r = repo.unfiltered().changelog
504 elif dir:
504 elif dir:
505 if 'treemanifest' not in repo.requirements:
505 if 'treemanifest' not in repo.requirements:
506 raise util.Abort(_("--dir can only be used on repos with "
506 raise util.Abort(_("--dir can only be used on repos with "
507 "treemanifest enabled"))
507 "treemanifest enabled"))
508 dirlog = repo.dirlog(file_)
508 dirlog = repo.dirlog(file_)
509 if len(dirlog):
509 if len(dirlog):
510 r = dirlog
510 r = dirlog
511 elif mf:
511 elif mf:
512 r = repo.manifest
512 r = repo.manifest
513 elif file_:
513 elif file_:
514 filelog = repo.file(file_)
514 filelog = repo.file(file_)
515 if len(filelog):
515 if len(filelog):
516 r = filelog
516 r = filelog
517 if not r:
517 if not r:
518 if not file_:
518 if not file_:
519 raise error.CommandError(cmd, _('invalid arguments'))
519 raise error.CommandError(cmd, _('invalid arguments'))
520 if not os.path.isfile(file_):
520 if not os.path.isfile(file_):
521 raise util.Abort(_("revlog '%s' not found") % file_)
521 raise util.Abort(_("revlog '%s' not found") % file_)
522 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
522 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
523 file_[:-2] + ".i")
523 file_[:-2] + ".i")
524 return r
524 return r
525
525
526 def copy(ui, repo, pats, opts, rename=False):
526 def copy(ui, repo, pats, opts, rename=False):
527 # called with the repo lock held
527 # called with the repo lock held
528 #
528 #
529 # hgsep => pathname that uses "/" to separate directories
529 # hgsep => pathname that uses "/" to separate directories
530 # ossep => pathname that uses os.sep to separate directories
530 # ossep => pathname that uses os.sep to separate directories
531 cwd = repo.getcwd()
531 cwd = repo.getcwd()
532 targets = {}
532 targets = {}
533 after = opts.get("after")
533 after = opts.get("after")
534 dryrun = opts.get("dry_run")
534 dryrun = opts.get("dry_run")
535 wctx = repo[None]
535 wctx = repo[None]
536
536
537 def walkpat(pat):
537 def walkpat(pat):
538 srcs = []
538 srcs = []
539 if after:
539 if after:
540 badstates = '?'
540 badstates = '?'
541 else:
541 else:
542 badstates = '?r'
542 badstates = '?r'
543 m = scmutil.match(repo[None], [pat], opts, globbed=True)
543 m = scmutil.match(repo[None], [pat], opts, globbed=True)
544 for abs in repo.walk(m):
544 for abs in repo.walk(m):
545 state = repo.dirstate[abs]
545 state = repo.dirstate[abs]
546 rel = m.rel(abs)
546 rel = m.rel(abs)
547 exact = m.exact(abs)
547 exact = m.exact(abs)
548 if state in badstates:
548 if state in badstates:
549 if exact and state == '?':
549 if exact and state == '?':
550 ui.warn(_('%s: not copying - file is not managed\n') % rel)
550 ui.warn(_('%s: not copying - file is not managed\n') % rel)
551 if exact and state == 'r':
551 if exact and state == 'r':
552 ui.warn(_('%s: not copying - file has been marked for'
552 ui.warn(_('%s: not copying - file has been marked for'
553 ' remove\n') % rel)
553 ' remove\n') % rel)
554 continue
554 continue
555 # abs: hgsep
555 # abs: hgsep
556 # rel: ossep
556 # rel: ossep
557 srcs.append((abs, rel, exact))
557 srcs.append((abs, rel, exact))
558 return srcs
558 return srcs
559
559
560 # abssrc: hgsep
560 # abssrc: hgsep
561 # relsrc: ossep
561 # relsrc: ossep
562 # otarget: ossep
562 # otarget: ossep
563 def copyfile(abssrc, relsrc, otarget, exact):
563 def copyfile(abssrc, relsrc, otarget, exact):
564 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
564 abstarget = pathutil.canonpath(repo.root, cwd, otarget)
565 if '/' in abstarget:
565 if '/' in abstarget:
566 # We cannot normalize abstarget itself, this would prevent
566 # We cannot normalize abstarget itself, this would prevent
567 # case only renames, like a => A.
567 # case only renames, like a => A.
568 abspath, absname = abstarget.rsplit('/', 1)
568 abspath, absname = abstarget.rsplit('/', 1)
569 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
569 abstarget = repo.dirstate.normalize(abspath) + '/' + absname
570 reltarget = repo.pathto(abstarget, cwd)
570 reltarget = repo.pathto(abstarget, cwd)
571 target = repo.wjoin(abstarget)
571 target = repo.wjoin(abstarget)
572 src = repo.wjoin(abssrc)
572 src = repo.wjoin(abssrc)
573 state = repo.dirstate[abstarget]
573 state = repo.dirstate[abstarget]
574
574
575 scmutil.checkportable(ui, abstarget)
575 scmutil.checkportable(ui, abstarget)
576
576
577 # check for collisions
577 # check for collisions
578 prevsrc = targets.get(abstarget)
578 prevsrc = targets.get(abstarget)
579 if prevsrc is not None:
579 if prevsrc is not None:
580 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
580 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
581 (reltarget, repo.pathto(abssrc, cwd),
581 (reltarget, repo.pathto(abssrc, cwd),
582 repo.pathto(prevsrc, cwd)))
582 repo.pathto(prevsrc, cwd)))
583 return
583 return
584
584
585 # check for overwrites
585 # check for overwrites
586 exists = os.path.lexists(target)
586 exists = os.path.lexists(target)
587 samefile = False
587 samefile = False
588 if exists and abssrc != abstarget:
588 if exists and abssrc != abstarget:
589 if (repo.dirstate.normalize(abssrc) ==
589 if (repo.dirstate.normalize(abssrc) ==
590 repo.dirstate.normalize(abstarget)):
590 repo.dirstate.normalize(abstarget)):
591 if not rename:
591 if not rename:
592 ui.warn(_("%s: can't copy - same file\n") % reltarget)
592 ui.warn(_("%s: can't copy - same file\n") % reltarget)
593 return
593 return
594 exists = False
594 exists = False
595 samefile = True
595 samefile = True
596
596
597 if not after and exists or after and state in 'mn':
597 if not after and exists or after and state in 'mn':
598 if not opts['force']:
598 if not opts['force']:
599 ui.warn(_('%s: not overwriting - file exists\n') %
599 ui.warn(_('%s: not overwriting - file exists\n') %
600 reltarget)
600 reltarget)
601 return
601 return
602
602
603 if after:
603 if after:
604 if not exists:
604 if not exists:
605 if rename:
605 if rename:
606 ui.warn(_('%s: not recording move - %s does not exist\n') %
606 ui.warn(_('%s: not recording move - %s does not exist\n') %
607 (relsrc, reltarget))
607 (relsrc, reltarget))
608 else:
608 else:
609 ui.warn(_('%s: not recording copy - %s does not exist\n') %
609 ui.warn(_('%s: not recording copy - %s does not exist\n') %
610 (relsrc, reltarget))
610 (relsrc, reltarget))
611 return
611 return
612 elif not dryrun:
612 elif not dryrun:
613 try:
613 try:
614 if exists:
614 if exists:
615 os.unlink(target)
615 os.unlink(target)
616 targetdir = os.path.dirname(target) or '.'
616 targetdir = os.path.dirname(target) or '.'
617 if not os.path.isdir(targetdir):
617 if not os.path.isdir(targetdir):
618 os.makedirs(targetdir)
618 os.makedirs(targetdir)
619 if samefile:
619 if samefile:
620 tmp = target + "~hgrename"
620 tmp = target + "~hgrename"
621 os.rename(src, tmp)
621 os.rename(src, tmp)
622 os.rename(tmp, target)
622 os.rename(tmp, target)
623 else:
623 else:
624 util.copyfile(src, target)
624 util.copyfile(src, target)
625 srcexists = True
625 srcexists = True
626 except IOError as inst:
626 except IOError as inst:
627 if inst.errno == errno.ENOENT:
627 if inst.errno == errno.ENOENT:
628 ui.warn(_('%s: deleted in working directory\n') % relsrc)
628 ui.warn(_('%s: deleted in working directory\n') % relsrc)
629 srcexists = False
629 srcexists = False
630 else:
630 else:
631 ui.warn(_('%s: cannot copy - %s\n') %
631 ui.warn(_('%s: cannot copy - %s\n') %
632 (relsrc, inst.strerror))
632 (relsrc, inst.strerror))
633 return True # report a failure
633 return True # report a failure
634
634
635 if ui.verbose or not exact:
635 if ui.verbose or not exact:
636 if rename:
636 if rename:
637 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
637 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
638 else:
638 else:
639 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
639 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
640
640
641 targets[abstarget] = abssrc
641 targets[abstarget] = abssrc
642
642
643 # fix up dirstate
643 # fix up dirstate
644 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
644 scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
645 dryrun=dryrun, cwd=cwd)
645 dryrun=dryrun, cwd=cwd)
646 if rename and not dryrun:
646 if rename and not dryrun:
647 if not after and srcexists and not samefile:
647 if not after and srcexists and not samefile:
648 util.unlinkpath(repo.wjoin(abssrc))
648 util.unlinkpath(repo.wjoin(abssrc))
649 wctx.forget([abssrc])
649 wctx.forget([abssrc])
650
650
651 # pat: ossep
651 # pat: ossep
652 # dest ossep
652 # dest ossep
653 # srcs: list of (hgsep, hgsep, ossep, bool)
653 # srcs: list of (hgsep, hgsep, ossep, bool)
654 # return: function that takes hgsep and returns ossep
654 # return: function that takes hgsep and returns ossep
655 def targetpathfn(pat, dest, srcs):
655 def targetpathfn(pat, dest, srcs):
656 if os.path.isdir(pat):
656 if os.path.isdir(pat):
657 abspfx = pathutil.canonpath(repo.root, cwd, pat)
657 abspfx = pathutil.canonpath(repo.root, cwd, pat)
658 abspfx = util.localpath(abspfx)
658 abspfx = util.localpath(abspfx)
659 if destdirexists:
659 if destdirexists:
660 striplen = len(os.path.split(abspfx)[0])
660 striplen = len(os.path.split(abspfx)[0])
661 else:
661 else:
662 striplen = len(abspfx)
662 striplen = len(abspfx)
663 if striplen:
663 if striplen:
664 striplen += len(os.sep)
664 striplen += len(os.sep)
665 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
665 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
666 elif destdirexists:
666 elif destdirexists:
667 res = lambda p: os.path.join(dest,
667 res = lambda p: os.path.join(dest,
668 os.path.basename(util.localpath(p)))
668 os.path.basename(util.localpath(p)))
669 else:
669 else:
670 res = lambda p: dest
670 res = lambda p: dest
671 return res
671 return res
672
672
673 # pat: ossep
673 # pat: ossep
674 # dest ossep
674 # dest ossep
675 # srcs: list of (hgsep, hgsep, ossep, bool)
675 # srcs: list of (hgsep, hgsep, ossep, bool)
676 # return: function that takes hgsep and returns ossep
676 # return: function that takes hgsep and returns ossep
677 def targetpathafterfn(pat, dest, srcs):
677 def targetpathafterfn(pat, dest, srcs):
678 if matchmod.patkind(pat):
678 if matchmod.patkind(pat):
679 # a mercurial pattern
679 # a mercurial pattern
680 res = lambda p: os.path.join(dest,
680 res = lambda p: os.path.join(dest,
681 os.path.basename(util.localpath(p)))
681 os.path.basename(util.localpath(p)))
682 else:
682 else:
683 abspfx = pathutil.canonpath(repo.root, cwd, pat)
683 abspfx = pathutil.canonpath(repo.root, cwd, pat)
684 if len(abspfx) < len(srcs[0][0]):
684 if len(abspfx) < len(srcs[0][0]):
685 # A directory. Either the target path contains the last
685 # A directory. Either the target path contains the last
686 # component of the source path or it does not.
686 # component of the source path or it does not.
687 def evalpath(striplen):
687 def evalpath(striplen):
688 score = 0
688 score = 0
689 for s in srcs:
689 for s in srcs:
690 t = os.path.join(dest, util.localpath(s[0])[striplen:])
690 t = os.path.join(dest, util.localpath(s[0])[striplen:])
691 if os.path.lexists(t):
691 if os.path.lexists(t):
692 score += 1
692 score += 1
693 return score
693 return score
694
694
695 abspfx = util.localpath(abspfx)
695 abspfx = util.localpath(abspfx)
696 striplen = len(abspfx)
696 striplen = len(abspfx)
697 if striplen:
697 if striplen:
698 striplen += len(os.sep)
698 striplen += len(os.sep)
699 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
699 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
700 score = evalpath(striplen)
700 score = evalpath(striplen)
701 striplen1 = len(os.path.split(abspfx)[0])
701 striplen1 = len(os.path.split(abspfx)[0])
702 if striplen1:
702 if striplen1:
703 striplen1 += len(os.sep)
703 striplen1 += len(os.sep)
704 if evalpath(striplen1) > score:
704 if evalpath(striplen1) > score:
705 striplen = striplen1
705 striplen = striplen1
706 res = lambda p: os.path.join(dest,
706 res = lambda p: os.path.join(dest,
707 util.localpath(p)[striplen:])
707 util.localpath(p)[striplen:])
708 else:
708 else:
709 # a file
709 # a file
710 if destdirexists:
710 if destdirexists:
711 res = lambda p: os.path.join(dest,
711 res = lambda p: os.path.join(dest,
712 os.path.basename(util.localpath(p)))
712 os.path.basename(util.localpath(p)))
713 else:
713 else:
714 res = lambda p: dest
714 res = lambda p: dest
715 return res
715 return res
716
716
717 pats = scmutil.expandpats(pats)
717 pats = scmutil.expandpats(pats)
718 if not pats:
718 if not pats:
719 raise util.Abort(_('no source or destination specified'))
719 raise util.Abort(_('no source or destination specified'))
720 if len(pats) == 1:
720 if len(pats) == 1:
721 raise util.Abort(_('no destination specified'))
721 raise util.Abort(_('no destination specified'))
722 dest = pats.pop()
722 dest = pats.pop()
723 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
723 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
724 if not destdirexists:
724 if not destdirexists:
725 if len(pats) > 1 or matchmod.patkind(pats[0]):
725 if len(pats) > 1 or matchmod.patkind(pats[0]):
726 raise util.Abort(_('with multiple sources, destination must be an '
726 raise util.Abort(_('with multiple sources, destination must be an '
727 'existing directory'))
727 'existing directory'))
728 if util.endswithsep(dest):
728 if util.endswithsep(dest):
729 raise util.Abort(_('destination %s is not a directory') % dest)
729 raise util.Abort(_('destination %s is not a directory') % dest)
730
730
731 tfn = targetpathfn
731 tfn = targetpathfn
732 if after:
732 if after:
733 tfn = targetpathafterfn
733 tfn = targetpathafterfn
734 copylist = []
734 copylist = []
735 for pat in pats:
735 for pat in pats:
736 srcs = walkpat(pat)
736 srcs = walkpat(pat)
737 if not srcs:
737 if not srcs:
738 continue
738 continue
739 copylist.append((tfn(pat, dest, srcs), srcs))
739 copylist.append((tfn(pat, dest, srcs), srcs))
740 if not copylist:
740 if not copylist:
741 raise util.Abort(_('no files to copy'))
741 raise util.Abort(_('no files to copy'))
742
742
743 errors = 0
743 errors = 0
744 for targetpath, srcs in copylist:
744 for targetpath, srcs in copylist:
745 for abssrc, relsrc, exact in srcs:
745 for abssrc, relsrc, exact in srcs:
746 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
746 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
747 errors += 1
747 errors += 1
748
748
749 if errors:
749 if errors:
750 ui.warn(_('(consider using --after)\n'))
750 ui.warn(_('(consider using --after)\n'))
751
751
752 return errors != 0
752 return errors != 0
753
753
754 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
754 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
755 runargs=None, appendpid=False):
755 runargs=None, appendpid=False):
756 '''Run a command as a service.'''
756 '''Run a command as a service.'''
757
757
758 def writepid(pid):
758 def writepid(pid):
759 if opts['pid_file']:
759 if opts['pid_file']:
760 if appendpid:
760 if appendpid:
761 mode = 'a'
761 mode = 'a'
762 else:
762 else:
763 mode = 'w'
763 mode = 'w'
764 fp = open(opts['pid_file'], mode)
764 fp = open(opts['pid_file'], mode)
765 fp.write(str(pid) + '\n')
765 fp.write(str(pid) + '\n')
766 fp.close()
766 fp.close()
767
767
768 if opts['daemon'] and not opts['daemon_pipefds']:
768 if opts['daemon'] and not opts['daemon_pipefds']:
769 # Signal child process startup with file removal
769 # Signal child process startup with file removal
770 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
770 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
771 os.close(lockfd)
771 os.close(lockfd)
772 try:
772 try:
773 if not runargs:
773 if not runargs:
774 runargs = util.hgcmd() + sys.argv[1:]
774 runargs = util.hgcmd() + sys.argv[1:]
775 runargs.append('--daemon-pipefds=%s' % lockpath)
775 runargs.append('--daemon-pipefds=%s' % lockpath)
776 # Don't pass --cwd to the child process, because we've already
776 # Don't pass --cwd to the child process, because we've already
777 # changed directory.
777 # changed directory.
778 for i in xrange(1, len(runargs)):
778 for i in xrange(1, len(runargs)):
779 if runargs[i].startswith('--cwd='):
779 if runargs[i].startswith('--cwd='):
780 del runargs[i]
780 del runargs[i]
781 break
781 break
782 elif runargs[i].startswith('--cwd'):
782 elif runargs[i].startswith('--cwd'):
783 del runargs[i:i + 2]
783 del runargs[i:i + 2]
784 break
784 break
785 def condfn():
785 def condfn():
786 return not os.path.exists(lockpath)
786 return not os.path.exists(lockpath)
787 pid = util.rundetached(runargs, condfn)
787 pid = util.rundetached(runargs, condfn)
788 if pid < 0:
788 if pid < 0:
789 raise util.Abort(_('child process failed to start'))
789 raise util.Abort(_('child process failed to start'))
790 writepid(pid)
790 writepid(pid)
791 finally:
791 finally:
792 try:
792 try:
793 os.unlink(lockpath)
793 os.unlink(lockpath)
794 except OSError as e:
794 except OSError as e:
795 if e.errno != errno.ENOENT:
795 if e.errno != errno.ENOENT:
796 raise
796 raise
797 if parentfn:
797 if parentfn:
798 return parentfn(pid)
798 return parentfn(pid)
799 else:
799 else:
800 return
800 return
801
801
802 if initfn:
802 if initfn:
803 initfn()
803 initfn()
804
804
805 if not opts['daemon']:
805 if not opts['daemon']:
806 writepid(os.getpid())
806 writepid(os.getpid())
807
807
808 if opts['daemon_pipefds']:
808 if opts['daemon_pipefds']:
809 lockpath = opts['daemon_pipefds']
809 lockpath = opts['daemon_pipefds']
810 try:
810 try:
811 os.setsid()
811 os.setsid()
812 except AttributeError:
812 except AttributeError:
813 pass
813 pass
814 os.unlink(lockpath)
814 os.unlink(lockpath)
815 util.hidewindow()
815 util.hidewindow()
816 sys.stdout.flush()
816 sys.stdout.flush()
817 sys.stderr.flush()
817 sys.stderr.flush()
818
818
819 nullfd = os.open(os.devnull, os.O_RDWR)
819 nullfd = os.open(os.devnull, os.O_RDWR)
820 logfilefd = nullfd
820 logfilefd = nullfd
821 if logfile:
821 if logfile:
822 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
822 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
823 os.dup2(nullfd, 0)
823 os.dup2(nullfd, 0)
824 os.dup2(logfilefd, 1)
824 os.dup2(logfilefd, 1)
825 os.dup2(logfilefd, 2)
825 os.dup2(logfilefd, 2)
826 if nullfd not in (0, 1, 2):
826 if nullfd not in (0, 1, 2):
827 os.close(nullfd)
827 os.close(nullfd)
828 if logfile and logfilefd not in (0, 1, 2):
828 if logfile and logfilefd not in (0, 1, 2):
829 os.close(logfilefd)
829 os.close(logfilefd)
830
830
831 if runfn:
831 if runfn:
832 return runfn()
832 return runfn()
833
833
834 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
834 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
835 """Utility function used by commands.import to import a single patch
835 """Utility function used by commands.import to import a single patch
836
836
837 This function is explicitly defined here to help the evolve extension to
837 This function is explicitly defined here to help the evolve extension to
838 wrap this part of the import logic.
838 wrap this part of the import logic.
839
839
840 The API is currently a bit ugly because it a simple code translation from
840 The API is currently a bit ugly because it a simple code translation from
841 the import command. Feel free to make it better.
841 the import command. Feel free to make it better.
842
842
843 :hunk: a patch (as a binary string)
843 :hunk: a patch (as a binary string)
844 :parents: nodes that will be parent of the created commit
844 :parents: nodes that will be parent of the created commit
845 :opts: the full dict of option passed to the import command
845 :opts: the full dict of option passed to the import command
846 :msgs: list to save commit message to.
846 :msgs: list to save commit message to.
847 (used in case we need to save it when failing)
847 (used in case we need to save it when failing)
848 :updatefunc: a function that update a repo to a given node
848 :updatefunc: a function that update a repo to a given node
849 updatefunc(<repo>, <node>)
849 updatefunc(<repo>, <node>)
850 """
850 """
851 # avoid cycle context -> subrepo -> cmdutil
851 # avoid cycle context -> subrepo -> cmdutil
852 import context
852 import context
853 tmpname, message, user, date, branch, nodeid, p1, p2 = \
853 tmpname, message, user, date, branch, nodeid, p1, p2 = \
854 patch.extract(ui, hunk)
854 patch.extract(ui, hunk)
855
855
856 update = not opts.get('bypass')
856 update = not opts.get('bypass')
857 strip = opts["strip"]
857 strip = opts["strip"]
858 prefix = opts["prefix"]
858 prefix = opts["prefix"]
859 sim = float(opts.get('similarity') or 0)
859 sim = float(opts.get('similarity') or 0)
860 if not tmpname:
860 if not tmpname:
861 return (None, None, False)
861 return (None, None, False)
862 msg = _('applied to working directory')
862 msg = _('applied to working directory')
863
863
864 rejects = False
864 rejects = False
865 dsguard = None
865 dsguard = None
866
866
867 try:
867 try:
868 cmdline_message = logmessage(ui, opts)
868 cmdline_message = logmessage(ui, opts)
869 if cmdline_message:
869 if cmdline_message:
870 # pickup the cmdline msg
870 # pickup the cmdline msg
871 message = cmdline_message
871 message = cmdline_message
872 elif message:
872 elif message:
873 # pickup the patch msg
873 # pickup the patch msg
874 message = message.strip()
874 message = message.strip()
875 else:
875 else:
876 # launch the editor
876 # launch the editor
877 message = None
877 message = None
878 ui.debug('message:\n%s\n' % message)
878 ui.debug('message:\n%s\n' % message)
879
879
880 if len(parents) == 1:
880 if len(parents) == 1:
881 parents.append(repo[nullid])
881 parents.append(repo[nullid])
882 if opts.get('exact'):
882 if opts.get('exact'):
883 if not nodeid or not p1:
883 if not nodeid or not p1:
884 raise util.Abort(_('not a Mercurial patch'))
884 raise util.Abort(_('not a Mercurial patch'))
885 p1 = repo[p1]
885 p1 = repo[p1]
886 p2 = repo[p2 or nullid]
886 p2 = repo[p2 or nullid]
887 elif p2:
887 elif p2:
888 try:
888 try:
889 p1 = repo[p1]
889 p1 = repo[p1]
890 p2 = repo[p2]
890 p2 = repo[p2]
891 # Without any options, consider p2 only if the
891 # Without any options, consider p2 only if the
892 # patch is being applied on top of the recorded
892 # patch is being applied on top of the recorded
893 # first parent.
893 # first parent.
894 if p1 != parents[0]:
894 if p1 != parents[0]:
895 p1 = parents[0]
895 p1 = parents[0]
896 p2 = repo[nullid]
896 p2 = repo[nullid]
897 except error.RepoError:
897 except error.RepoError:
898 p1, p2 = parents
898 p1, p2 = parents
899 if p2.node() == nullid:
899 if p2.node() == nullid:
900 ui.warn(_("warning: import the patch as a normal revision\n"
900 ui.warn(_("warning: import the patch as a normal revision\n"
901 "(use --exact to import the patch as a merge)\n"))
901 "(use --exact to import the patch as a merge)\n"))
902 else:
902 else:
903 p1, p2 = parents
903 p1, p2 = parents
904
904
905 n = None
905 n = None
906 if update:
906 if update:
907 dsguard = dirstateguard(repo, 'tryimportone')
907 dsguard = dirstateguard(repo, 'tryimportone')
908 if p1 != parents[0]:
908 if p1 != parents[0]:
909 updatefunc(repo, p1.node())
909 updatefunc(repo, p1.node())
910 if p2 != parents[1]:
910 if p2 != parents[1]:
911 repo.setparents(p1.node(), p2.node())
911 repo.setparents(p1.node(), p2.node())
912
912
913 if opts.get('exact') or opts.get('import_branch'):
913 if opts.get('exact') or opts.get('import_branch'):
914 repo.dirstate.setbranch(branch or 'default')
914 repo.dirstate.setbranch(branch or 'default')
915
915
916 partial = opts.get('partial', False)
916 partial = opts.get('partial', False)
917 files = set()
917 files = set()
918 try:
918 try:
919 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
919 patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
920 files=files, eolmode=None, similarity=sim / 100.0)
920 files=files, eolmode=None, similarity=sim / 100.0)
921 except patch.PatchError as e:
921 except patch.PatchError as e:
922 if not partial:
922 if not partial:
923 raise util.Abort(str(e))
923 raise util.Abort(str(e))
924 if partial:
924 if partial:
925 rejects = True
925 rejects = True
926
926
927 files = list(files)
927 files = list(files)
928 if opts.get('no_commit'):
928 if opts.get('no_commit'):
929 if message:
929 if message:
930 msgs.append(message)
930 msgs.append(message)
931 else:
931 else:
932 if opts.get('exact') or p2:
932 if opts.get('exact') or p2:
933 # If you got here, you either use --force and know what
933 # If you got here, you either use --force and know what
934 # you are doing or used --exact or a merge patch while
934 # you are doing or used --exact or a merge patch while
935 # being updated to its first parent.
935 # being updated to its first parent.
936 m = None
936 m = None
937 else:
937 else:
938 m = scmutil.matchfiles(repo, files or [])
938 m = scmutil.matchfiles(repo, files or [])
939 editform = mergeeditform(repo[None], 'import.normal')
939 editform = mergeeditform(repo[None], 'import.normal')
940 if opts.get('exact'):
940 if opts.get('exact'):
941 editor = None
941 editor = None
942 else:
942 else:
943 editor = getcommiteditor(editform=editform, **opts)
943 editor = getcommiteditor(editform=editform, **opts)
944 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
944 allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
945 try:
945 try:
946 if partial:
946 if partial:
947 repo.ui.setconfig('ui', 'allowemptycommit', True)
947 repo.ui.setconfig('ui', 'allowemptycommit', True)
948 n = repo.commit(message, opts.get('user') or user,
948 n = repo.commit(message, opts.get('user') or user,
949 opts.get('date') or date, match=m,
949 opts.get('date') or date, match=m,
950 editor=editor)
950 editor=editor)
951 finally:
951 finally:
952 repo.ui.restoreconfig(allowemptyback)
952 repo.ui.restoreconfig(allowemptyback)
953 dsguard.close()
953 dsguard.close()
954 else:
954 else:
955 if opts.get('exact') or opts.get('import_branch'):
955 if opts.get('exact') or opts.get('import_branch'):
956 branch = branch or 'default'
956 branch = branch or 'default'
957 else:
957 else:
958 branch = p1.branch()
958 branch = p1.branch()
959 store = patch.filestore()
959 store = patch.filestore()
960 try:
960 try:
961 files = set()
961 files = set()
962 try:
962 try:
963 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
963 patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
964 files, eolmode=None)
964 files, eolmode=None)
965 except patch.PatchError as e:
965 except patch.PatchError as e:
966 raise util.Abort(str(e))
966 raise util.Abort(str(e))
967 if opts.get('exact'):
967 if opts.get('exact'):
968 editor = None
968 editor = None
969 else:
969 else:
970 editor = getcommiteditor(editform='import.bypass')
970 editor = getcommiteditor(editform='import.bypass')
971 memctx = context.makememctx(repo, (p1.node(), p2.node()),
971 memctx = context.makememctx(repo, (p1.node(), p2.node()),
972 message,
972 message,
973 opts.get('user') or user,
973 opts.get('user') or user,
974 opts.get('date') or date,
974 opts.get('date') or date,
975 branch, files, store,
975 branch, files, store,
976 editor=editor)
976 editor=editor)
977 n = memctx.commit()
977 n = memctx.commit()
978 finally:
978 finally:
979 store.close()
979 store.close()
980 if opts.get('exact') and opts.get('no_commit'):
980 if opts.get('exact') and opts.get('no_commit'):
981 # --exact with --no-commit is still useful in that it does merge
981 # --exact with --no-commit is still useful in that it does merge
982 # and branch bits
982 # and branch bits
983 ui.warn(_("warning: can't check exact import with --no-commit\n"))
983 ui.warn(_("warning: can't check exact import with --no-commit\n"))
984 elif opts.get('exact') and hex(n) != nodeid:
984 elif opts.get('exact') and hex(n) != nodeid:
985 raise util.Abort(_('patch is damaged or loses information'))
985 raise util.Abort(_('patch is damaged or loses information'))
986 if n:
986 if n:
987 # i18n: refers to a short changeset id
987 # i18n: refers to a short changeset id
988 msg = _('created %s') % short(n)
988 msg = _('created %s') % short(n)
989 return (msg, n, rejects)
989 return (msg, n, rejects)
990 finally:
990 finally:
991 lockmod.release(dsguard)
991 lockmod.release(dsguard)
992 os.unlink(tmpname)
992 os.unlink(tmpname)
993
993
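# Editor's sketch (not part of this changeset): how a caller might consume
# the (msg, node, rejects) tuple returned by tryimportone() above. The
# _reportimport name and its arguments are placeholders for values the real
# `hg import` command already has in scope.
def _reportimport(ui, result):
    msg, node, rejects = result
    if msg:
        ui.status("%s\n" % msg)    # e.g. "created 1a2b3c4d5e6f"
    if rejects:
        ui.warn("patch applied partially; see *.rej files\n")
    return node                    # may be None, e.g. with --no-commit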
994 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
994 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
995 opts=None, match=None):
995 opts=None, match=None):
996 '''export changesets as hg patches.'''
996 '''export changesets as hg patches.'''
997
997
998 total = len(revs)
998 total = len(revs)
999 revwidth = max([len(str(rev)) for rev in revs])
999 revwidth = max([len(str(rev)) for rev in revs])
1000 filemode = {}
1000 filemode = {}
1001
1001
1002 def single(rev, seqno, fp):
1002 def single(rev, seqno, fp):
1003 ctx = repo[rev]
1003 ctx = repo[rev]
1004 node = ctx.node()
1004 node = ctx.node()
1005 parents = [p.node() for p in ctx.parents() if p]
1005 parents = [p.node() for p in ctx.parents() if p]
1006 branch = ctx.branch()
1006 branch = ctx.branch()
1007 if switch_parent:
1007 if switch_parent:
1008 parents.reverse()
1008 parents.reverse()
1009
1009
1010 if parents:
1010 if parents:
1011 prev = parents[0]
1011 prev = parents[0]
1012 else:
1012 else:
1013 prev = nullid
1013 prev = nullid
1014
1014
1015 shouldclose = False
1015 shouldclose = False
1016 if not fp and len(template) > 0:
1016 if not fp and len(template) > 0:
1017 desc_lines = ctx.description().rstrip().split('\n')
1017 desc_lines = ctx.description().rstrip().split('\n')
1018 desc = desc_lines[0] # Commit always has a first line.
1018 desc = desc_lines[0] # Commit always has a first line.
1019 fp = makefileobj(repo, template, node, desc=desc, total=total,
1019 fp = makefileobj(repo, template, node, desc=desc, total=total,
1020 seqno=seqno, revwidth=revwidth, mode='wb',
1020 seqno=seqno, revwidth=revwidth, mode='wb',
1021 modemap=filemode)
1021 modemap=filemode)
1022 if fp != template:
1022 if fp != template:
1023 shouldclose = True
1023 shouldclose = True
1024 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
1024 if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
1025 repo.ui.note("%s\n" % fp.name)
1025 repo.ui.note("%s\n" % fp.name)
1026
1026
1027 if not fp:
1027 if not fp:
1028 write = repo.ui.write
1028 write = repo.ui.write
1029 else:
1029 else:
1030 def write(s, **kw):
1030 def write(s, **kw):
1031 fp.write(s)
1031 fp.write(s)
1032
1032
1033 write("# HG changeset patch\n")
1033 write("# HG changeset patch\n")
1034 write("# User %s\n" % ctx.user())
1034 write("# User %s\n" % ctx.user())
1035 write("# Date %d %d\n" % ctx.date())
1035 write("# Date %d %d\n" % ctx.date())
1036 write("# %s\n" % util.datestr(ctx.date()))
1036 write("# %s\n" % util.datestr(ctx.date()))
1037 if branch and branch != 'default':
1037 if branch and branch != 'default':
1038 write("# Branch %s\n" % branch)
1038 write("# Branch %s\n" % branch)
1039 write("# Node ID %s\n" % hex(node))
1039 write("# Node ID %s\n" % hex(node))
1040 write("# Parent %s\n" % hex(prev))
1040 write("# Parent %s\n" % hex(prev))
1041 if len(parents) > 1:
1041 if len(parents) > 1:
1042 write("# Parent %s\n" % hex(parents[1]))
1042 write("# Parent %s\n" % hex(parents[1]))
1043 write(ctx.description().rstrip())
1043 write(ctx.description().rstrip())
1044 write("\n\n")
1044 write("\n\n")
1045
1045
1046 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1046 for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
1047 write(chunk, label=label)
1047 write(chunk, label=label)
1048
1048
1049 if shouldclose:
1049 if shouldclose:
1050 fp.close()
1050 fp.close()
1051
1051
1052 for seqno, rev in enumerate(revs):
1052 for seqno, rev in enumerate(revs):
1053 single(rev, seqno + 1, fp)
1053 single(rev, seqno + 1, fp)
1054
1054
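# Editor's sketch (illustration only, not part of this changeset): export()
# above is what `hg export` drives; for each requested rev it writes one
# "# HG changeset patch" header plus diff, expanding the filename template
# (e.g. %h -> short hash, %n -> zero-padded sequence number).
def _export_to_patch_files(repo, revs):
    export(repo, revs, template='hg-%h.patch')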
1055 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1055 def diffordiffstat(ui, repo, diffopts, node1, node2, match,
1056 changes=None, stat=False, fp=None, prefix='',
1056 changes=None, stat=False, fp=None, prefix='',
1057 root='', listsubrepos=False):
1057 root='', listsubrepos=False):
1058 '''show diff or diffstat.'''
1058 '''show diff or diffstat.'''
1059 if fp is None:
1059 if fp is None:
1060 write = ui.write
1060 write = ui.write
1061 else:
1061 else:
1062 def write(s, **kw):
1062 def write(s, **kw):
1063 fp.write(s)
1063 fp.write(s)
1064
1064
1065 if root:
1065 if root:
1066 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1066 relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
1067 else:
1067 else:
1068 relroot = ''
1068 relroot = ''
1069 if relroot != '':
1069 if relroot != '':
1070 # XXX relative roots currently don't work if the root is within a
1070 # XXX relative roots currently don't work if the root is within a
1071 # subrepo
1071 # subrepo
1072 uirelroot = match.uipath(relroot)
1072 uirelroot = match.uipath(relroot)
1073 relroot += '/'
1073 relroot += '/'
1074 for matchroot in match.files():
1074 for matchroot in match.files():
1075 if not matchroot.startswith(relroot):
1075 if not matchroot.startswith(relroot):
1076 ui.warn(_('warning: %s not inside relative root %s\n') % (
1076 ui.warn(_('warning: %s not inside relative root %s\n') % (
1077 match.uipath(matchroot), uirelroot))
1077 match.uipath(matchroot), uirelroot))
1078
1078
1079 if stat:
1079 if stat:
1080 diffopts = diffopts.copy(context=0)
1080 diffopts = diffopts.copy(context=0)
1081 width = 80
1081 width = 80
1082 if not ui.plain():
1082 if not ui.plain():
1083 width = ui.termwidth()
1083 width = ui.termwidth()
1084 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1084 chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
1085 prefix=prefix, relroot=relroot)
1085 prefix=prefix, relroot=relroot)
1086 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1086 for chunk, label in patch.diffstatui(util.iterlines(chunks),
1087 width=width,
1087 width=width,
1088 git=diffopts.git):
1088 git=diffopts.git):
1089 write(chunk, label=label)
1089 write(chunk, label=label)
1090 else:
1090 else:
1091 for chunk, label in patch.diffui(repo, node1, node2, match,
1091 for chunk, label in patch.diffui(repo, node1, node2, match,
1092 changes, diffopts, prefix=prefix,
1092 changes, diffopts, prefix=prefix,
1093 relroot=relroot):
1093 relroot=relroot):
1094 write(chunk, label=label)
1094 write(chunk, label=label)
1095
1095
1096 if listsubrepos:
1096 if listsubrepos:
1097 ctx1 = repo[node1]
1097 ctx1 = repo[node1]
1098 ctx2 = repo[node2]
1098 ctx2 = repo[node2]
1099 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1099 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
1100 tempnode2 = node2
1100 tempnode2 = node2
1101 try:
1101 try:
1102 if node2 is not None:
1102 if node2 is not None:
1103 tempnode2 = ctx2.substate[subpath][1]
1103 tempnode2 = ctx2.substate[subpath][1]
1104 except KeyError:
1104 except KeyError:
1105 # A subrepo that existed in node1 was deleted between node1 and
1105 # A subrepo that existed in node1 was deleted between node1 and
1106 # node2 (inclusive). Thus, ctx2's substate won't contain that
1106 # node2 (inclusive). Thus, ctx2's substate won't contain that
1107 # subpath. The best we can do is to ignore it.
1107 # subpath. The best we can do is to ignore it.
1108 tempnode2 = None
1108 tempnode2 = None
1109 submatch = matchmod.narrowmatcher(subpath, match)
1109 submatch = matchmod.narrowmatcher(subpath, match)
1110 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1110 sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
1111 stat=stat, fp=fp, prefix=prefix)
1111 stat=stat, fp=fp, prefix=prefix)
1112
1112
1113 class changeset_printer(object):
1113 class changeset_printer(object):
1114 '''show changeset information when templating not requested.'''
1114 '''show changeset information when templating not requested.'''
1115
1115
1116 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1116 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1117 self.ui = ui
1117 self.ui = ui
1118 self.repo = repo
1118 self.repo = repo
1119 self.buffered = buffered
1119 self.buffered = buffered
1120 self.matchfn = matchfn
1120 self.matchfn = matchfn
1121 self.diffopts = diffopts
1121 self.diffopts = diffopts
1122 self.header = {}
1122 self.header = {}
1123 self.hunk = {}
1123 self.hunk = {}
1124 self.lastheader = None
1124 self.lastheader = None
1125 self.footer = None
1125 self.footer = None
1126
1126
1127 def flush(self, ctx):
1127 def flush(self, ctx):
1128 rev = ctx.rev()
1128 rev = ctx.rev()
1129 if rev in self.header:
1129 if rev in self.header:
1130 h = self.header[rev]
1130 h = self.header[rev]
1131 if h != self.lastheader:
1131 if h != self.lastheader:
1132 self.lastheader = h
1132 self.lastheader = h
1133 self.ui.write(h)
1133 self.ui.write(h)
1134 del self.header[rev]
1134 del self.header[rev]
1135 if rev in self.hunk:
1135 if rev in self.hunk:
1136 self.ui.write(self.hunk[rev])
1136 self.ui.write(self.hunk[rev])
1137 del self.hunk[rev]
1137 del self.hunk[rev]
1138 return 1
1138 return 1
1139 return 0
1139 return 0
1140
1140
1141 def close(self):
1141 def close(self):
1142 if self.footer:
1142 if self.footer:
1143 self.ui.write(self.footer)
1143 self.ui.write(self.footer)
1144
1144
1145 def show(self, ctx, copies=None, matchfn=None, **props):
1145 def show(self, ctx, copies=None, matchfn=None, **props):
1146 if self.buffered:
1146 if self.buffered:
1147 self.ui.pushbuffer()
1147 self.ui.pushbuffer()
1148 self._show(ctx, copies, matchfn, props)
1148 self._show(ctx, copies, matchfn, props)
1149 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1149 self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
1150 else:
1150 else:
1151 self._show(ctx, copies, matchfn, props)
1151 self._show(ctx, copies, matchfn, props)
1152
1152
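# Editor's sketch (not part of this changeset): how the buffered mode above
# is typically used -- show() renders into self.hunk and flush() writes it
# later, which lets graph-aware callers emit changesets in their own order.
def _buffered_show(ui, repo, ctx):
    displayer = changeset_printer(ui, repo, None, {}, buffered=True)
    displayer.show(ctx)    # rendered into displayer.hunk[ctx.rev()]
    displayer.flush(ctx)   # written now; a repeated header is suppressed
    displayer.close()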
1153 def _show(self, ctx, copies, matchfn, props):
1153 def _show(self, ctx, copies, matchfn, props):
1154 '''show a single changeset or file revision'''
1154 '''show a single changeset or file revision'''
1155 changenode = ctx.node()
1155 changenode = ctx.node()
1156 rev = ctx.rev()
1156 rev = ctx.rev()
1157 if self.ui.debugflag:
1157 if self.ui.debugflag:
1158 hexfunc = hex
1158 hexfunc = hex
1159 else:
1159 else:
1160 hexfunc = short
1160 hexfunc = short
1161 # as of now, wctx.node() and wctx.rev() return None, but we want to
1161 # as of now, wctx.node() and wctx.rev() return None, but we want to
1162 # show the same values as {node} and {rev} templatekw
1162 # show the same values as {node} and {rev} templatekw
1163 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1163 revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))
1164
1164
1165 if self.ui.quiet:
1165 if self.ui.quiet:
1166 self.ui.write("%d:%s\n" % revnode, label='log.node')
1166 self.ui.write("%d:%s\n" % revnode, label='log.node')
1167 return
1167 return
1168
1168
1169 date = util.datestr(ctx.date())
1169 date = util.datestr(ctx.date())
1170
1170
1171 # i18n: column positioning for "hg log"
1171 # i18n: column positioning for "hg log"
1172 self.ui.write(_("changeset: %d:%s\n") % revnode,
1172 self.ui.write(_("changeset: %d:%s\n") % revnode,
1173 label='log.changeset changeset.%s' % ctx.phasestr())
1173 label='log.changeset changeset.%s' % ctx.phasestr())
1174
1174
1175 # branches are shown before any other names for backwards
1175 # branches are shown before any other names for backwards
1176 # compatibility
1176 # compatibility
1177 branch = ctx.branch()
1177 branch = ctx.branch()
1178 # don't show the default branch name
1178 # don't show the default branch name
1179 if branch != 'default':
1179 if branch != 'default':
1180 # i18n: column positioning for "hg log"
1180 # i18n: column positioning for "hg log"
1181 self.ui.write(_("branch: %s\n") % branch,
1181 self.ui.write(_("branch: %s\n") % branch,
1182 label='log.branch')
1182 label='log.branch')
1183
1183
1184 for name, ns in self.repo.names.iteritems():
1184 for name, ns in self.repo.names.iteritems():
1185 # branches has special logic already handled above, so here we just
1185 # branches has special logic already handled above, so here we just
1186 # skip it
1186 # skip it
1187 if name == 'branches':
1187 if name == 'branches':
1188 continue
1188 continue
1189 # we will use the templatename as the color name since those two
1189 # we will use the templatename as the color name since those two
1190 # should be the same
1190 # should be the same
1191 for name in ns.names(self.repo, changenode):
1191 for name in ns.names(self.repo, changenode):
1192 self.ui.write(ns.logfmt % name,
1192 self.ui.write(ns.logfmt % name,
1193 label='log.%s' % ns.colorname)
1193 label='log.%s' % ns.colorname)
1194 if self.ui.debugflag:
1194 if self.ui.debugflag:
1195 # i18n: column positioning for "hg log"
1195 # i18n: column positioning for "hg log"
1196 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1196 self.ui.write(_("phase: %s\n") % ctx.phasestr(),
1197 label='log.phase')
1197 label='log.phase')
1198 for pctx in self._meaningful_parentrevs(ctx):
1198 for pctx in scmutil.meaningfulparents(self.repo, ctx):
1199 label = 'log.parent changeset.%s' % pctx.phasestr()
1199 label = 'log.parent changeset.%s' % pctx.phasestr()
1200 # i18n: column positioning for "hg log"
1200 # i18n: column positioning for "hg log"
1201 self.ui.write(_("parent: %d:%s\n")
1201 self.ui.write(_("parent: %d:%s\n")
1202 % (pctx.rev(), hexfunc(pctx.node())),
1202 % (pctx.rev(), hexfunc(pctx.node())),
1203 label=label)
1203 label=label)
1204
1204
1205 if self.ui.debugflag and rev is not None:
1205 if self.ui.debugflag and rev is not None:
1206 mnode = ctx.manifestnode()
1206 mnode = ctx.manifestnode()
1207 # i18n: column positioning for "hg log"
1207 # i18n: column positioning for "hg log"
1208 self.ui.write(_("manifest: %d:%s\n") %
1208 self.ui.write(_("manifest: %d:%s\n") %
1209 (self.repo.manifest.rev(mnode), hex(mnode)),
1209 (self.repo.manifest.rev(mnode), hex(mnode)),
1210 label='ui.debug log.manifest')
1210 label='ui.debug log.manifest')
1211 # i18n: column positioning for "hg log"
1211 # i18n: column positioning for "hg log"
1212 self.ui.write(_("user: %s\n") % ctx.user(),
1212 self.ui.write(_("user: %s\n") % ctx.user(),
1213 label='log.user')
1213 label='log.user')
1214 # i18n: column positioning for "hg log"
1214 # i18n: column positioning for "hg log"
1215 self.ui.write(_("date: %s\n") % date,
1215 self.ui.write(_("date: %s\n") % date,
1216 label='log.date')
1216 label='log.date')
1217
1217
1218 if self.ui.debugflag:
1218 if self.ui.debugflag:
1219 files = ctx.p1().status(ctx)[:3]
1219 files = ctx.p1().status(ctx)[:3]
1220 for key, value in zip([# i18n: column positioning for "hg log"
1220 for key, value in zip([# i18n: column positioning for "hg log"
1221 _("files:"),
1221 _("files:"),
1222 # i18n: column positioning for "hg log"
1222 # i18n: column positioning for "hg log"
1223 _("files+:"),
1223 _("files+:"),
1224 # i18n: column positioning for "hg log"
1224 # i18n: column positioning for "hg log"
1225 _("files-:")], files):
1225 _("files-:")], files):
1226 if value:
1226 if value:
1227 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1227 self.ui.write("%-12s %s\n" % (key, " ".join(value)),
1228 label='ui.debug log.files')
1228 label='ui.debug log.files')
1229 elif ctx.files() and self.ui.verbose:
1229 elif ctx.files() and self.ui.verbose:
1230 # i18n: column positioning for "hg log"
1230 # i18n: column positioning for "hg log"
1231 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1231 self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
1232 label='ui.note log.files')
1232 label='ui.note log.files')
1233 if copies and self.ui.verbose:
1233 if copies and self.ui.verbose:
1234 copies = ['%s (%s)' % c for c in copies]
1234 copies = ['%s (%s)' % c for c in copies]
1235 # i18n: column positioning for "hg log"
1235 # i18n: column positioning for "hg log"
1236 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1236 self.ui.write(_("copies: %s\n") % ' '.join(copies),
1237 label='ui.note log.copies')
1237 label='ui.note log.copies')
1238
1238
1239 extra = ctx.extra()
1239 extra = ctx.extra()
1240 if extra and self.ui.debugflag:
1240 if extra and self.ui.debugflag:
1241 for key, value in sorted(extra.items()):
1241 for key, value in sorted(extra.items()):
1242 # i18n: column positioning for "hg log"
1242 # i18n: column positioning for "hg log"
1243 self.ui.write(_("extra: %s=%s\n")
1243 self.ui.write(_("extra: %s=%s\n")
1244 % (key, value.encode('string_escape')),
1244 % (key, value.encode('string_escape')),
1245 label='ui.debug log.extra')
1245 label='ui.debug log.extra')
1246
1246
1247 description = ctx.description().strip()
1247 description = ctx.description().strip()
1248 if description:
1248 if description:
1249 if self.ui.verbose:
1249 if self.ui.verbose:
1250 self.ui.write(_("description:\n"),
1250 self.ui.write(_("description:\n"),
1251 label='ui.note log.description')
1251 label='ui.note log.description')
1252 self.ui.write(description,
1252 self.ui.write(description,
1253 label='ui.note log.description')
1253 label='ui.note log.description')
1254 self.ui.write("\n\n")
1254 self.ui.write("\n\n")
1255 else:
1255 else:
1256 # i18n: column positioning for "hg log"
1256 # i18n: column positioning for "hg log"
1257 self.ui.write(_("summary: %s\n") %
1257 self.ui.write(_("summary: %s\n") %
1258 description.splitlines()[0],
1258 description.splitlines()[0],
1259 label='log.summary')
1259 label='log.summary')
1260 self.ui.write("\n")
1260 self.ui.write("\n")
1261
1261
1262 self.showpatch(changenode, matchfn)
1262 self.showpatch(changenode, matchfn)
1263
1263
1264 def showpatch(self, node, matchfn):
1264 def showpatch(self, node, matchfn):
1265 if not matchfn:
1265 if not matchfn:
1266 matchfn = self.matchfn
1266 matchfn = self.matchfn
1267 if matchfn:
1267 if matchfn:
1268 stat = self.diffopts.get('stat')
1268 stat = self.diffopts.get('stat')
1269 diff = self.diffopts.get('patch')
1269 diff = self.diffopts.get('patch')
1270 diffopts = patch.diffallopts(self.ui, self.diffopts)
1270 diffopts = patch.diffallopts(self.ui, self.diffopts)
1271 prev = self.repo.changelog.parents(node)[0]
1271 prev = self.repo.changelog.parents(node)[0]
1272 if stat:
1272 if stat:
1273 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1273 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1274 match=matchfn, stat=True)
1274 match=matchfn, stat=True)
1275 if diff:
1275 if diff:
1276 if stat:
1276 if stat:
1277 self.ui.write("\n")
1277 self.ui.write("\n")
1278 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1278 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1279 match=matchfn, stat=False)
1279 match=matchfn, stat=False)
1280 self.ui.write("\n")
1280 self.ui.write("\n")
1281
1281
1282 def _meaningful_parentrevs(self, ctx):
1283 """Return list of meaningful (or all if debug) parentrevs for rev.
1284
1285 For merges (two non-nullrev revisions) both parents are meaningful.
1286 Otherwise the first parent revision is considered meaningful if it
1287 is not the preceding revision.
1288 """
1289 parents = ctx.parents()
1290 if len(parents) > 1:
1291 return parents
1292 if self.ui.debugflag:
1293 return [parents[0], self.repo['null']]
1294 if parents[0].rev() >= scmutil.intrev(ctx.rev()) - 1:
1295 return []
1296 return parents
1297
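# Editor's note: the method removed above is the subject of this changeset;
# callers now use scmutil.meaningfulparents(self.repo, ctx). The scmutil
# helper is presumably a near-verbatim move of the same logic -- a sketch,
# not the verified scmutil.py hunk:
def _meaningfulparents_sketch(repo, ctx):
    """Return list of meaningful (or all if debug) parent contexts."""
    parents = ctx.parents()
    if len(parents) > 1:
        return parents
    if repo.ui.debugflag:
        return [parents[0], repo['null']]
    if parents[0].rev() >= scmutil.intrev(ctx.rev()) - 1:
        return []
    return parents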
1298 class jsonchangeset(changeset_printer):
1282 class jsonchangeset(changeset_printer):
1299 '''format changeset information.'''
1283 '''format changeset information.'''
1300
1284
1301 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1285 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1302 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1286 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1303 self.cache = {}
1287 self.cache = {}
1304 self._first = True
1288 self._first = True
1305
1289
1306 def close(self):
1290 def close(self):
1307 if not self._first:
1291 if not self._first:
1308 self.ui.write("\n]\n")
1292 self.ui.write("\n]\n")
1309 else:
1293 else:
1310 self.ui.write("[]\n")
1294 self.ui.write("[]\n")
1311
1295
1312 def _show(self, ctx, copies, matchfn, props):
1296 def _show(self, ctx, copies, matchfn, props):
1313 '''show a single changeset or file revision'''
1297 '''show a single changeset or file revision'''
1314 rev = ctx.rev()
1298 rev = ctx.rev()
1315 if rev is None:
1299 if rev is None:
1316 jrev = jnode = 'null'
1300 jrev = jnode = 'null'
1317 else:
1301 else:
1318 jrev = str(rev)
1302 jrev = str(rev)
1319 jnode = '"%s"' % hex(ctx.node())
1303 jnode = '"%s"' % hex(ctx.node())
1320 j = encoding.jsonescape
1304 j = encoding.jsonescape
1321
1305
1322 if self._first:
1306 if self._first:
1323 self.ui.write("[\n {")
1307 self.ui.write("[\n {")
1324 self._first = False
1308 self._first = False
1325 else:
1309 else:
1326 self.ui.write(",\n {")
1310 self.ui.write(",\n {")
1327
1311
1328 if self.ui.quiet:
1312 if self.ui.quiet:
1329 self.ui.write('\n "rev": %s' % jrev)
1313 self.ui.write('\n "rev": %s' % jrev)
1330 self.ui.write(',\n "node": %s' % jnode)
1314 self.ui.write(',\n "node": %s' % jnode)
1331 self.ui.write('\n }')
1315 self.ui.write('\n }')
1332 return
1316 return
1333
1317
1334 self.ui.write('\n "rev": %s' % jrev)
1318 self.ui.write('\n "rev": %s' % jrev)
1335 self.ui.write(',\n "node": %s' % jnode)
1319 self.ui.write(',\n "node": %s' % jnode)
1336 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1320 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1337 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1321 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1338 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1322 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1339 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1323 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1340 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1324 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1341
1325
1342 self.ui.write(',\n "bookmarks": [%s]' %
1326 self.ui.write(',\n "bookmarks": [%s]' %
1343 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1327 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1344 self.ui.write(',\n "tags": [%s]' %
1328 self.ui.write(',\n "tags": [%s]' %
1345 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1329 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1346 self.ui.write(',\n "parents": [%s]' %
1330 self.ui.write(',\n "parents": [%s]' %
1347 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1331 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1348
1332
1349 if self.ui.debugflag:
1333 if self.ui.debugflag:
1350 if rev is None:
1334 if rev is None:
1351 jmanifestnode = 'null'
1335 jmanifestnode = 'null'
1352 else:
1336 else:
1353 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1337 jmanifestnode = '"%s"' % hex(ctx.manifestnode())
1354 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1338 self.ui.write(',\n "manifest": %s' % jmanifestnode)
1355
1339
1356 self.ui.write(',\n "extra": {%s}' %
1340 self.ui.write(',\n "extra": {%s}' %
1357 ", ".join('"%s": "%s"' % (j(k), j(v))
1341 ", ".join('"%s": "%s"' % (j(k), j(v))
1358 for k, v in ctx.extra().items()))
1342 for k, v in ctx.extra().items()))
1359
1343
1360 files = ctx.p1().status(ctx)
1344 files = ctx.p1().status(ctx)
1361 self.ui.write(',\n "modified": [%s]' %
1345 self.ui.write(',\n "modified": [%s]' %
1362 ", ".join('"%s"' % j(f) for f in files[0]))
1346 ", ".join('"%s"' % j(f) for f in files[0]))
1363 self.ui.write(',\n "added": [%s]' %
1347 self.ui.write(',\n "added": [%s]' %
1364 ", ".join('"%s"' % j(f) for f in files[1]))
1348 ", ".join('"%s"' % j(f) for f in files[1]))
1365 self.ui.write(',\n "removed": [%s]' %
1349 self.ui.write(',\n "removed": [%s]' %
1366 ", ".join('"%s"' % j(f) for f in files[2]))
1350 ", ".join('"%s"' % j(f) for f in files[2]))
1367
1351
1368 elif self.ui.verbose:
1352 elif self.ui.verbose:
1369 self.ui.write(',\n "files": [%s]' %
1353 self.ui.write(',\n "files": [%s]' %
1370 ", ".join('"%s"' % j(f) for f in ctx.files()))
1354 ", ".join('"%s"' % j(f) for f in ctx.files()))
1371
1355
1372 if copies:
1356 if copies:
1373 self.ui.write(',\n "copies": {%s}' %
1357 self.ui.write(',\n "copies": {%s}' %
1374 ", ".join('"%s": "%s"' % (j(k), j(v))
1358 ", ".join('"%s": "%s"' % (j(k), j(v))
1375 for k, v in copies))
1359 for k, v in copies))
1376
1360
1377 matchfn = self.matchfn
1361 matchfn = self.matchfn
1378 if matchfn:
1362 if matchfn:
1379 stat = self.diffopts.get('stat')
1363 stat = self.diffopts.get('stat')
1380 diff = self.diffopts.get('patch')
1364 diff = self.diffopts.get('patch')
1381 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1365 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1382 node, prev = ctx.node(), ctx.p1().node()
1366 node, prev = ctx.node(), ctx.p1().node()
1383 if stat:
1367 if stat:
1384 self.ui.pushbuffer()
1368 self.ui.pushbuffer()
1385 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1369 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1386 match=matchfn, stat=True)
1370 match=matchfn, stat=True)
1387 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1371 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1388 if diff:
1372 if diff:
1389 self.ui.pushbuffer()
1373 self.ui.pushbuffer()
1390 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1374 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1391 match=matchfn, stat=False)
1375 match=matchfn, stat=False)
1392 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1376 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1393
1377
1394 self.ui.write("\n }")
1378 self.ui.write("\n }")
1395
1379
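# Editor's sketch (illustration only): jsonchangeset above always emits a
# well-formed JSON list ("[]\n" when empty) with at least "rev" and "node"
# per entry, so output such as `hg log -Tjson` can be consumed like this:
def _revs_from_log_json(data):
    import json
    return [(e["rev"], e["node"]) for e in json.loads(data)]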
1396 class changeset_templater(changeset_printer):
1380 class changeset_templater(changeset_printer):
1397 '''format changeset information.'''
1381 '''format changeset information.'''
1398
1382
1399 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1383 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1400 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1384 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1401 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1385 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1402 defaulttempl = {
1386 defaulttempl = {
1403 'parent': '{rev}:{node|formatnode} ',
1387 'parent': '{rev}:{node|formatnode} ',
1404 'manifest': '{rev}:{node|formatnode}',
1388 'manifest': '{rev}:{node|formatnode}',
1405 'file_copy': '{name} ({source})',
1389 'file_copy': '{name} ({source})',
1406 'extra': '{key}={value|stringescape}'
1390 'extra': '{key}={value|stringescape}'
1407 }
1391 }
1408 # filecopy is preserved for compatibility reasons
1392 # filecopy is preserved for compatibility reasons
1409 defaulttempl['filecopy'] = defaulttempl['file_copy']
1393 defaulttempl['filecopy'] = defaulttempl['file_copy']
1410 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1394 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1411 cache=defaulttempl)
1395 cache=defaulttempl)
1412 if tmpl:
1396 if tmpl:
1413 self.t.cache['changeset'] = tmpl
1397 self.t.cache['changeset'] = tmpl
1414
1398
1415 self.cache = {}
1399 self.cache = {}
1416
1400
1417 # find correct templates for current mode
1401 # find correct templates for current mode
1418 tmplmodes = [
1402 tmplmodes = [
1419 (True, None),
1403 (True, None),
1420 (self.ui.verbose, 'verbose'),
1404 (self.ui.verbose, 'verbose'),
1421 (self.ui.quiet, 'quiet'),
1405 (self.ui.quiet, 'quiet'),
1422 (self.ui.debugflag, 'debug'),
1406 (self.ui.debugflag, 'debug'),
1423 ]
1407 ]
1424
1408
1425 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1409 self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
1426 'docheader': '', 'docfooter': ''}
1410 'docheader': '', 'docfooter': ''}
1427 for mode, postfix in tmplmodes:
1411 for mode, postfix in tmplmodes:
1428 for t in self._parts:
1412 for t in self._parts:
1429 cur = t
1413 cur = t
1430 if postfix:
1414 if postfix:
1431 cur += "_" + postfix
1415 cur += "_" + postfix
1432 if mode and cur in self.t:
1416 if mode and cur in self.t:
1433 self._parts[t] = cur
1417 self._parts[t] = cur
1434
1418
1435 if self._parts['docheader']:
1419 if self._parts['docheader']:
1436 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1420 self.ui.write(templater.stringify(self.t(self._parts['docheader'])))
1437
1421
1438 def close(self):
1422 def close(self):
1439 if self._parts['docfooter']:
1423 if self._parts['docfooter']:
1440 if not self.footer:
1424 if not self.footer:
1441 self.footer = ""
1425 self.footer = ""
1442 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1426 self.footer += templater.stringify(self.t(self._parts['docfooter']))
1443 return super(changeset_templater, self).close()
1427 return super(changeset_templater, self).close()
1444
1428
1445 def _show(self, ctx, copies, matchfn, props):
1429 def _show(self, ctx, copies, matchfn, props):
1446 '''show a single changeset or file revision'''
1430 '''show a single changeset or file revision'''
1447
1431
1448 showlist = templatekw.showlist
1432 showlist = templatekw.showlist
1449
1433
1450 # showparents() behavior depends on ui trace level which
1434 # showparents() behavior depends on ui trace level which
1451 # causes unexpected behaviors at templating level and makes
1435 # causes unexpected behaviors at templating level and makes
1452 # it harder to extract it in a standalone function. Its
1436 # it harder to extract it in a standalone function. Its
1453 # behavior cannot be changed so leave it here for now.
1437 # behavior cannot be changed so leave it here for now.
1454 def showparents(**args):
1438 def showparents(**args):
1455 ctx = args['ctx']
1439 ctx = args['ctx']
1456 parents = [[('rev', p.rev()),
1440 parents = [[('rev', p.rev()),
1457 ('node', p.hex()),
1441 ('node', p.hex()),
1458 ('phase', p.phasestr())]
1442 ('phase', p.phasestr())]
1459 for p in self._meaningful_parentrevs(ctx)]
1443 for p in scmutil.meaningfulparents(self.repo, ctx)]
1460 return showlist('parent', parents, **args)
1444 return showlist('parent', parents, **args)
1461
1445
1462 props = props.copy()
1446 props = props.copy()
1463 props.update(templatekw.keywords)
1447 props.update(templatekw.keywords)
1464 props['parents'] = showparents
1448 props['parents'] = showparents
1465 props['templ'] = self.t
1449 props['templ'] = self.t
1466 props['ctx'] = ctx
1450 props['ctx'] = ctx
1467 props['repo'] = self.repo
1451 props['repo'] = self.repo
1468 props['revcache'] = {'copies': copies}
1452 props['revcache'] = {'copies': copies}
1469 props['cache'] = self.cache
1453 props['cache'] = self.cache
1470
1454
1471 try:
1455 try:
1472 # write header
1456 # write header
1473 if self._parts['header']:
1457 if self._parts['header']:
1474 h = templater.stringify(self.t(self._parts['header'], **props))
1458 h = templater.stringify(self.t(self._parts['header'], **props))
1475 if self.buffered:
1459 if self.buffered:
1476 self.header[ctx.rev()] = h
1460 self.header[ctx.rev()] = h
1477 else:
1461 else:
1478 if self.lastheader != h:
1462 if self.lastheader != h:
1479 self.lastheader = h
1463 self.lastheader = h
1480 self.ui.write(h)
1464 self.ui.write(h)
1481
1465
1482 # write changeset metadata, then patch if requested
1466 # write changeset metadata, then patch if requested
1483 key = self._parts['changeset']
1467 key = self._parts['changeset']
1484 self.ui.write(templater.stringify(self.t(key, **props)))
1468 self.ui.write(templater.stringify(self.t(key, **props)))
1485 self.showpatch(ctx.node(), matchfn)
1469 self.showpatch(ctx.node(), matchfn)
1486
1470
1487 if self._parts['footer']:
1471 if self._parts['footer']:
1488 if not self.footer:
1472 if not self.footer:
1489 self.footer = templater.stringify(
1473 self.footer = templater.stringify(
1490 self.t(self._parts['footer'], **props))
1474 self.t(self._parts['footer'], **props))
1491 except KeyError as inst:
1475 except KeyError as inst:
1492 msg = _("%s: no key named '%s'")
1476 msg = _("%s: no key named '%s'")
1493 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1477 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1494 except SyntaxError as inst:
1478 except SyntaxError as inst:
1495 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1479 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1496
1480
1497 def gettemplate(ui, tmpl, style):
1481 def gettemplate(ui, tmpl, style):
1498 """
1482 """
1499 Find the template matching the given template spec or style.
1483 Find the template matching the given template spec or style.
1500 """
1484 """
1501
1485
1502 # ui settings
1486 # ui settings
1503 if not tmpl and not style: # templates are stronger than styles
1487 if not tmpl and not style: # templates are stronger than styles
1504 tmpl = ui.config('ui', 'logtemplate')
1488 tmpl = ui.config('ui', 'logtemplate')
1505 if tmpl:
1489 if tmpl:
1506 try:
1490 try:
1507 tmpl = templater.unquotestring(tmpl)
1491 tmpl = templater.unquotestring(tmpl)
1508 except SyntaxError:
1492 except SyntaxError:
1509 pass
1493 pass
1510 return tmpl, None
1494 return tmpl, None
1511 else:
1495 else:
1512 style = util.expandpath(ui.config('ui', 'style', ''))
1496 style = util.expandpath(ui.config('ui', 'style', ''))
1513
1497
1514 if not tmpl and style:
1498 if not tmpl and style:
1515 mapfile = style
1499 mapfile = style
1516 if not os.path.split(mapfile)[0]:
1500 if not os.path.split(mapfile)[0]:
1517 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1501 mapname = (templater.templatepath('map-cmdline.' + mapfile)
1518 or templater.templatepath(mapfile))
1502 or templater.templatepath(mapfile))
1519 if mapname:
1503 if mapname:
1520 mapfile = mapname
1504 mapfile = mapname
1521 return None, mapfile
1505 return None, mapfile
1522
1506
1523 if not tmpl:
1507 if not tmpl:
1524 return None, None
1508 return None, None
1525
1509
1526 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1510 return formatter.lookuptemplate(ui, 'changeset', tmpl)
1527
1511
1528 def show_changeset(ui, repo, opts, buffered=False):
1512 def show_changeset(ui, repo, opts, buffered=False):
1529 """show one changeset using template or regular display.
1513 """show one changeset using template or regular display.
1530
1514
1531 Display format will be the first non-empty hit of:
1515 Display format will be the first non-empty hit of:
1532 1. option 'template'
1516 1. option 'template'
1533 2. option 'style'
1517 2. option 'style'
1534 3. [ui] setting 'logtemplate'
1518 3. [ui] setting 'logtemplate'
1535 4. [ui] setting 'style'
1519 4. [ui] setting 'style'
1536 If all of these values are either unset or the empty string,
1520 If all of these values are either unset or the empty string,
1537 regular display via changeset_printer() is done.
1521 regular display via changeset_printer() is done.
1538 """
1522 """
1539 # options
1523 # options
1540 matchfn = None
1524 matchfn = None
1541 if opts.get('patch') or opts.get('stat'):
1525 if opts.get('patch') or opts.get('stat'):
1542 matchfn = scmutil.matchall(repo)
1526 matchfn = scmutil.matchall(repo)
1543
1527
1544 if opts.get('template') == 'json':
1528 if opts.get('template') == 'json':
1545 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1529 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1546
1530
1547 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1531 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1548
1532
1549 if not tmpl and not mapfile:
1533 if not tmpl and not mapfile:
1550 return changeset_printer(ui, repo, matchfn, opts, buffered)
1534 return changeset_printer(ui, repo, matchfn, opts, buffered)
1551
1535
1552 try:
1536 try:
1553 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1537 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1554 buffered)
1538 buffered)
1555 except SyntaxError as inst:
1539 except SyntaxError as inst:
1556 raise util.Abort(inst.args[0])
1540 raise util.Abort(inst.args[0])
1557 return t
1541 return t
1558
1542
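# Editor's sketch (not part of this changeset): typical driver code for the
# show_changeset() factory above -- build one displayer, feed it contexts,
# and close() it so any docheader/docfooter gets flushed:
def _display_revs(ui, repo, revs, opts):
    displayer = show_changeset(ui, repo, opts, buffered=False)
    for rev in revs:
        displayer.show(repo[rev])
    displayer.close()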
1559 def showmarker(ui, marker):
1543 def showmarker(ui, marker):
1560 """utility function to display obsolescence marker in a readable way
1544 """utility function to display obsolescence marker in a readable way
1561
1545
1562 To be used by debug function."""
1546 To be used by debug function."""
1563 ui.write(hex(marker.precnode()))
1547 ui.write(hex(marker.precnode()))
1564 for repl in marker.succnodes():
1548 for repl in marker.succnodes():
1565 ui.write(' ')
1549 ui.write(' ')
1566 ui.write(hex(repl))
1550 ui.write(hex(repl))
1567 ui.write(' %X ' % marker.flags())
1551 ui.write(' %X ' % marker.flags())
1568 parents = marker.parentnodes()
1552 parents = marker.parentnodes()
1569 if parents is not None:
1553 if parents is not None:
1570 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1554 ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
1571 ui.write('(%s) ' % util.datestr(marker.date()))
1555 ui.write('(%s) ' % util.datestr(marker.date()))
1572 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1556 ui.write('{%s}' % (', '.join('%r: %r' % t for t in
1573 sorted(marker.metadata().items())
1557 sorted(marker.metadata().items())
1574 if t[0] != 'date')))
1558 if t[0] != 'date')))
1575 ui.write('\n')
1559 ui.write('\n')
1576
1560
1577 def finddate(ui, repo, date):
1561 def finddate(ui, repo, date):
1578 """Find the tipmost changeset that matches the given date spec"""
1562 """Find the tipmost changeset that matches the given date spec"""
1579
1563
1580 df = util.matchdate(date)
1564 df = util.matchdate(date)
1581 m = scmutil.matchall(repo)
1565 m = scmutil.matchall(repo)
1582 results = {}
1566 results = {}
1583
1567
1584 def prep(ctx, fns):
1568 def prep(ctx, fns):
1585 d = ctx.date()
1569 d = ctx.date()
1586 if df(d[0]):
1570 if df(d[0]):
1587 results[ctx.rev()] = d
1571 results[ctx.rev()] = d
1588
1572
1589 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1573 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1590 rev = ctx.rev()
1574 rev = ctx.rev()
1591 if rev in results:
1575 if rev in results:
1592 ui.status(_("found revision %s from %s\n") %
1576 ui.status(_("found revision %s from %s\n") %
1593 (rev, util.datestr(results[rev])))
1577 (rev, util.datestr(results[rev])))
1594 return str(rev)
1578 return str(rev)
1595
1579
1596 raise util.Abort(_("revision matching date not found"))
1580 raise util.Abort(_("revision matching date not found"))
1597
1581
1598 def increasingwindows(windowsize=8, sizelimit=512):
1582 def increasingwindows(windowsize=8, sizelimit=512):
1599 while True:
1583 while True:
1600 yield windowsize
1584 yield windowsize
1601 if windowsize < sizelimit:
1585 if windowsize < sizelimit:
1602 windowsize *= 2
1586 windowsize *= 2
1603
1587
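# Editor's note: increasingwindows() above doubles the window each round and
# then sticks at the sizelimit cap, e.g.:
#
#   >>> import itertools
#   >>> list(itertools.islice(increasingwindows(), 9))
#   [8, 16, 32, 64, 128, 256, 512, 512, 512]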
1604 class FileWalkError(Exception):
1588 class FileWalkError(Exception):
1605 pass
1589 pass
1606
1590
1607 def walkfilerevs(repo, match, follow, revs, fncache):
1591 def walkfilerevs(repo, match, follow, revs, fncache):
1608 '''Walks the file history for the matched files.
1592 '''Walks the file history for the matched files.
1609
1593
1610 Returns the changeset revs that are involved in the file history.
1594 Returns the changeset revs that are involved in the file history.
1611
1595
1612 Throws FileWalkError if the file history can't be walked using
1596 Throws FileWalkError if the file history can't be walked using
1613 filelogs alone.
1597 filelogs alone.
1614 '''
1598 '''
1615 wanted = set()
1599 wanted = set()
1616 copies = []
1600 copies = []
1617 minrev, maxrev = min(revs), max(revs)
1601 minrev, maxrev = min(revs), max(revs)
1618 def filerevgen(filelog, last):
1602 def filerevgen(filelog, last):
1619 """
1603 """
1620 Only files, no patterns. Check the history of each file.
1604 Only files, no patterns. Check the history of each file.
1621
1605
1622 Examines filelog entries within minrev, maxrev linkrev range
1606 Examines filelog entries within minrev, maxrev linkrev range
1623 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1607 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1624 tuples in backwards order
1608 tuples in backwards order
1625 """
1609 """
1626 cl_count = len(repo)
1610 cl_count = len(repo)
1627 revs = []
1611 revs = []
1628 for j in xrange(0, last + 1):
1612 for j in xrange(0, last + 1):
1629 linkrev = filelog.linkrev(j)
1613 linkrev = filelog.linkrev(j)
1630 if linkrev < minrev:
1614 if linkrev < minrev:
1631 continue
1615 continue
1632 # only yield revs for which we have the changelog; this can
1616 # only yield revs for which we have the changelog; this can
1633 # happen while doing "hg log" during a pull or commit
1617 # happen while doing "hg log" during a pull or commit
1634 if linkrev >= cl_count:
1618 if linkrev >= cl_count:
1635 break
1619 break
1636
1620
1637 parentlinkrevs = []
1621 parentlinkrevs = []
1638 for p in filelog.parentrevs(j):
1622 for p in filelog.parentrevs(j):
1639 if p != nullrev:
1623 if p != nullrev:
1640 parentlinkrevs.append(filelog.linkrev(p))
1624 parentlinkrevs.append(filelog.linkrev(p))
1641 n = filelog.node(j)
1625 n = filelog.node(j)
1642 revs.append((linkrev, parentlinkrevs,
1626 revs.append((linkrev, parentlinkrevs,
1643 follow and filelog.renamed(n)))
1627 follow and filelog.renamed(n)))
1644
1628
1645 return reversed(revs)
1629 return reversed(revs)
1646 def iterfiles():
1630 def iterfiles():
1647 pctx = repo['.']
1631 pctx = repo['.']
1648 for filename in match.files():
1632 for filename in match.files():
1649 if follow:
1633 if follow:
1650 if filename not in pctx:
1634 if filename not in pctx:
1651 raise util.Abort(_('cannot follow file not in parent '
1635 raise util.Abort(_('cannot follow file not in parent '
1652 'revision: "%s"') % filename)
1636 'revision: "%s"') % filename)
1653 yield filename, pctx[filename].filenode()
1637 yield filename, pctx[filename].filenode()
1654 else:
1638 else:
1655 yield filename, None
1639 yield filename, None
1656 for filename_node in copies:
1640 for filename_node in copies:
1657 yield filename_node
1641 yield filename_node
1658
1642
1659 for file_, node in iterfiles():
1643 for file_, node in iterfiles():
1660 filelog = repo.file(file_)
1644 filelog = repo.file(file_)
1661 if not len(filelog):
1645 if not len(filelog):
1662 if node is None:
1646 if node is None:
1663 # A zero count may be a directory or deleted file, so
1647 # A zero count may be a directory or deleted file, so
1664 # try to find matching entries on the slow path.
1648 # try to find matching entries on the slow path.
1665 if follow:
1649 if follow:
1666 raise util.Abort(
1650 raise util.Abort(
1667 _('cannot follow nonexistent file: "%s"') % file_)
1651 _('cannot follow nonexistent file: "%s"') % file_)
1668 raise FileWalkError("Cannot walk via filelog")
1652 raise FileWalkError("Cannot walk via filelog")
1669 else:
1653 else:
1670 continue
1654 continue
1671
1655
1672 if node is None:
1656 if node is None:
1673 last = len(filelog) - 1
1657 last = len(filelog) - 1
1674 else:
1658 else:
1675 last = filelog.rev(node)
1659 last = filelog.rev(node)
1676
1660
1677 # keep track of all ancestors of the file
1661 # keep track of all ancestors of the file
1678 ancestors = set([filelog.linkrev(last)])
1662 ancestors = set([filelog.linkrev(last)])
1679
1663
1680 # iterate from latest to oldest revision
1664 # iterate from latest to oldest revision
1681 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1665 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1682 if not follow:
1666 if not follow:
1683 if rev > maxrev:
1667 if rev > maxrev:
1684 continue
1668 continue
1685 else:
1669 else:
1686 # Note that last might not be the first interesting
1670 # Note that last might not be the first interesting
1687 # rev to us:
1671 # rev to us:
1688 # if the file has been changed after maxrev, we'll
1672 # if the file has been changed after maxrev, we'll
1689 # have linkrev(last) > maxrev, and we still need
1673 # have linkrev(last) > maxrev, and we still need
1690 # to explore the file graph
1674 # to explore the file graph
1691 if rev not in ancestors:
1675 if rev not in ancestors:
1692 continue
1676 continue
1693 # XXX insert 1327 fix here
1677 # XXX insert 1327 fix here
1694 if flparentlinkrevs:
1678 if flparentlinkrevs:
1695 ancestors.update(flparentlinkrevs)
1679 ancestors.update(flparentlinkrevs)
1696
1680
1697 fncache.setdefault(rev, []).append(file_)
1681 fncache.setdefault(rev, []).append(file_)
1698 wanted.add(rev)
1682 wanted.add(rev)
1699 if copied:
1683 if copied:
1700 copies.append(copied)
1684 copies.append(copied)
1701
1685
1702 return wanted
1686 return wanted
1703
1687
1704 class _followfilter(object):
1688 class _followfilter(object):
1705 def __init__(self, repo, onlyfirst=False):
1689 def __init__(self, repo, onlyfirst=False):
1706 self.repo = repo
1690 self.repo = repo
1707 self.startrev = nullrev
1691 self.startrev = nullrev
1708 self.roots = set()
1692 self.roots = set()
1709 self.onlyfirst = onlyfirst
1693 self.onlyfirst = onlyfirst
1710
1694
1711 def match(self, rev):
1695 def match(self, rev):
1712 def realparents(rev):
1696 def realparents(rev):
1713 if self.onlyfirst:
1697 if self.onlyfirst:
1714 return self.repo.changelog.parentrevs(rev)[0:1]
1698 return self.repo.changelog.parentrevs(rev)[0:1]
1715 else:
1699 else:
1716 return filter(lambda x: x != nullrev,
1700 return filter(lambda x: x != nullrev,
1717 self.repo.changelog.parentrevs(rev))
1701 self.repo.changelog.parentrevs(rev))
1718
1702
1719 if self.startrev == nullrev:
1703 if self.startrev == nullrev:
1720 self.startrev = rev
1704 self.startrev = rev
1721 return True
1705 return True
1722
1706
1723 if rev > self.startrev:
1707 if rev > self.startrev:
1724 # forward: all descendants
1708 # forward: all descendants
1725 if not self.roots:
1709 if not self.roots:
1726 self.roots.add(self.startrev)
1710 self.roots.add(self.startrev)
1727 for parent in realparents(rev):
1711 for parent in realparents(rev):
1728 if parent in self.roots:
1712 if parent in self.roots:
1729 self.roots.add(rev)
1713 self.roots.add(rev)
1730 return True
1714 return True
1731 else:
1715 else:
1732 # backwards: all parents
1716 # backwards: all parents
1733 if not self.roots:
1717 if not self.roots:
1734 self.roots.update(realparents(self.startrev))
1718 self.roots.update(realparents(self.startrev))
1735 if rev in self.roots:
1719 if rev in self.roots:
1736 self.roots.remove(rev)
1720 self.roots.remove(rev)
1737 self.roots.update(realparents(rev))
1721 self.roots.update(realparents(rev))
1738 return True
1722 return True
1739
1723
1740 return False
1724 return False
1741
1725
1742 def walkchangerevs(repo, match, opts, prepare):
1726 def walkchangerevs(repo, match, opts, prepare):
1743 '''Iterate over files and the revs in which they changed.
1727 '''Iterate over files and the revs in which they changed.
1744
1728
1745 Callers most commonly need to iterate backwards over the history
1729 Callers most commonly need to iterate backwards over the history
1746 in which they are interested. Doing so has awful (quadratic-looking)
1730 in which they are interested. Doing so has awful (quadratic-looking)
1747 performance, so we use iterators in a "windowed" way.
1731 performance, so we use iterators in a "windowed" way.
1748
1732
1749 We walk a window of revisions in the desired order. Within the
1733 We walk a window of revisions in the desired order. Within the
1750 window, we first walk forwards to gather data, then in the desired
1734 window, we first walk forwards to gather data, then in the desired
1751 order (usually backwards) to display it.
1735 order (usually backwards) to display it.
1752
1736
1753 This function returns an iterator yielding contexts. Before
1737 This function returns an iterator yielding contexts. Before
1754 yielding each context, the iterator will first call the prepare
1738 yielding each context, the iterator will first call the prepare
1755 function on each context in the window in forward order.'''
1739 function on each context in the window in forward order.'''
1756
1740
1757 follow = opts.get('follow') or opts.get('follow_first')
1741 follow = opts.get('follow') or opts.get('follow_first')
1758 revs = _logrevs(repo, opts)
1742 revs = _logrevs(repo, opts)
1759 if not revs:
1743 if not revs:
1760 return []
1744 return []
1761 wanted = set()
1745 wanted = set()
1762 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1746 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1763 opts.get('removed'))
1747 opts.get('removed'))
1764 fncache = {}
1748 fncache = {}
1765 change = repo.changectx
1749 change = repo.changectx
1766
1750
1767 # First step is to fill wanted, the set of revisions that we want to yield.
1751 # First step is to fill wanted, the set of revisions that we want to yield.
1768 # When it does not induce extra cost, we also fill fncache for revisions in
1752 # When it does not induce extra cost, we also fill fncache for revisions in
1769 # wanted: a cache of filenames that were changed (ctx.files()) and that
1753 # wanted: a cache of filenames that were changed (ctx.files()) and that
1770 # match the file filtering conditions.
1754 # match the file filtering conditions.
1771
1755
1772 if match.always():
1756 if match.always():
1773 # No files, no patterns. Display all revs.
1757 # No files, no patterns. Display all revs.
1774 wanted = revs
1758 wanted = revs
1775 elif not slowpath:
1759 elif not slowpath:
1776 # We only have to read through the filelog to find wanted revisions
1760 # We only have to read through the filelog to find wanted revisions
1777
1761
1778 try:
1762 try:
1779 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1763 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1780 except FileWalkError:
1764 except FileWalkError:
1781 slowpath = True
1765 slowpath = True
1782
1766
1783 # We decided to fall back to the slowpath because at least one
1767 # We decided to fall back to the slowpath because at least one
1784 # of the paths was not a file. Check to see if at least one of them
1768 # of the paths was not a file. Check to see if at least one of them
1785 # existed in history, otherwise simply return
1769 # existed in history, otherwise simply return
1786 for path in match.files():
1770 for path in match.files():
1787 if path == '.' or path in repo.store:
1771 if path == '.' or path in repo.store:
1788 break
1772 break
1789 else:
1773 else:
1790 return []
1774 return []
1791
1775
1792 if slowpath:
1776 if slowpath:
1793 # We have to read the changelog to match filenames against
1777 # We have to read the changelog to match filenames against
1794 # changed files
1778 # changed files
1795
1779
1796 if follow:
1780 if follow:
1797 raise util.Abort(_('can only follow copies/renames for explicit '
1781 raise util.Abort(_('can only follow copies/renames for explicit '
1798 'filenames'))
1782 'filenames'))
1799
1783
1800 # The slow path checks files modified in every changeset.
1784 # The slow path checks files modified in every changeset.
1801 # This is really slow on large repos, so compute the set lazily.
1785 # This is really slow on large repos, so compute the set lazily.
1802 class lazywantedset(object):
1786 class lazywantedset(object):
1803 def __init__(self):
1787 def __init__(self):
1804 self.set = set()
1788 self.set = set()
1805 self.revs = set(revs)
1789 self.revs = set(revs)
1806
1790
1807 # No need to worry about locality here because it will be accessed
1791 # No need to worry about locality here because it will be accessed
1808 # in the same order as the increasing window below.
1792 # in the same order as the increasing window below.
1809 def __contains__(self, value):
1793 def __contains__(self, value):
1810 if value in self.set:
1794 if value in self.set:
1811 return True
1795 return True
1812 elif not value in self.revs:
1796 elif not value in self.revs:
1813 return False
1797 return False
1814 else:
1798 else:
1815 self.revs.discard(value)
1799 self.revs.discard(value)
1816 ctx = change(value)
1800 ctx = change(value)
1817 matches = filter(match, ctx.files())
1801 matches = filter(match, ctx.files())
1818 if matches:
1802 if matches:
1819 fncache[value] = matches
1803 fncache[value] = matches
1820 self.set.add(value)
1804 self.set.add(value)
1821 return True
1805 return True
1822 return False
1806 return False
1823
1807
1824 def discard(self, value):
1808 def discard(self, value):
1825 self.revs.discard(value)
1809 self.revs.discard(value)
1826 self.set.discard(value)
1810 self.set.discard(value)
1827
1811
1828 wanted = lazywantedset()
1812 wanted = lazywantedset()
1829
1813
1830 # it might be worthwhile to do this in the iterator if the rev range
1814 # it might be worthwhile to do this in the iterator if the rev range
1831 # is descending and the prune args are all within that range
1815 # is descending and the prune args are all within that range
1832 for rev in opts.get('prune', ()):
1816 for rev in opts.get('prune', ()):
1833 rev = repo[rev].rev()
1817 rev = repo[rev].rev()
1834 ff = _followfilter(repo)
1818 ff = _followfilter(repo)
1835 stop = min(revs[0], revs[-1])
1819 stop = min(revs[0], revs[-1])
1836 for x in xrange(rev, stop - 1, -1):
1820 for x in xrange(rev, stop - 1, -1):
1837 if ff.match(x):
1821 if ff.match(x):
1838 wanted = wanted - [x]
1822 wanted = wanted - [x]
1839
1823
1840 # Now that wanted is correctly initialized, we can iterate over the
1824 # Now that wanted is correctly initialized, we can iterate over the
1841 # revision range, yielding only revisions in wanted.
1825 # revision range, yielding only revisions in wanted.
1842 def iterate():
1826 def iterate():
1843 if follow and match.always():
1827 if follow and match.always():
1844 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1828 ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
1845 def want(rev):
1829 def want(rev):
1846 return ff.match(rev) and rev in wanted
1830 return ff.match(rev) and rev in wanted
1847 else:
1831 else:
1848 def want(rev):
1832 def want(rev):
1849 return rev in wanted
1833 return rev in wanted
1850
1834
1851 it = iter(revs)
1835 it = iter(revs)
1852 stopiteration = False
1836 stopiteration = False
1853 for windowsize in increasingwindows():
1837 for windowsize in increasingwindows():
1854 nrevs = []
1838 nrevs = []
1855 for i in xrange(windowsize):
1839 for i in xrange(windowsize):
1856 rev = next(it, None)
1840 rev = next(it, None)
1857 if rev is None:
1841 if rev is None:
1858 stopiteration = True
1842 stopiteration = True
1859 break
1843 break
1860 elif want(rev):
1844 elif want(rev):
1861 nrevs.append(rev)
1845 nrevs.append(rev)
1862 for rev in sorted(nrevs):
1846 for rev in sorted(nrevs):
1863 fns = fncache.get(rev)
1847 fns = fncache.get(rev)
1864 ctx = change(rev)
1848 ctx = change(rev)
1865 if not fns:
1849 if not fns:
1866 def fns_generator():
1850 def fns_generator():
1867 for f in ctx.files():
1851 for f in ctx.files():
1868 if match(f):
1852 if match(f):
1869 yield f
1853 yield f
1870 fns = fns_generator()
1854 fns = fns_generator()
1871 prepare(ctx, fns)
1855 prepare(ctx, fns)
1872 for rev in nrevs:
1856 for rev in nrevs:
1873 yield change(rev)
1857 yield change(rev)
1874
1858
1875 if stopiteration:
1859 if stopiteration:
1876 break
1860 break
1877
1861
1878 return iterate()
1862 return iterate()
1879
1863
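# Editor's note: the block below is an illustrative, standalone sketch (not
# part of cmdutil) of the lazy-membership idea used by lazywantedset above:
# the expensive "does this revision touch an interesting file?" check runs
# only for revisions that are actually queried, and positive answers are
# cached in a plain set. All names here are invented for the example.
class _lazymembershipdemo(object):
    def __init__(self, candidates, predicate):
        self._pending = set(candidates)   # revisions not examined yet
        self._hits = set()                # revisions known to match
        self._predicate = predicate       # expensive per-revision check
    def __contains__(self, rev):
        if rev in self._hits:
            return True
        if rev not in self._pending:
            return False
        self._pending.discard(rev)        # never re-run the check for this rev
        if self._predicate(rev):
            self._hits.add(rev)
            return True
        return False

# usage with a toy predicate (even revision numbers are "interesting"):
#   wanted = _lazymembershipdemo(range(10), lambda r: r % 2 == 0)
#   4 in wanted  -> True (computed once, then cached)
#   5 in wanted  -> False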
1880 def _makefollowlogfilematcher(repo, files, followfirst):
1864 def _makefollowlogfilematcher(repo, files, followfirst):
1881 # When displaying a revision with --patch --follow FILE, we have
1865 # When displaying a revision with --patch --follow FILE, we have
1882 # to know which file of the revision must be diffed. With
1866 # to know which file of the revision must be diffed. With
1883 # --follow, we want the names of the ancestors of FILE in the
1867 # --follow, we want the names of the ancestors of FILE in the
1884 # revision, stored in "fcache". "fcache" is populated by
1868 # revision, stored in "fcache". "fcache" is populated by
1885 # reproducing the graph traversal already done by --follow revset
1869 # reproducing the graph traversal already done by --follow revset
1886 # and relating linkrevs to file names (which is not "correct" but
1870 # and relating linkrevs to file names (which is not "correct" but
1887 # good enough).
1871 # good enough).
1888 fcache = {}
1872 fcache = {}
1889 fcacheready = [False]
1873 fcacheready = [False]
1890 pctx = repo['.']
1874 pctx = repo['.']
1891
1875
1892 def populate():
1876 def populate():
1893 for fn in files:
1877 for fn in files:
1894 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1878 for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
1895 for c in i:
1879 for c in i:
1896 fcache.setdefault(c.linkrev(), set()).add(c.path())
1880 fcache.setdefault(c.linkrev(), set()).add(c.path())
1897
1881
1898 def filematcher(rev):
1882 def filematcher(rev):
1899 if not fcacheready[0]:
1883 if not fcacheready[0]:
1900 # Lazy initialization
1884 # Lazy initialization
1901 fcacheready[0] = True
1885 fcacheready[0] = True
1902 populate()
1886 populate()
1903 return scmutil.matchfiles(repo, fcache.get(rev, []))
1887 return scmutil.matchfiles(repo, fcache.get(rev, []))
1904
1888
1905 return filematcher
1889 return filematcher
1906
1890
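# Editor's note: illustrative aside, not cmdutil code. "fcacheready = [False]"
# above is the usual Python 2 workaround for mutable closure state (there is
# no 'nonlocal' in Python 2). A minimal standalone version of the same
# lazy-initialization pattern; every name here is invented for the example.
def _makelazylookupdemo(load):
    cache = {}
    loaded = [False]            # one-element list stands in for 'nonlocal'
    def lookup(key):
        if not loaded[0]:
            loaded[0] = True    # run the expensive load() exactly once
            cache.update(load())
        return cache.get(key, set())
    return lookup

# usage: load() runs only on the first lookup, later calls hit the cache
#   lookup = _makelazylookupdemo(lambda: {1: set(['a']), 2: set(['a', 'b'])})
#   lookup(2) -> set(['a', 'b']);  lookup(99) -> set()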
1907 def _makenofollowlogfilematcher(repo, pats, opts):
1891 def _makenofollowlogfilematcher(repo, pats, opts):
1908 '''hook for extensions to override the filematcher for non-follow cases'''
1892 '''hook for extensions to override the filematcher for non-follow cases'''
1909 return None
1893 return None
1910
1894
1911 def _makelogrevset(repo, pats, opts, revs):
1895 def _makelogrevset(repo, pats, opts, revs):
1912 """Return (expr, filematcher) where expr is a revset string built
1896 """Return (expr, filematcher) where expr is a revset string built
1913 from log options and file patterns, or None. If --stat or --patch
1897 from log options and file patterns, or None. If --stat or --patch
1914 are not passed filematcher is None. Otherwise it is a callable
1898 are not passed filematcher is None. Otherwise it is a callable
1915 taking a revision number and returning a match object filtering
1899 taking a revision number and returning a match object filtering
1916 the files to be detailed when displaying the revision.
1900 the files to be detailed when displaying the revision.
1917 """
1901 """
1918 opt2revset = {
1902 opt2revset = {
1919 'no_merges': ('not merge()', None),
1903 'no_merges': ('not merge()', None),
1920 'only_merges': ('merge()', None),
1904 'only_merges': ('merge()', None),
1921 '_ancestors': ('ancestors(%(val)s)', None),
1905 '_ancestors': ('ancestors(%(val)s)', None),
1922 '_fancestors': ('_firstancestors(%(val)s)', None),
1906 '_fancestors': ('_firstancestors(%(val)s)', None),
1923 '_descendants': ('descendants(%(val)s)', None),
1907 '_descendants': ('descendants(%(val)s)', None),
1924 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1908 '_fdescendants': ('_firstdescendants(%(val)s)', None),
1925 '_matchfiles': ('_matchfiles(%(val)s)', None),
1909 '_matchfiles': ('_matchfiles(%(val)s)', None),
1926 'date': ('date(%(val)r)', None),
1910 'date': ('date(%(val)r)', None),
1927 'branch': ('branch(%(val)r)', ' or '),
1911 'branch': ('branch(%(val)r)', ' or '),
1928 '_patslog': ('filelog(%(val)r)', ' or '),
1912 '_patslog': ('filelog(%(val)r)', ' or '),
1929 '_patsfollow': ('follow(%(val)r)', ' or '),
1913 '_patsfollow': ('follow(%(val)r)', ' or '),
1930 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1914 '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
1931 'keyword': ('keyword(%(val)r)', ' or '),
1915 'keyword': ('keyword(%(val)r)', ' or '),
1932 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1916 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
1933 'user': ('user(%(val)r)', ' or '),
1917 'user': ('user(%(val)r)', ' or '),
1934 }
1918 }
1935
1919
1936 opts = dict(opts)
1920 opts = dict(opts)
1937 # follow or not follow?
1921 # follow or not follow?
1938 follow = opts.get('follow') or opts.get('follow_first')
1922 follow = opts.get('follow') or opts.get('follow_first')
1939 if opts.get('follow_first'):
1923 if opts.get('follow_first'):
1940 followfirst = 1
1924 followfirst = 1
1941 else:
1925 else:
1942 followfirst = 0
1926 followfirst = 0
1943 # --follow with FILE behavior depends on revs...
1927 # --follow with FILE behavior depends on revs...
1944 it = iter(revs)
1928 it = iter(revs)
1945 startrev = next(it)
1929 startrev = next(it)
1946 followdescendants = startrev < next(it, startrev)
1930 followdescendants = startrev < next(it, startrev)
1947
1931
1948 # branch and only_branch are really aliases and must be handled at
1932 # branch and only_branch are really aliases and must be handled at
1949 # the same time
1933 # the same time
1950 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1934 opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
1951 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1935 opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
1952 # pats/include/exclude are passed to match.match() directly in
1936 # pats/include/exclude are passed to match.match() directly in
1953 # _matchfiles() revset but walkchangerevs() builds its matcher with
1937 # _matchfiles() revset but walkchangerevs() builds its matcher with
1954 # scmutil.match(). The difference is input pats are globbed on
1938 # scmutil.match(). The difference is input pats are globbed on
1955 # platforms without shell expansion (windows).
1939 # platforms without shell expansion (windows).
1956 wctx = repo[None]
1940 wctx = repo[None]
1957 match, pats = scmutil.matchandpats(wctx, pats, opts)
1941 match, pats = scmutil.matchandpats(wctx, pats, opts)
1958 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1942 slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
1959 opts.get('removed'))
1943 opts.get('removed'))
1960 if not slowpath:
1944 if not slowpath:
1961 for f in match.files():
1945 for f in match.files():
1962 if follow and f not in wctx:
1946 if follow and f not in wctx:
1963 # If the file exists, it may be a directory, so let it
1947 # If the file exists, it may be a directory, so let it
1964 # take the slow path.
1948 # take the slow path.
1965 if os.path.exists(repo.wjoin(f)):
1949 if os.path.exists(repo.wjoin(f)):
1966 slowpath = True
1950 slowpath = True
1967 continue
1951 continue
1968 else:
1952 else:
1969 raise util.Abort(_('cannot follow file not in parent '
1953 raise util.Abort(_('cannot follow file not in parent '
1970 'revision: "%s"') % f)
1954 'revision: "%s"') % f)
1971 filelog = repo.file(f)
1955 filelog = repo.file(f)
1972 if not filelog:
1956 if not filelog:
1973 # A zero count may be a directory or deleted file, so
1957 # A zero count may be a directory or deleted file, so
1974 # try to find matching entries on the slow path.
1958 # try to find matching entries on the slow path.
1975 if follow:
1959 if follow:
1976 raise util.Abort(
1960 raise util.Abort(
1977 _('cannot follow nonexistent file: "%s"') % f)
1961 _('cannot follow nonexistent file: "%s"') % f)
1978 slowpath = True
1962 slowpath = True
1979
1963
1980 # We decided to fall back to the slowpath because at least one
1964 # We decided to fall back to the slowpath because at least one
1981 # of the paths was not a file. Check to see if at least one of them
1965 # of the paths was not a file. Check to see if at least one of them
1982 # existed in history - in that case, we'll continue down the
1966 # existed in history - in that case, we'll continue down the
1983 # slowpath; otherwise, we can turn off the slowpath
1967 # slowpath; otherwise, we can turn off the slowpath
1984 if slowpath:
1968 if slowpath:
1985 for path in match.files():
1969 for path in match.files():
1986 if path == '.' or path in repo.store:
1970 if path == '.' or path in repo.store:
1987 break
1971 break
1988 else:
1972 else:
1989 slowpath = False
1973 slowpath = False
1990
1974
1991 fpats = ('_patsfollow', '_patsfollowfirst')
1975 fpats = ('_patsfollow', '_patsfollowfirst')
1992 fnopats = (('_ancestors', '_fancestors'),
1976 fnopats = (('_ancestors', '_fancestors'),
1993 ('_descendants', '_fdescendants'))
1977 ('_descendants', '_fdescendants'))
1994 if slowpath:
1978 if slowpath:
1995 # See walkchangerevs() slow path.
1979 # See walkchangerevs() slow path.
1996 #
1980 #
1997 # pats/include/exclude cannot be represented as separate
1981 # pats/include/exclude cannot be represented as separate
1998 # revset expressions as their filtering logic applies at file
1982 # revset expressions as their filtering logic applies at file
1999 # level. For instance "-I a -X b" matches a revision touching
1983 # level. For instance "-I a -X b" matches a revision touching
2000 # "a" and "b" while "file(a) and not file(b)" does
1984 # "a" and "b" while "file(a) and not file(b)" does
2001 # not. Besides, filesets are evaluated against the working
1985 # not. Besides, filesets are evaluated against the working
2002 # directory.
1986 # directory.
2003 matchargs = ['r:', 'd:relpath']
1987 matchargs = ['r:', 'd:relpath']
2004 for p in pats:
1988 for p in pats:
2005 matchargs.append('p:' + p)
1989 matchargs.append('p:' + p)
2006 for p in opts.get('include', []):
1990 for p in opts.get('include', []):
2007 matchargs.append('i:' + p)
1991 matchargs.append('i:' + p)
2008 for p in opts.get('exclude', []):
1992 for p in opts.get('exclude', []):
2009 matchargs.append('x:' + p)
1993 matchargs.append('x:' + p)
2010 matchargs = ','.join(('%r' % p) for p in matchargs)
1994 matchargs = ','.join(('%r' % p) for p in matchargs)
2011 opts['_matchfiles'] = matchargs
1995 opts['_matchfiles'] = matchargs
2012 if follow:
1996 if follow:
2013 opts[fnopats[0][followfirst]] = '.'
1997 opts[fnopats[0][followfirst]] = '.'
2014 else:
1998 else:
2015 if follow:
1999 if follow:
2016 if pats:
2000 if pats:
2017 # follow() revset interprets its file argument as a
2001 # follow() revset interprets its file argument as a
2018 # manifest entry, so use match.files(), not pats.
2002 # manifest entry, so use match.files(), not pats.
2019 opts[fpats[followfirst]] = list(match.files())
2003 opts[fpats[followfirst]] = list(match.files())
2020 else:
2004 else:
2021 op = fnopats[followdescendants][followfirst]
2005 op = fnopats[followdescendants][followfirst]
2022 opts[op] = 'rev(%d)' % startrev
2006 opts[op] = 'rev(%d)' % startrev
2023 else:
2007 else:
2024 opts['_patslog'] = list(pats)
2008 opts['_patslog'] = list(pats)
2025
2009
2026 filematcher = None
2010 filematcher = None
2027 if opts.get('patch') or opts.get('stat'):
2011 if opts.get('patch') or opts.get('stat'):
2028 # When following files, track renames via a special matcher.
2012 # When following files, track renames via a special matcher.
2029 # If we're forced to take the slowpath it means we're following
2013 # If we're forced to take the slowpath it means we're following
2030 # at least one pattern/directory, so don't bother with rename tracking.
2014 # at least one pattern/directory, so don't bother with rename tracking.
2031 if follow and not match.always() and not slowpath:
2015 if follow and not match.always() and not slowpath:
2032 # _makefollowlogfilematcher expects its files argument to be
2016 # _makefollowlogfilematcher expects its files argument to be
2033 # relative to the repo root, so use match.files(), not pats.
2017 # relative to the repo root, so use match.files(), not pats.
2034 filematcher = _makefollowlogfilematcher(repo, match.files(),
2018 filematcher = _makefollowlogfilematcher(repo, match.files(),
2035 followfirst)
2019 followfirst)
2036 else:
2020 else:
2037 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2021 filematcher = _makenofollowlogfilematcher(repo, pats, opts)
2038 if filematcher is None:
2022 if filematcher is None:
2039 filematcher = lambda rev: match
2023 filematcher = lambda rev: match
2040
2024
2041 expr = []
2025 expr = []
2042 for op, val in sorted(opts.iteritems()):
2026 for op, val in sorted(opts.iteritems()):
2043 if not val:
2027 if not val:
2044 continue
2028 continue
2045 if op not in opt2revset:
2029 if op not in opt2revset:
2046 continue
2030 continue
2047 revop, andor = opt2revset[op]
2031 revop, andor = opt2revset[op]
2048 if '%(val)' not in revop:
2032 if '%(val)' not in revop:
2049 expr.append(revop)
2033 expr.append(revop)
2050 else:
2034 else:
2051 if not isinstance(val, list):
2035 if not isinstance(val, list):
2052 e = revop % {'val': val}
2036 e = revop % {'val': val}
2053 else:
2037 else:
2054 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2038 e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
2055 expr.append(e)
2039 expr.append(e)
2056
2040
2057 if expr:
2041 if expr:
2058 expr = '(' + ' and '.join(expr) + ')'
2042 expr = '(' + ' and '.join(expr) + ')'
2059 else:
2043 else:
2060 expr = None
2044 expr = None
2061 return expr, filematcher
2045 return expr, filematcher
2062
2046
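# Editor's note: hedged, standalone illustration (not Mercurial API code) of
# how _makelogrevset above turns option values into revset fragments via a
# template table and then joins the fragments with "and". The two table
# entries are stand-ins borrowed from the real opt2revset mapping.
_demoopt2revset = {
    'user': ('user(%(val)r)', ' or '),
    'no_merges': ('not merge()', None),
}

def _demobuildexpr(opts):
    expr = []
    for op, val in sorted(opts.items()):
        if not val or op not in _demoopt2revset:
            continue
        revop, andor = _demoopt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        elif isinstance(val, list):
            expr.append('(' + andor.join(revop % {'val': v} for v in val) + ')')
        else:
            expr.append(revop % {'val': val})
    return '(' + ' and '.join(expr) + ')' if expr else None

# _demobuildexpr({'user': ['alice', 'bob'], 'no_merges': True})
# -> "(not merge() and (user('alice') or user('bob')))"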
2063 def _logrevs(repo, opts):
2047 def _logrevs(repo, opts):
2064 # Default --rev value depends on --follow but --follow behavior
2048 # Default --rev value depends on --follow but --follow behavior
2065 # depends on revisions resolved from --rev...
2049 # depends on revisions resolved from --rev...
2066 follow = opts.get('follow') or opts.get('follow_first')
2050 follow = opts.get('follow') or opts.get('follow_first')
2067 if opts.get('rev'):
2051 if opts.get('rev'):
2068 revs = scmutil.revrange(repo, opts['rev'])
2052 revs = scmutil.revrange(repo, opts['rev'])
2069 elif follow and repo.dirstate.p1() == nullid:
2053 elif follow and repo.dirstate.p1() == nullid:
2070 revs = revset.baseset()
2054 revs = revset.baseset()
2071 elif follow:
2055 elif follow:
2072 revs = repo.revs('reverse(:.)')
2056 revs = repo.revs('reverse(:.)')
2073 else:
2057 else:
2074 revs = revset.spanset(repo)
2058 revs = revset.spanset(repo)
2075 revs.reverse()
2059 revs.reverse()
2076 return revs
2060 return revs
2077
2061
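# Editor's note: plain-Python restatement of the defaulting rule implemented
# in _logrevs() above, kept only as documentation; the returned strings are
# stand-ins for the real revset objects (an assumption for illustration).
def _demodefaultrevs(rev_opt, follow, wdir_parent_is_null):
    if rev_opt:
        return 'revrange(%r)' % (rev_opt,)
    if follow and wdir_parent_is_null:
        return 'empty baseset'       # nothing to follow from the null revision
    if follow:
        return 'reverse(:.)'         # ancestors of the working parent, newest first
    return 'reversed spanset'        # every revision, newest first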
2078 def getgraphlogrevs(repo, pats, opts):
2062 def getgraphlogrevs(repo, pats, opts):
2079 """Return (revs, expr, filematcher) where revs is an iterable of
2063 """Return (revs, expr, filematcher) where revs is an iterable of
2080 revision numbers, expr is a revset string built from log options
2064 revision numbers, expr is a revset string built from log options
2081 and file patterns, or None, and is used to filter 'revs'. If --stat or
2065 and file patterns, or None, and is used to filter 'revs'. If --stat or
2082 --patch are not passed filematcher is None. Otherwise it is a
2066 --patch are not passed filematcher is None. Otherwise it is a
2083 callable taking a revision number and returning a match object
2067 callable taking a revision number and returning a match object
2084 filtering the files to be detailed when displaying the revision.
2068 filtering the files to be detailed when displaying the revision.
2085 """
2069 """
2086 limit = loglimit(opts)
2070 limit = loglimit(opts)
2087 revs = _logrevs(repo, opts)
2071 revs = _logrevs(repo, opts)
2088 if not revs:
2072 if not revs:
2089 return revset.baseset(), None, None
2073 return revset.baseset(), None, None
2090 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2074 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2091 if opts.get('rev'):
2075 if opts.get('rev'):
2092 # User-specified revs might be unsorted, but don't sort before
2076 # User-specified revs might be unsorted, but don't sort before
2093 # _makelogrevset because it might depend on the order of revs
2077 # _makelogrevset because it might depend on the order of revs
2094 revs.sort(reverse=True)
2078 revs.sort(reverse=True)
2095 if expr:
2079 if expr:
2096 # Revset matchers often operate faster on revisions in changelog
2080 # Revset matchers often operate faster on revisions in changelog
2097 # order, because most filters deal with the changelog.
2081 # order, because most filters deal with the changelog.
2098 revs.reverse()
2082 revs.reverse()
2099 matcher = revset.match(repo.ui, expr)
2083 matcher = revset.match(repo.ui, expr)
2100 # Revset matches can reorder revisions. "A or B" typically returns
2084 # Revset matches can reorder revisions. "A or B" typically returns
2101 # the revision matching A then the revision matching B. Sort
2085 # the revision matching A then the revision matching B. Sort
2102 # again to fix that.
2086 # again to fix that.
2103 revs = matcher(repo, revs)
2087 revs = matcher(repo, revs)
2104 revs.sort(reverse=True)
2088 revs.sort(reverse=True)
2105 if limit is not None:
2089 if limit is not None:
2106 limitedrevs = []
2090 limitedrevs = []
2107 for idx, rev in enumerate(revs):
2091 for idx, rev in enumerate(revs):
2108 if idx >= limit:
2092 if idx >= limit:
2109 break
2093 break
2110 limitedrevs.append(rev)
2094 limitedrevs.append(rev)
2111 revs = revset.baseset(limitedrevs)
2095 revs = revset.baseset(limitedrevs)
2112
2096
2113 return revs, expr, filematcher
2097 return revs, expr, filematcher
2114
2098
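# Editor's note: tiny standalone illustration of the limiting pattern used
# above: at most 'limit' revisions are materialized from the (possibly lazy)
# revset by walking it with enumerate() and breaking early. Generic helper
# with invented names, shown for reference only:
def _demotakefirst(iterable, limit):
    out = []
    for idx, item in enumerate(iterable):
        if idx >= limit:
            break
        out.append(item)
    return out

# _demotakefirst(iter(range(1000)), 3) -> [0, 1, 2]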
2115 def getlogrevs(repo, pats, opts):
2099 def getlogrevs(repo, pats, opts):
2116 """Return (revs, expr, filematcher) where revs is an iterable of
2100 """Return (revs, expr, filematcher) where revs is an iterable of
2117 revision numbers, expr is a revset string built from log options
2101 revision numbers, expr is a revset string built from log options
2118 and file patterns, or None, and is used to filter 'revs'. If --stat or
2102 and file patterns, or None, and is used to filter 'revs'. If --stat or
2119 --patch are not passed filematcher is None. Otherwise it is a
2103 --patch are not passed filematcher is None. Otherwise it is a
2120 callable taking a revision number and returning a match object
2104 callable taking a revision number and returning a match object
2121 filtering the files to be detailed when displaying the revision.
2105 filtering the files to be detailed when displaying the revision.
2122 """
2106 """
2123 limit = loglimit(opts)
2107 limit = loglimit(opts)
2124 revs = _logrevs(repo, opts)
2108 revs = _logrevs(repo, opts)
2125 if not revs:
2109 if not revs:
2126 return revset.baseset([]), None, None
2110 return revset.baseset([]), None, None
2127 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2111 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
2128 if expr:
2112 if expr:
2129 # Revset matchers often operate faster on revisions in changelog
2113 # Revset matchers often operate faster on revisions in changelog
2130 # order, because most filters deal with the changelog.
2114 # order, because most filters deal with the changelog.
2131 if not opts.get('rev'):
2115 if not opts.get('rev'):
2132 revs.reverse()
2116 revs.reverse()
2133 matcher = revset.match(repo.ui, expr)
2117 matcher = revset.match(repo.ui, expr)
2134 # Revset matches can reorder revisions. "A or B" typically returns
2118 # Revset matches can reorder revisions. "A or B" typically returns
2135 # the revision matching A then the revision matching B. Sort
2119 # the revision matching A then the revision matching B. Sort
2136 # again to fix that.
2120 # again to fix that.
2137 revs = matcher(repo, revs)
2121 revs = matcher(repo, revs)
2138 if not opts.get('rev'):
2122 if not opts.get('rev'):
2139 revs.sort(reverse=True)
2123 revs.sort(reverse=True)
2140 if limit is not None:
2124 if limit is not None:
2141 limitedrevs = []
2125 limitedrevs = []
2142 for idx, r in enumerate(revs):
2126 for idx, r in enumerate(revs):
2143 if limit <= idx:
2127 if limit <= idx:
2144 break
2128 break
2145 limitedrevs.append(r)
2129 limitedrevs.append(r)
2146 revs = revset.baseset(limitedrevs)
2130 revs = revset.baseset(limitedrevs)
2147
2131
2148 return revs, expr, filematcher
2132 return revs, expr, filematcher
2149
2133
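# Editor's note: hedged usage sketch only. It assumes it runs inside cmdutil
# (so getlogrevs and show_changeset are in scope) and that a ui/repo pair and
# command options are supplied by the caller; it is not runnable on its own
# and omits all error handling. It shows how the (revs, expr, filematcher)
# triple returned above is typically consumed.
def _demosimplelog(ui, repo, pats, opts):
    revs, expr, filematcher = getlogrevs(repo, pats, opts)
    displayer = show_changeset(ui, repo, opts, buffered=False)
    for rev in revs:
        ctx = repo[rev]
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(rev)
        displayer.show(ctx, matchfn=revmatchfn)
    displayer.close()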
2150 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2134 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
2151 filematcher=None):
2135 filematcher=None):
2152 seen, state = [], graphmod.asciistate()
2136 seen, state = [], graphmod.asciistate()
2153 for rev, type, ctx, parents in dag:
2137 for rev, type, ctx, parents in dag:
2154 char = 'o'
2138 char = 'o'
2155 if ctx.node() in showparents:
2139 if ctx.node() in showparents:
2156 char = '@'
2140 char = '@'
2157 elif ctx.obsolete():
2141 elif ctx.obsolete():
2158 char = 'x'
2142 char = 'x'
2159 elif ctx.closesbranch():
2143 elif ctx.closesbranch():
2160 char = '_'
2144 char = '_'
2161 copies = None
2145 copies = None
2162 if getrenamed and ctx.rev():
2146 if getrenamed and ctx.rev():
2163 copies = []
2147 copies = []
2164 for fn in ctx.files():
2148 for fn in ctx.files():
2165 rename = getrenamed(fn, ctx.rev())
2149 rename = getrenamed(fn, ctx.rev())
2166 if rename:
2150 if rename:
2167 copies.append((fn, rename[0]))
2151 copies.append((fn, rename[0]))
2168 revmatchfn = None
2152 revmatchfn = None
2169 if filematcher is not None:
2153 if filematcher is not None:
2170 revmatchfn = filematcher(ctx.rev())
2154 revmatchfn = filematcher(ctx.rev())
2171 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2155 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2172 lines = displayer.hunk.pop(rev).split('\n')
2156 lines = displayer.hunk.pop(rev).split('\n')
2173 if not lines[-1]:
2157 if not lines[-1]:
2174 del lines[-1]
2158 del lines[-1]
2175 displayer.flush(ctx)
2159 displayer.flush(ctx)
2176 edges = edgefn(type, char, lines, seen, rev, parents)
2160 edges = edgefn(type, char, lines, seen, rev, parents)
2177 for type, char, lines, coldata in edges:
2161 for type, char, lines, coldata in edges:
2178 graphmod.ascii(ui, state, type, char, lines, coldata)
2162 graphmod.ascii(ui, state, type, char, lines, coldata)
2179 displayer.close()
2163 displayer.close()
2180
2164
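# Editor's note: standalone restatement of the glyph choice made in
# displaygraph() above, for quick reference; not used by the code itself.
def _demographchar(is_wdir_parent, is_obsolete, closes_branch):
    if is_wdir_parent:
        return '@'      # changeset is a parent of the working directory
    if is_obsolete:
        return 'x'      # obsolete changeset
    if closes_branch:
        return '_'      # changeset closes its branch
    return 'o'          # any other changeset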
2181 def graphlog(ui, repo, *pats, **opts):
2165 def graphlog(ui, repo, *pats, **opts):
2182 # Parameters are identical to log command ones
2166 # Parameters are identical to log command ones
2183 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2167 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
2184 revdag = graphmod.dagwalker(repo, revs)
2168 revdag = graphmod.dagwalker(repo, revs)
2185
2169
2186 getrenamed = None
2170 getrenamed = None
2187 if opts.get('copies'):
2171 if opts.get('copies'):
2188 endrev = None
2172 endrev = None
2189 if opts.get('rev'):
2173 if opts.get('rev'):
2190 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2174 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
2191 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2175 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2192 displayer = show_changeset(ui, repo, opts, buffered=True)
2176 displayer = show_changeset(ui, repo, opts, buffered=True)
2193 showparents = [ctx.node() for ctx in repo[None].parents()]
2177 showparents = [ctx.node() for ctx in repo[None].parents()]
2194 displaygraph(ui, revdag, displayer, showparents,
2178 displaygraph(ui, revdag, displayer, showparents,
2195 graphmod.asciiedges, getrenamed, filematcher)
2179 graphmod.asciiedges, getrenamed, filematcher)
2196
2180
2197 def checkunsupportedgraphflags(pats, opts):
2181 def checkunsupportedgraphflags(pats, opts):
2198 for op in ["newest_first"]:
2182 for op in ["newest_first"]:
2199 if op in opts and opts[op]:
2183 if op in opts and opts[op]:
2200 raise util.Abort(_("-G/--graph option is incompatible with --%s")
2184 raise util.Abort(_("-G/--graph option is incompatible with --%s")
2201 % op.replace("_", "-"))
2185 % op.replace("_", "-"))
2202
2186
2203 def graphrevs(repo, nodes, opts):
2187 def graphrevs(repo, nodes, opts):
2204 limit = loglimit(opts)
2188 limit = loglimit(opts)
2205 nodes.reverse()
2189 nodes.reverse()
2206 if limit is not None:
2190 if limit is not None:
2207 nodes = nodes[:limit]
2191 nodes = nodes[:limit]
2208 return graphmod.nodes(repo, nodes)
2192 return graphmod.nodes(repo, nodes)
2209
2193
2210 def add(ui, repo, match, prefix, explicitonly, **opts):
2194 def add(ui, repo, match, prefix, explicitonly, **opts):
2211 join = lambda f: os.path.join(prefix, f)
2195 join = lambda f: os.path.join(prefix, f)
2212 bad = []
2196 bad = []
2213
2197
2214 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2198 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2215 names = []
2199 names = []
2216 wctx = repo[None]
2200 wctx = repo[None]
2217 cca = None
2201 cca = None
2218 abort, warn = scmutil.checkportabilityalert(ui)
2202 abort, warn = scmutil.checkportabilityalert(ui)
2219 if abort or warn:
2203 if abort or warn:
2220 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2204 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
2221
2205
2222 badmatch = matchmod.badmatch(match, badfn)
2206 badmatch = matchmod.badmatch(match, badfn)
2223 dirstate = repo.dirstate
2207 dirstate = repo.dirstate
2224 # We don't want to just call wctx.walk here, since it would return a lot of
2208 # We don't want to just call wctx.walk here, since it would return a lot of
2225 # clean files, which we aren't interested in, and walking them takes time.
2209 # clean files, which we aren't interested in, and walking them takes time.
2226 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2210 for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
2227 True, False, full=False)):
2211 True, False, full=False)):
2228 exact = match.exact(f)
2212 exact = match.exact(f)
2229 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2213 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
2230 if cca:
2214 if cca:
2231 cca(f)
2215 cca(f)
2232 names.append(f)
2216 names.append(f)
2233 if ui.verbose or not exact:
2217 if ui.verbose or not exact:
2234 ui.status(_('adding %s\n') % match.rel(f))
2218 ui.status(_('adding %s\n') % match.rel(f))
2235
2219
2236 for subpath in sorted(wctx.substate):
2220 for subpath in sorted(wctx.substate):
2237 sub = wctx.sub(subpath)
2221 sub = wctx.sub(subpath)
2238 try:
2222 try:
2239 submatch = matchmod.narrowmatcher(subpath, match)
2223 submatch = matchmod.narrowmatcher(subpath, match)
2240 if opts.get('subrepos'):
2224 if opts.get('subrepos'):
2241 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2225 bad.extend(sub.add(ui, submatch, prefix, False, **opts))
2242 else:
2226 else:
2243 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2227 bad.extend(sub.add(ui, submatch, prefix, True, **opts))
2244 except error.LookupError:
2228 except error.LookupError:
2245 ui.status(_("skipping missing subrepository: %s\n")
2229 ui.status(_("skipping missing subrepository: %s\n")
2246 % join(subpath))
2230 % join(subpath))
2247
2231
2248 if not opts.get('dry_run'):
2232 if not opts.get('dry_run'):
2249 rejected = wctx.add(names, prefix)
2233 rejected = wctx.add(names, prefix)
2250 bad.extend(f for f in rejected if f in match.files())
2234 bad.extend(f for f in rejected if f in match.files())
2251 return bad
2235 return bad
2252
2236
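# Editor's note: illustrative aside on the idiom used in add() above.
# "lambda x, y: bad.append(x) or match.bad(x, y)" works because list.append()
# returns None (falsy), so the 'or' always falls through to the original bad
# handler while still recording the path. Standalone equivalent with invented
# names:
def _demochainbad(record, original):
    return lambda x, y: record.append(x) or original(x, y)

# seen = []
# badfn = _demochainbad(seen, lambda x, y: None)
# badfn('foo', 'no such file')   # calls the original handler...
# seen -> ['foo']                # ...and records the path as a side effect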
2253 def forget(ui, repo, match, prefix, explicitonly):
2237 def forget(ui, repo, match, prefix, explicitonly):
2254 join = lambda f: os.path.join(prefix, f)
2238 join = lambda f: os.path.join(prefix, f)
2255 bad = []
2239 bad = []
2256 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2240 badfn = lambda x, y: bad.append(x) or match.bad(x, y)
2257 wctx = repo[None]
2241 wctx = repo[None]
2258 forgot = []
2242 forgot = []
2259
2243
2260 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2244 s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
2261 forget = sorted(s[0] + s[1] + s[3] + s[6])
2245 forget = sorted(s[0] + s[1] + s[3] + s[6])
2262 if explicitonly:
2246 if explicitonly:
2263 forget = [f for f in forget if match.exact(f)]
2247 forget = [f for f in forget if match.exact(f)]
2264
2248
2265 for subpath in sorted(wctx.substate):
2249 for subpath in sorted(wctx.substate):
2266 sub = wctx.sub(subpath)
2250 sub = wctx.sub(subpath)
2267 try:
2251 try:
2268 submatch = matchmod.narrowmatcher(subpath, match)
2252 submatch = matchmod.narrowmatcher(subpath, match)
2269 subbad, subforgot = sub.forget(submatch, prefix)
2253 subbad, subforgot = sub.forget(submatch, prefix)
2270 bad.extend([subpath + '/' + f for f in subbad])
2254 bad.extend([subpath + '/' + f for f in subbad])
2271 forgot.extend([subpath + '/' + f for f in subforgot])
2255 forgot.extend([subpath + '/' + f for f in subforgot])
2272 except error.LookupError:
2256 except error.LookupError:
2273 ui.status(_("skipping missing subrepository: %s\n")
2257 ui.status(_("skipping missing subrepository: %s\n")
2274 % join(subpath))
2258 % join(subpath))
2275
2259
2276 if not explicitonly:
2260 if not explicitonly:
2277 for f in match.files():
2261 for f in match.files():
2278 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2262 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2279 if f not in forgot:
2263 if f not in forgot:
2280 if repo.wvfs.exists(f):
2264 if repo.wvfs.exists(f):
2281 # Don't complain if the exact case match wasn't given.
2265 # Don't complain if the exact case match wasn't given.
2282 # But don't do this until after checking 'forgot', so
2266 # But don't do this until after checking 'forgot', so
2283 # that subrepo files aren't normalized, and this op is
2267 # that subrepo files aren't normalized, and this op is
2284 # purely from data cached by the status walk above.
2268 # purely from data cached by the status walk above.
2285 if repo.dirstate.normalize(f) in repo.dirstate:
2269 if repo.dirstate.normalize(f) in repo.dirstate:
2286 continue
2270 continue
2287 ui.warn(_('not removing %s: '
2271 ui.warn(_('not removing %s: '
2288 'file is already untracked\n')
2272 'file is already untracked\n')
2289 % match.rel(f))
2273 % match.rel(f))
2290 bad.append(f)
2274 bad.append(f)
2291
2275
2292 for f in forget:
2276 for f in forget:
2293 if ui.verbose or not match.exact(f):
2277 if ui.verbose or not match.exact(f):
2294 ui.status(_('removing %s\n') % match.rel(f))
2278 ui.status(_('removing %s\n') % match.rel(f))
2295
2279
2296 rejected = wctx.forget(forget, prefix)
2280 rejected = wctx.forget(forget, prefix)
2297 bad.extend(f for f in rejected if f in match.files())
2281 bad.extend(f for f in rejected if f in match.files())
2298 forgot.extend(f for f in forget if f not in rejected)
2282 forgot.extend(f for f in forget if f not in rejected)
2299 return bad, forgot
2283 return bad, forgot
2300
2284
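# Editor's note: the index arithmetic in forget() above relies on the order of
# the status tuple; remove() below spells the same indices out as
#     modified, added, deleted, clean = s[0], s[1], s[3], s[6]
# A named restatement of that selection, assuming a status-like 7-tuple
# (modified, added, removed, deleted, unknown, ignored, clean):
def _demoforgetcandidates(st):
    modified, added, deleted, clean = st[0], st[1], st[3], st[6]
    return sorted(modified + added + deleted + clean)

# _demoforgetcandidates((['m'], ['a'], ['r'], ['d'], ['u'], ['i'], ['c']))
# -> ['a', 'c', 'd', 'm']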
2301 def files(ui, ctx, m, fm, fmt, subrepos):
2285 def files(ui, ctx, m, fm, fmt, subrepos):
2302 rev = ctx.rev()
2286 rev = ctx.rev()
2303 ret = 1
2287 ret = 1
2304 ds = ctx.repo().dirstate
2288 ds = ctx.repo().dirstate
2305
2289
2306 for f in ctx.matches(m):
2290 for f in ctx.matches(m):
2307 if rev is None and ds[f] == 'r':
2291 if rev is None and ds[f] == 'r':
2308 continue
2292 continue
2309 fm.startitem()
2293 fm.startitem()
2310 if ui.verbose:
2294 if ui.verbose:
2311 fc = ctx[f]
2295 fc = ctx[f]
2312 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2296 fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
2313 fm.data(abspath=f)
2297 fm.data(abspath=f)
2314 fm.write('path', fmt, m.rel(f))
2298 fm.write('path', fmt, m.rel(f))
2315 ret = 0
2299 ret = 0
2316
2300
2317 for subpath in sorted(ctx.substate):
2301 for subpath in sorted(ctx.substate):
2318 def matchessubrepo(subpath):
2302 def matchessubrepo(subpath):
2319 return (m.always() or m.exact(subpath)
2303 return (m.always() or m.exact(subpath)
2320 or any(f.startswith(subpath + '/') for f in m.files()))
2304 or any(f.startswith(subpath + '/') for f in m.files()))
2321
2305
2322 if subrepos or matchessubrepo(subpath):
2306 if subrepos or matchessubrepo(subpath):
2323 sub = ctx.sub(subpath)
2307 sub = ctx.sub(subpath)
2324 try:
2308 try:
2325 submatch = matchmod.narrowmatcher(subpath, m)
2309 submatch = matchmod.narrowmatcher(subpath, m)
2326 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2310 if sub.printfiles(ui, submatch, fm, fmt, subrepos) == 0:
2327 ret = 0
2311 ret = 0
2328 except error.LookupError:
2312 except error.LookupError:
2329 ui.status(_("skipping missing subrepository: %s\n")
2313 ui.status(_("skipping missing subrepository: %s\n")
2330 % m.abs(subpath))
2314 % m.abs(subpath))
2331
2315
2332 return ret
2316 return ret
2333
2317
2334 def remove(ui, repo, m, prefix, after, force, subrepos):
2318 def remove(ui, repo, m, prefix, after, force, subrepos):
2335 join = lambda f: os.path.join(prefix, f)
2319 join = lambda f: os.path.join(prefix, f)
2336 ret = 0
2320 ret = 0
2337 s = repo.status(match=m, clean=True)
2321 s = repo.status(match=m, clean=True)
2338 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2322 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2339
2323
2340 wctx = repo[None]
2324 wctx = repo[None]
2341
2325
2342 for subpath in sorted(wctx.substate):
2326 for subpath in sorted(wctx.substate):
2343 def matchessubrepo(matcher, subpath):
2327 def matchessubrepo(matcher, subpath):
2344 if matcher.exact(subpath):
2328 if matcher.exact(subpath):
2345 return True
2329 return True
2346 for f in matcher.files():
2330 for f in matcher.files():
2347 if f.startswith(subpath):
2331 if f.startswith(subpath):
2348 return True
2332 return True
2349 return False
2333 return False
2350
2334
2351 if subrepos or matchessubrepo(m, subpath):
2335 if subrepos or matchessubrepo(m, subpath):
2352 sub = wctx.sub(subpath)
2336 sub = wctx.sub(subpath)
2353 try:
2337 try:
2354 submatch = matchmod.narrowmatcher(subpath, m)
2338 submatch = matchmod.narrowmatcher(subpath, m)
2355 if sub.removefiles(submatch, prefix, after, force, subrepos):
2339 if sub.removefiles(submatch, prefix, after, force, subrepos):
2356 ret = 1
2340 ret = 1
2357 except error.LookupError:
2341 except error.LookupError:
2358 ui.status(_("skipping missing subrepository: %s\n")
2342 ui.status(_("skipping missing subrepository: %s\n")
2359 % join(subpath))
2343 % join(subpath))
2360
2344
2361 # warn about failure to delete explicit files/dirs
2345 # warn about failure to delete explicit files/dirs
2362 deleteddirs = util.dirs(deleted)
2346 deleteddirs = util.dirs(deleted)
2363 for f in m.files():
2347 for f in m.files():
2364 def insubrepo():
2348 def insubrepo():
2365 for subpath in wctx.substate:
2349 for subpath in wctx.substate:
2366 if f.startswith(subpath):
2350 if f.startswith(subpath):
2367 return True
2351 return True
2368 return False
2352 return False
2369
2353
2370 isdir = f in deleteddirs or wctx.hasdir(f)
2354 isdir = f in deleteddirs or wctx.hasdir(f)
2371 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2355 if f in repo.dirstate or isdir or f == '.' or insubrepo():
2372 continue
2356 continue
2373
2357
2374 if repo.wvfs.exists(f):
2358 if repo.wvfs.exists(f):
2375 if repo.wvfs.isdir(f):
2359 if repo.wvfs.isdir(f):
2376 ui.warn(_('not removing %s: no tracked files\n')
2360 ui.warn(_('not removing %s: no tracked files\n')
2377 % m.rel(f))
2361 % m.rel(f))
2378 else:
2362 else:
2379 ui.warn(_('not removing %s: file is untracked\n')
2363 ui.warn(_('not removing %s: file is untracked\n')
2380 % m.rel(f))
2364 % m.rel(f))
2381 # missing files will generate a warning elsewhere
2365 # missing files will generate a warning elsewhere
2382 ret = 1
2366 ret = 1
2383
2367
2384 if force:
2368 if force:
2385 list = modified + deleted + clean + added
2369 list = modified + deleted + clean + added
2386 elif after:
2370 elif after:
2387 list = deleted
2371 list = deleted
2388 for f in modified + added + clean:
2372 for f in modified + added + clean:
2389 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2373 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2390 ret = 1
2374 ret = 1
2391 else:
2375 else:
2392 list = deleted + clean
2376 list = deleted + clean
2393 for f in modified:
2377 for f in modified:
2394 ui.warn(_('not removing %s: file is modified (use -f'
2378 ui.warn(_('not removing %s: file is modified (use -f'
2395 ' to force removal)\n') % m.rel(f))
2379 ' to force removal)\n') % m.rel(f))
2396 ret = 1
2380 ret = 1
2397 for f in added:
2381 for f in added:
2398 ui.warn(_('not removing %s: file has been marked for add'
2382 ui.warn(_('not removing %s: file has been marked for add'
2399 ' (use forget to undo)\n') % m.rel(f))
2383 ' (use forget to undo)\n') % m.rel(f))
2400 ret = 1
2384 ret = 1
2401
2385
2402 for f in sorted(list):
2386 for f in sorted(list):
2403 if ui.verbose or not m.exact(f):
2387 if ui.verbose or not m.exact(f):
2404 ui.status(_('removing %s\n') % m.rel(f))
2388 ui.status(_('removing %s\n') % m.rel(f))
2405
2389
2406 wlock = repo.wlock()
2390 wlock = repo.wlock()
2407 try:
2391 try:
2408 if not after:
2392 if not after:
2409 for f in list:
2393 for f in list:
2410 if f in added:
2394 if f in added:
2411 continue # we never unlink added files on remove
2395 continue # we never unlink added files on remove
2412 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2396 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2413 repo[None].forget(list)
2397 repo[None].forget(list)
2414 finally:
2398 finally:
2415 wlock.release()
2399 wlock.release()
2416
2400
2417 return ret
2401 return ret
2418
2402
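# Editor's note: standalone restatement of the selection logic in remove()
# above (which status buckets 'hg remove' actually drops), kept only as
# documentation; names are invented for the example.
def _demoremovallist(modified, added, deleted, clean, force=False, after=False):
    if force:
        return modified + deleted + clean + added
    if after:
        return deleted              # --after: only files already missing
    return deleted + clean          # default: refuse modified and added files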
2419 def cat(ui, repo, ctx, matcher, prefix, **opts):
2403 def cat(ui, repo, ctx, matcher, prefix, **opts):
2420 err = 1
2404 err = 1
2421
2405
2422 def write(path):
2406 def write(path):
2423 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2407 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2424 pathname=os.path.join(prefix, path))
2408 pathname=os.path.join(prefix, path))
2425 data = ctx[path].data()
2409 data = ctx[path].data()
2426 if opts.get('decode'):
2410 if opts.get('decode'):
2427 data = repo.wwritedata(path, data)
2411 data = repo.wwritedata(path, data)
2428 fp.write(data)
2412 fp.write(data)
2429 fp.close()
2413 fp.close()
2430
2414
2431 # Automation often uses hg cat on single files, so special case it
2415 # Automation often uses hg cat on single files, so special case it
2432 # for performance to avoid the cost of parsing the manifest.
2416 # for performance to avoid the cost of parsing the manifest.
2433 if len(matcher.files()) == 1 and not matcher.anypats():
2417 if len(matcher.files()) == 1 and not matcher.anypats():
2434 file = matcher.files()[0]
2418 file = matcher.files()[0]
2435 mf = repo.manifest
2419 mf = repo.manifest
2436 mfnode = ctx.manifestnode()
2420 mfnode = ctx.manifestnode()
2437 if mfnode and mf.find(mfnode, file)[0]:
2421 if mfnode and mf.find(mfnode, file)[0]:
2438 write(file)
2422 write(file)
2439 return 0
2423 return 0
2440
2424
2441 # Don't warn about "missing" files that are really in subrepos
2425 # Don't warn about "missing" files that are really in subrepos
2442 def badfn(path, msg):
2426 def badfn(path, msg):
2443 for subpath in ctx.substate:
2427 for subpath in ctx.substate:
2444 if path.startswith(subpath):
2428 if path.startswith(subpath):
2445 return
2429 return
2446 matcher.bad(path, msg)
2430 matcher.bad(path, msg)
2447
2431
2448 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2432 for abs in ctx.walk(matchmod.badmatch(matcher, badfn)):
2449 write(abs)
2433 write(abs)
2450 err = 0
2434 err = 0
2451
2435
2452 for subpath in sorted(ctx.substate):
2436 for subpath in sorted(ctx.substate):
2453 sub = ctx.sub(subpath)
2437 sub = ctx.sub(subpath)
2454 try:
2438 try:
2455 submatch = matchmod.narrowmatcher(subpath, matcher)
2439 submatch = matchmod.narrowmatcher(subpath, matcher)
2456
2440
2457 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2441 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2458 **opts):
2442 **opts):
2459 err = 0
2443 err = 0
2460 except error.RepoLookupError:
2444 except error.RepoLookupError:
2461 ui.status(_("skipping missing subrepository: %s\n")
2445 ui.status(_("skipping missing subrepository: %s\n")
2462 % os.path.join(prefix, subpath))
2446 % os.path.join(prefix, subpath))
2463
2447
2464 return err
2448 return err
2465
2449
2466 def commit(ui, repo, commitfunc, pats, opts):
2450 def commit(ui, repo, commitfunc, pats, opts):
2467 '''commit the specified files or all outstanding changes'''
2451 '''commit the specified files or all outstanding changes'''
2468 date = opts.get('date')
2452 date = opts.get('date')
2469 if date:
2453 if date:
2470 opts['date'] = util.parsedate(date)
2454 opts['date'] = util.parsedate(date)
2471 message = logmessage(ui, opts)
2455 message = logmessage(ui, opts)
2472 matcher = scmutil.match(repo[None], pats, opts)
2456 matcher = scmutil.match(repo[None], pats, opts)
2473
2457
2474 # extract addremove carefully -- this function can be called from a command
2458 # extract addremove carefully -- this function can be called from a command
2475 # that doesn't support addremove
2459 # that doesn't support addremove
2476 if opts.get('addremove'):
2460 if opts.get('addremove'):
2477 if scmutil.addremove(repo, matcher, "", opts) != 0:
2461 if scmutil.addremove(repo, matcher, "", opts) != 0:
2478 raise util.Abort(
2462 raise util.Abort(
2479 _("failed to mark all new/missing files as added/removed"))
2463 _("failed to mark all new/missing files as added/removed"))
2480
2464
2481 return commitfunc(ui, repo, message, matcher, opts)
2465 return commitfunc(ui, repo, message, matcher, opts)
2482
2466
2483 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2467 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2484 # avoid cycle context -> subrepo -> cmdutil
2468 # avoid cycle context -> subrepo -> cmdutil
2485 import context
2469 import context
2486
2470
2487 # amend will reuse the existing user if not specified, but the obsolete
2471 # amend will reuse the existing user if not specified, but the obsolete
2488 # marker creation requires that the current user's name is specified.
2472 # marker creation requires that the current user's name is specified.
2489 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2473 if obsolete.isenabled(repo, obsolete.createmarkersopt):
2490 ui.username() # raise exception if username not set
2474 ui.username() # raise exception if username not set
2491
2475
2492 ui.note(_('amending changeset %s\n') % old)
2476 ui.note(_('amending changeset %s\n') % old)
2493 base = old.p1()
2477 base = old.p1()
2494 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2478 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2495
2479
2496 wlock = dsguard = lock = newid = None
2480 wlock = dsguard = lock = newid = None
2497 try:
2481 try:
2498 wlock = repo.wlock()
2482 wlock = repo.wlock()
2499 dsguard = dirstateguard(repo, 'amend')
2483 dsguard = dirstateguard(repo, 'amend')
2500 lock = repo.lock()
2484 lock = repo.lock()
2501 tr = repo.transaction('amend')
2485 tr = repo.transaction('amend')
2502 try:
2486 try:
2503 # See if we got a message from -m or -l, if not, open the editor
2487 # See if we got a message from -m or -l, if not, open the editor
2504 # with the message of the changeset to amend
2488 # with the message of the changeset to amend
2505 message = logmessage(ui, opts)
2489 message = logmessage(ui, opts)
2506 # ensure logfile does not conflict with later enforcement of the
2490 # ensure logfile does not conflict with later enforcement of the
2507 # message. potential logfile content has been processed by
2491 # message. potential logfile content has been processed by
2508 # `logmessage` anyway.
2492 # `logmessage` anyway.
2509 opts.pop('logfile')
2493 opts.pop('logfile')
2510 # First, do a regular commit to record all changes in the working
2494 # First, do a regular commit to record all changes in the working
2511 # directory (if there are any)
2495 # directory (if there are any)
2512 ui.callhooks = False
2496 ui.callhooks = False
2513 activebookmark = repo._activebookmark
2497 activebookmark = repo._activebookmark
2514 try:
2498 try:
2515 repo._activebookmark = None
2499 repo._activebookmark = None
2516 opts['message'] = 'temporary amend commit for %s' % old
2500 opts['message'] = 'temporary amend commit for %s' % old
2517 node = commit(ui, repo, commitfunc, pats, opts)
2501 node = commit(ui, repo, commitfunc, pats, opts)
2518 finally:
2502 finally:
2519 repo._activebookmark = activebookmark
2503 repo._activebookmark = activebookmark
2520 ui.callhooks = True
2504 ui.callhooks = True
2521 ctx = repo[node]
2505 ctx = repo[node]
2522
2506
2523 # Participating changesets:
2507 # Participating changesets:
2524 #
2508 #
2525 # node/ctx o - new (intermediate) commit that contains changes
2509 # node/ctx o - new (intermediate) commit that contains changes
2526 # | from working dir to go into amending commit
2510 # | from working dir to go into amending commit
2527 # | (or a workingctx if there were no changes)
2511 # | (or a workingctx if there were no changes)
2528 # |
2512 # |
2529 # old o - changeset to amend
2513 # old o - changeset to amend
2530 # |
2514 # |
2531 # base o - parent of amending changeset
2515 # base o - parent of amending changeset
2532
2516
2533 # Update extra dict from amended commit (e.g. to preserve graft
2517 # Update extra dict from amended commit (e.g. to preserve graft
2534 # source)
2518 # source)
2535 extra.update(old.extra())
2519 extra.update(old.extra())
2536
2520
2537 # Also update it from the intermediate commit or from the wctx
2521 # Also update it from the intermediate commit or from the wctx
2538 extra.update(ctx.extra())
2522 extra.update(ctx.extra())
2539
2523
2540 if len(old.parents()) > 1:
2524 if len(old.parents()) > 1:
2541 # ctx.files() isn't reliable for merges, so fall back to the
2525 # ctx.files() isn't reliable for merges, so fall back to the
2542 # slower repo.status() method
2526 # slower repo.status() method
2543 files = set([fn for st in repo.status(base, old)[:3]
2527 files = set([fn for st in repo.status(base, old)[:3]
2544 for fn in st])
2528 for fn in st])
2545 else:
2529 else:
2546 files = set(old.files())
2530 files = set(old.files())
2547
2531
2548 # Second, we use either the commit we just did, or, if there were no
2532 # Second, we use either the commit we just did, or, if there were no
2549 # changes, the parent of the working directory as the version of the
2533 # changes, the parent of the working directory as the version of the
2550 # files in the final amend commit
2534 # files in the final amend commit
2551 if node:
2535 if node:
2552 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2536 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2553
2537
2554 user = ctx.user()
2538 user = ctx.user()
2555 date = ctx.date()
2539 date = ctx.date()
2556 # Recompute copies (avoid recording a -> b -> a)
2540 # Recompute copies (avoid recording a -> b -> a)
2557 copied = copies.pathcopies(base, ctx)
2541 copied = copies.pathcopies(base, ctx)
2558 if len(old.parents()) > 1:
2542 if len(old.parents()) > 1:
2559 copied.update(copies.pathcopies(old.p2(), ctx))
2543 copied.update(copies.pathcopies(old.p2(), ctx))
2560
2544
2561 # Prune files which were reverted by the updates: if old
2545 # Prune files which were reverted by the updates: if old
2562 # introduced file X and our intermediate commit, node,
2546 # introduced file X and our intermediate commit, node,
2563 # renamed that file, then those two files are the same and
2547 # renamed that file, then those two files are the same and
2564 # we can discard X from our list of files. Likewise if X
2548 # we can discard X from our list of files. Likewise if X
2565 # was deleted, it's no longer relevant
2549 # was deleted, it's no longer relevant
2566 files.update(ctx.files())
2550 files.update(ctx.files())
2567
2551
2568 def samefile(f):
2552 def samefile(f):
2569 if f in ctx.manifest():
2553 if f in ctx.manifest():
2570 a = ctx.filectx(f)
2554 a = ctx.filectx(f)
2571 if f in base.manifest():
2555 if f in base.manifest():
2572 b = base.filectx(f)
2556 b = base.filectx(f)
2573 return (not a.cmp(b)
2557 return (not a.cmp(b)
2574 and a.flags() == b.flags())
2558 and a.flags() == b.flags())
2575 else:
2559 else:
2576 return False
2560 return False
2577 else:
2561 else:
2578 return f not in base.manifest()
2562 return f not in base.manifest()
2579 files = [f for f in files if not samefile(f)]
2563 files = [f for f in files if not samefile(f)]
2580
2564
2581 def filectxfn(repo, ctx_, path):
2565 def filectxfn(repo, ctx_, path):
2582 try:
2566 try:
2583 fctx = ctx[path]
2567 fctx = ctx[path]
2584 flags = fctx.flags()
2568 flags = fctx.flags()
2585 mctx = context.memfilectx(repo,
2569 mctx = context.memfilectx(repo,
2586 fctx.path(), fctx.data(),
2570 fctx.path(), fctx.data(),
2587 islink='l' in flags,
2571 islink='l' in flags,
2588 isexec='x' in flags,
2572 isexec='x' in flags,
2589 copied=copied.get(path))
2573 copied=copied.get(path))
2590 return mctx
2574 return mctx
2591 except KeyError:
2575 except KeyError:
2592 return None
2576 return None
2593 else:
2577 else:
2594 ui.note(_('copying changeset %s to %s\n') % (old, base))
2578 ui.note(_('copying changeset %s to %s\n') % (old, base))
2595
2579
2596 # Use version of files as in the old cset
2580 # Use version of files as in the old cset
2597 def filectxfn(repo, ctx_, path):
2581 def filectxfn(repo, ctx_, path):
2598 try:
2582 try:
2599 return old.filectx(path)
2583 return old.filectx(path)
2600 except KeyError:
2584 except KeyError:
2601 return None
2585 return None
2602
2586
2603 user = opts.get('user') or old.user()
2587 user = opts.get('user') or old.user()
2604 date = opts.get('date') or old.date()
2588 date = opts.get('date') or old.date()
2605 editform = mergeeditform(old, 'commit.amend')
2589 editform = mergeeditform(old, 'commit.amend')
2606 editor = getcommiteditor(editform=editform, **opts)
2590 editor = getcommiteditor(editform=editform, **opts)
2607 if not message:
2591 if not message:
2608 editor = getcommiteditor(edit=True, editform=editform)
2592 editor = getcommiteditor(edit=True, editform=editform)
2609 message = old.description()
2593 message = old.description()
2610
2594
2611 pureextra = extra.copy()
2595 pureextra = extra.copy()
2612 extra['amend_source'] = old.hex()
2596 extra['amend_source'] = old.hex()
2613
2597
2614 new = context.memctx(repo,
2598 new = context.memctx(repo,
2615 parents=[base.node(), old.p2().node()],
2599 parents=[base.node(), old.p2().node()],
2616 text=message,
2600 text=message,
2617 files=files,
2601 files=files,
2618 filectxfn=filectxfn,
2602 filectxfn=filectxfn,
2619 user=user,
2603 user=user,
2620 date=date,
2604 date=date,
2621 extra=extra,
2605 extra=extra,
2622 editor=editor)
2606 editor=editor)
2623
2607
2624 newdesc = changelog.stripdesc(new.description())
2608 newdesc = changelog.stripdesc(new.description())
2625 if ((not node)
2609 if ((not node)
2626 and newdesc == old.description()
2610 and newdesc == old.description()
2627 and user == old.user()
2611 and user == old.user()
2628 and date == old.date()
2612 and date == old.date()
2629 and pureextra == old.extra()):
2613 and pureextra == old.extra()):
2630 # nothing changed. continuing here would create a new node
2614 # nothing changed. continuing here would create a new node
2631 # anyway because of the amend_source noise.
2615 # anyway because of the amend_source noise.
2632 #
2616 #
2633 # This is not what we expect from amend.
2617 # This is not what we expect from amend.
2634 return old.node()
2618 return old.node()
2635
2619
2636 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2620 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2637 try:
2621 try:
2638 if opts.get('secret'):
2622 if opts.get('secret'):
2639 commitphase = 'secret'
2623 commitphase = 'secret'
2640 else:
2624 else:
2641 commitphase = old.phase()
2625 commitphase = old.phase()
2642 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2626 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2643 newid = repo.commitctx(new)
2627 newid = repo.commitctx(new)
2644 finally:
2628 finally:
2645 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2629 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2646 if newid != old.node():
2630 if newid != old.node():
2647 # Reroute the working copy parent to the new changeset
2631 # Reroute the working copy parent to the new changeset
2648 repo.setparents(newid, nullid)
2632 repo.setparents(newid, nullid)
2649
2633
2650 # Move bookmarks from old parent to amend commit
2634 # Move bookmarks from old parent to amend commit
2651 bms = repo.nodebookmarks(old.node())
2635 bms = repo.nodebookmarks(old.node())
2652 if bms:
2636 if bms:
2653 marks = repo._bookmarks
2637 marks = repo._bookmarks
2654 for bm in bms:
2638 for bm in bms:
2655 ui.debug('moving bookmarks %r from %s to %s\n' %
2639 ui.debug('moving bookmarks %r from %s to %s\n' %
2656 (marks, old.hex(), hex(newid)))
2640 (marks, old.hex(), hex(newid)))
2657 marks[bm] = newid
2641 marks[bm] = newid
2658 marks.recordchange(tr)
2642 marks.recordchange(tr)
2659 # commit the whole amend process
2643 # commit the whole amend process
2660 if createmarkers:
2644 if createmarkers:
2661 # mark the new changeset as successor of the rewritten one
2645 # mark the new changeset as successor of the rewritten one
2662 new = repo[newid]
2646 new = repo[newid]
2663 obs = [(old, (new,))]
2647 obs = [(old, (new,))]
2664 if node:
2648 if node:
2665 obs.append((ctx, ()))
2649 obs.append((ctx, ()))
2666
2650
2667 obsolete.createmarkers(repo, obs)
2651 obsolete.createmarkers(repo, obs)
2668 tr.close()
2652 tr.close()
2669 finally:
2653 finally:
2670 tr.release()
2654 tr.release()
2671 dsguard.close()
2655 dsguard.close()
2672 if not createmarkers and newid != old.node():
2656 if not createmarkers and newid != old.node():
2673 # Strip the intermediate commit (if there was one) and the amended
2657 # Strip the intermediate commit (if there was one) and the amended
2674 # commit
2658 # commit
2675 if node:
2659 if node:
2676 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2660 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2677 ui.note(_('stripping amended changeset %s\n') % old)
2661 ui.note(_('stripping amended changeset %s\n') % old)
2678 repair.strip(ui, repo, old.node(), topic='amend-backup')
2662 repair.strip(ui, repo, old.node(), topic='amend-backup')
2679 finally:
2663 finally:
2680 lockmod.release(lock, dsguard, wlock)
2664 lockmod.release(lock, dsguard, wlock)
2681 return newid
2665 return newid
2682
2666
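# Editor's note: standalone sketch of the samefile() pruning rule used inside
# amend() above, phrased over plain dictionaries instead of manifests and file
# contexts (an assumption for illustration): a path can be dropped from the
# amend file list when the intermediate commit and the base agree on it.
def _demosamefile(path, ctxfiles, basefiles):
    # each mapping is path -> (data, flags)
    if path in ctxfiles:
        return path in basefiles and ctxfiles[path] == basefiles[path]
    return path not in basefiles

# base = {'a': ('old', ''), 'b': ('keep', '')}
# ctx  = {'a': ('new', ''), 'b': ('keep', '')}
# _demosamefile('a', ctx, base) -> False  (changed: stays in files)
# _demosamefile('b', ctx, base) -> True   (unchanged: pruned)
# _demosamefile('c', ctx, base) -> True   (in neither: pruned)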
2683 def commiteditor(repo, ctx, subs, editform=''):
2667 def commiteditor(repo, ctx, subs, editform=''):
2684 if ctx.description():
2668 if ctx.description():
2685 return ctx.description()
2669 return ctx.description()
2686 return commitforceeditor(repo, ctx, subs, editform=editform)
2670 return commitforceeditor(repo, ctx, subs, editform=editform)
2687
2671
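For context, a hedged sketch of how a caller might choose between these two editor callbacks; the option name and editform value below are illustrative, not taken from this changeset:

    # Sketch only: force the interactive editor when the user asked for it,
    # otherwise reuse commiteditor(), which keeps an existing description.
    if opts.get('edit'):
        editor = lambda r, c, s: commitforceeditor(r, c, s, editform='commit.normal')
    else:
        editor = lambda r, c, s: commiteditor(r, c, s, editform='commit.normal')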
2688 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2672 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2689 editform=''):
2673 editform=''):
2690 if not extramsg:
2674 if not extramsg:
2691 extramsg = _("Leave message empty to abort commit.")
2675 extramsg = _("Leave message empty to abort commit.")
2692
2676
2693 forms = [e for e in editform.split('.') if e]
2677 forms = [e for e in editform.split('.') if e]
2694 forms.insert(0, 'changeset')
2678 forms.insert(0, 'changeset')
2695 while forms:
2679 while forms:
2696 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2680 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2697 if tmpl:
2681 if tmpl:
2698 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2682 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2699 break
2683 break
2700 forms.pop()
2684 forms.pop()
2701 else:
2685 else:
2702 committext = buildcommittext(repo, ctx, subs, extramsg)
2686 committext = buildcommittext(repo, ctx, subs, extramsg)
2703
2687
2704 # run editor in the repository root
2688 # run editor in the repository root
2705 olddir = os.getcwd()
2689 olddir = os.getcwd()
2706 os.chdir(repo.root)
2690 os.chdir(repo.root)
2707 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2691 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2708 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2692 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2709 os.chdir(olddir)
2693 os.chdir(olddir)
2710
2694
2711 if finishdesc:
2695 if finishdesc:
2712 text = finishdesc(text)
2696 text = finishdesc(text)
2713 if not text.strip():
2697 if not text.strip():
2714 raise util.Abort(_("empty commit message"))
2698 raise util.Abort(_("empty commit message"))
2715
2699
2716 return text
2700 return text
2717
2701
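The 'committemplate' lookup above walks progressively shorter keys until one is configured. A small standalone sketch of the same cascade, assuming an editform of 'commit.amend.normal':

    def candidatekeys(editform):
        # mirrors the loop in commitforceeditor(): longest key tried first
        forms = [e for e in editform.split('.') if e]
        forms.insert(0, 'changeset')
        while forms:
            yield '.'.join(forms)
            forms.pop()

    # list(candidatekeys('commit.amend.normal')) ==
    # ['changeset.commit.amend.normal', 'changeset.commit.amend',
    #  'changeset.commit', 'changeset']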
2718 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2702 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2719 ui = repo.ui
2703 ui = repo.ui
2720 tmpl, mapfile = gettemplate(ui, tmpl, None)
2704 tmpl, mapfile = gettemplate(ui, tmpl, None)
2721
2705
2722 try:
2706 try:
2723 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2707 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2724 except SyntaxError as inst:
2708 except SyntaxError as inst:
2725 raise util.Abort(inst.args[0])
2709 raise util.Abort(inst.args[0])
2726
2710
2727 for k, v in repo.ui.configitems('committemplate'):
2711 for k, v in repo.ui.configitems('committemplate'):
2728 if k != 'changeset':
2712 if k != 'changeset':
2729 t.t.cache[k] = v
2713 t.t.cache[k] = v
2730
2714
2731 if not extramsg:
2715 if not extramsg:
2732 extramsg = '' # ensure that extramsg is string
2716 extramsg = '' # ensure that extramsg is string
2733
2717
2734 ui.pushbuffer()
2718 ui.pushbuffer()
2735 t.show(ctx, extramsg=extramsg)
2719 t.show(ctx, extramsg=extramsg)
2736 return ui.popbuffer()
2720 return ui.popbuffer()
2737
2721
2738 def hgprefix(msg):
2722 def hgprefix(msg):
2739 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2723 return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
2740
2724
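A quick worked check of hgprefix() (illustrative only): empty lines are dropped and every remaining line gains the 'HG: ' prefix.

    assert hgprefix("user: alice\n\nbranch 'default'") == \
        "HG: user: alice\nHG: branch 'default'"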
2741 def buildcommittext(repo, ctx, subs, extramsg):
2725 def buildcommittext(repo, ctx, subs, extramsg):
2742 edittext = []
2726 edittext = []
2743 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2727 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2744 if ctx.description():
2728 if ctx.description():
2745 edittext.append(ctx.description())
2729 edittext.append(ctx.description())
2746 edittext.append("")
2730 edittext.append("")
2747 edittext.append("") # Empty line between message and comments.
2731 edittext.append("") # Empty line between message and comments.
2748 edittext.append(hgprefix(_("Enter commit message."
2732 edittext.append(hgprefix(_("Enter commit message."
2749 " Lines beginning with 'HG:' are removed.")))
2733 " Lines beginning with 'HG:' are removed.")))
2750 edittext.append(hgprefix(extramsg))
2734 edittext.append(hgprefix(extramsg))
2751 edittext.append("HG: --")
2735 edittext.append("HG: --")
2752 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2736 edittext.append(hgprefix(_("user: %s") % ctx.user()))
2753 if ctx.p2():
2737 if ctx.p2():
2754 edittext.append(hgprefix(_("branch merge")))
2738 edittext.append(hgprefix(_("branch merge")))
2755 if ctx.branch():
2739 if ctx.branch():
2756 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2740 edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
2757 if bookmarks.isactivewdirparent(repo):
2741 if bookmarks.isactivewdirparent(repo):
2758 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2742 edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
2759 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2743 edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
2760 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2744 edittext.extend([hgprefix(_("added %s") % f) for f in added])
2761 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2745 edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
2762 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2746 edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
2763 if not added and not modified and not removed:
2747 if not added and not modified and not removed:
2764 edittext.append(hgprefix(_("no files changed")))
2748 edittext.append(hgprefix(_("no files changed")))
2765 edittext.append("")
2749 edittext.append("")
2766
2750
2767 return "\n".join(edittext)
2751 return "\n".join(edittext)
2768
2752
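For a single modified file on the default branch with no prior description, the text built above looks roughly like this (illustrative, with extramsg left at the default abort hint):

    # (two leading blank lines for the message itself, then the HG: trailer)
    #
    # HG: Enter commit message. Lines beginning with 'HG:' are removed.
    # HG: Leave message empty to abort commit.
    # HG: --
    # HG: user: alice
    # HG: branch 'default'
    # HG: changed foo.py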
2769 def commitstatus(repo, node, branch, bheads=None, opts=None):
2753 def commitstatus(repo, node, branch, bheads=None, opts=None):
2770 if opts is None:
2754 if opts is None:
2771 opts = {}
2755 opts = {}
2772 ctx = repo[node]
2756 ctx = repo[node]
2773 parents = ctx.parents()
2757 parents = ctx.parents()
2774
2758
2775 if (not opts.get('amend') and bheads and node not in bheads and not
2759 if (not opts.get('amend') and bheads and node not in bheads and not
2776 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2760 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2777 repo.ui.status(_('created new head\n'))
2761 repo.ui.status(_('created new head\n'))
2778 # The message is not printed for initial roots. For the other
2762 # The message is not printed for initial roots. For the other
2779 # changesets, it is printed in the following situations:
2763 # changesets, it is printed in the following situations:
2780 #
2764 #
2781 # Par column: for the 2 parents with ...
2765 # Par column: for the 2 parents with ...
2782 # N: null or no parent
2766 # N: null or no parent
2783 # B: parent is on another named branch
2767 # B: parent is on another named branch
2784 # C: parent is a regular non head changeset
2768 # C: parent is a regular non head changeset
2785 # H: parent was a branch head of the current branch
2769 # H: parent was a branch head of the current branch
2786 # Msg column: whether we print "created new head" message
2770 # Msg column: whether we print "created new head" message
2787 # In the following, it is assumed that there already exists some
2771 # In the following, it is assumed that there already exists some
2788 # initial branch heads of the current branch, otherwise nothing is
2772 # initial branch heads of the current branch, otherwise nothing is
2789 # printed anyway.
2773 # printed anyway.
2790 #
2774 #
2791 # Par Msg Comment
2775 # Par Msg Comment
2792 # N N y additional topo root
2776 # N N y additional topo root
2793 #
2777 #
2794 # B N y additional branch root
2778 # B N y additional branch root
2795 # C N y additional topo head
2779 # C N y additional topo head
2796 # H N n usual case
2780 # H N n usual case
2797 #
2781 #
2798 # B B y weird additional branch root
2782 # B B y weird additional branch root
2799 # C B y branch merge
2783 # C B y branch merge
2800 # H B n merge with named branch
2784 # H B n merge with named branch
2801 #
2785 #
2802 # C C y additional head from merge
2786 # C C y additional head from merge
2803 # C H n merge with a head
2787 # C H n merge with a head
2804 #
2788 #
2805 # H H n head merge: head count decreases
2789 # H H n head merge: head count decreases
2806
2790
2807 if not opts.get('close_branch'):
2791 if not opts.get('close_branch'):
2808 for r in parents:
2792 for r in parents:
2809 if r.closesbranch() and r.branch() == branch:
2793 if r.closesbranch() and r.branch() == branch:
2810 repo.ui.status(_('reopening closed branch head %d\n') % r)
2794 repo.ui.status(_('reopening closed branch head %d\n') % r)
2811
2795
2812 if repo.ui.debugflag:
2796 if repo.ui.debugflag:
2813 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2797 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2814 elif repo.ui.verbose:
2798 elif repo.ui.verbose:
2815 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2799 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2816
2800
2817 def revert(ui, repo, ctx, parents, *pats, **opts):
2801 def revert(ui, repo, ctx, parents, *pats, **opts):
2818 parent, p2 = parents
2802 parent, p2 = parents
2819 node = ctx.node()
2803 node = ctx.node()
2820
2804
2821 mf = ctx.manifest()
2805 mf = ctx.manifest()
2822 if node == p2:
2806 if node == p2:
2823 parent = p2
2807 parent = p2
2824 if node == parent:
2808 if node == parent:
2825 pmf = mf
2809 pmf = mf
2826 else:
2810 else:
2827 pmf = None
2811 pmf = None
2828
2812
2829 # need all matching names in dirstate and manifest of target rev,
2813 # need all matching names in dirstate and manifest of target rev,
2830 # so have to walk both. do not print errors if files exist in one
2814 # so have to walk both. do not print errors if files exist in one
2831 # but not the other. in both cases, filesets should be evaluated against
2815 # but not the other. in both cases, filesets should be evaluated against
2832 # workingctx to get consistent result (issue4497). this means 'set:**'
2816 # workingctx to get consistent result (issue4497). this means 'set:**'
2833 # cannot be used to select missing files from target rev.
2817 # cannot be used to select missing files from target rev.
2834
2818
2835 # `names` is a mapping for all elements in working copy and target revision
2819 # `names` is a mapping for all elements in working copy and target revision
2836 # The mapping is in the form:
2820 # The mapping is in the form:
2837 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2821 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2838 names = {}
2822 names = {}
2839
2823
2840 wlock = repo.wlock()
2824 wlock = repo.wlock()
2841 try:
2825 try:
2842 ## filling of the `names` mapping
2826 ## filling of the `names` mapping
2843 # walk dirstate to fill `names`
2827 # walk dirstate to fill `names`
2844
2828
2845 interactive = opts.get('interactive', False)
2829 interactive = opts.get('interactive', False)
2846 wctx = repo[None]
2830 wctx = repo[None]
2847 m = scmutil.match(wctx, pats, opts)
2831 m = scmutil.match(wctx, pats, opts)
2848
2832
2849 # we'll need this later
2833 # we'll need this later
2850 targetsubs = sorted(s for s in wctx.substate if m(s))
2834 targetsubs = sorted(s for s in wctx.substate if m(s))
2851
2835
2852 if not m.always():
2836 if not m.always():
2853 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2837 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2854 names[abs] = m.rel(abs), m.exact(abs)
2838 names[abs] = m.rel(abs), m.exact(abs)
2855
2839
2856 # walk target manifest to fill `names`
2840 # walk target manifest to fill `names`
2857
2841
2858 def badfn(path, msg):
2842 def badfn(path, msg):
2859 if path in names:
2843 if path in names:
2860 return
2844 return
2861 if path in ctx.substate:
2845 if path in ctx.substate:
2862 return
2846 return
2863 path_ = path + '/'
2847 path_ = path + '/'
2864 for f in names:
2848 for f in names:
2865 if f.startswith(path_):
2849 if f.startswith(path_):
2866 return
2850 return
2867 ui.warn("%s: %s\n" % (m.rel(path), msg))
2851 ui.warn("%s: %s\n" % (m.rel(path), msg))
2868
2852
2869 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2853 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2870 if abs not in names:
2854 if abs not in names:
2871 names[abs] = m.rel(abs), m.exact(abs)
2855 names[abs] = m.rel(abs), m.exact(abs)
2872
2856
2873 # Find status of all files in `names`.
2857 # Find status of all files in `names`.
2874 m = scmutil.matchfiles(repo, names)
2858 m = scmutil.matchfiles(repo, names)
2875
2859
2876 changes = repo.status(node1=node, match=m,
2860 changes = repo.status(node1=node, match=m,
2877 unknown=True, ignored=True, clean=True)
2861 unknown=True, ignored=True, clean=True)
2878 else:
2862 else:
2879 changes = repo.status(node1=node, match=m)
2863 changes = repo.status(node1=node, match=m)
2880 for kind in changes:
2864 for kind in changes:
2881 for abs in kind:
2865 for abs in kind:
2882 names[abs] = m.rel(abs), m.exact(abs)
2866 names[abs] = m.rel(abs), m.exact(abs)
2883
2867
2884 m = scmutil.matchfiles(repo, names)
2868 m = scmutil.matchfiles(repo, names)
2885
2869
2886 modified = set(changes.modified)
2870 modified = set(changes.modified)
2887 added = set(changes.added)
2871 added = set(changes.added)
2888 removed = set(changes.removed)
2872 removed = set(changes.removed)
2889 _deleted = set(changes.deleted)
2873 _deleted = set(changes.deleted)
2890 unknown = set(changes.unknown)
2874 unknown = set(changes.unknown)
2891 unknown.update(changes.ignored)
2875 unknown.update(changes.ignored)
2892 clean = set(changes.clean)
2876 clean = set(changes.clean)
2893 modadded = set()
2877 modadded = set()
2894
2878
2895 # split between files known in target manifest and the others
2879 # split between files known in target manifest and the others
2896 smf = set(mf)
2880 smf = set(mf)
2897
2881
2898 # determine the exact nature of the deleted files
2882 # determine the exact nature of the deleted files
2899 deladded = _deleted - smf
2883 deladded = _deleted - smf
2900 deleted = _deleted - deladded
2884 deleted = _deleted - deladded
2901
2885
2902 # We need to account for the state of the file in the dirstate,
2886 # We need to account for the state of the file in the dirstate,
2903 # even when we revert against something other than the parent. This will
2887 # even when we revert against something other than the parent. This will
2904 # slightly alter the behavior of revert (making a backup or not, deleting
2888 # slightly alter the behavior of revert (making a backup or not, deleting
2905 # or just forgetting, etc).
2889 # or just forgetting, etc).
2906 if parent == node:
2890 if parent == node:
2907 dsmodified = modified
2891 dsmodified = modified
2908 dsadded = added
2892 dsadded = added
2909 dsremoved = removed
2893 dsremoved = removed
2910 # store all local modifications, useful later for rename detection
2894 # store all local modifications, useful later for rename detection
2911 localchanges = dsmodified | dsadded
2895 localchanges = dsmodified | dsadded
2912 modified, added, removed = set(), set(), set()
2896 modified, added, removed = set(), set(), set()
2913 else:
2897 else:
2914 changes = repo.status(node1=parent, match=m)
2898 changes = repo.status(node1=parent, match=m)
2915 dsmodified = set(changes.modified)
2899 dsmodified = set(changes.modified)
2916 dsadded = set(changes.added)
2900 dsadded = set(changes.added)
2917 dsremoved = set(changes.removed)
2901 dsremoved = set(changes.removed)
2918 # store all local modifications, useful later for rename detection
2902 # store all local modifications, useful later for rename detection
2919 localchanges = dsmodified | dsadded
2903 localchanges = dsmodified | dsadded
2920
2904
2921 # only take into account for removes between wc and target
2905 # only take into account for removes between wc and target
2922 clean |= dsremoved - removed
2906 clean |= dsremoved - removed
2923 dsremoved &= removed
2907 dsremoved &= removed
2924 # distinguish between dirstate removes and others
2908 # distinguish between dirstate removes and others
2925 removed -= dsremoved
2909 removed -= dsremoved
2926
2910
2927 modadded = added & dsmodified
2911 modadded = added & dsmodified
2928 added -= modadded
2912 added -= modadded
2929
2913
2930 # tell newly modified files apart.
2914 # tell newly modified files apart.
2931 dsmodified &= modified
2915 dsmodified &= modified
2932 dsmodified |= modified & dsadded # dirstate-added files may need backup
2916 dsmodified |= modified & dsadded # dirstate-added files may need backup
2933 modified -= dsmodified
2917 modified -= dsmodified
2934
2918
2935 # We need to wait for some post-processing to update this set
2919 # We need to wait for some post-processing to update this set
2936 # before making the distinction. The dirstate will be used for
2920 # before making the distinction. The dirstate will be used for
2937 # that purpose.
2921 # that purpose.
2938 dsadded = added
2922 dsadded = added
2939
2923
2940 # in case of merge, files that are actually added can be reported as
2924 # in case of merge, files that are actually added can be reported as
2941 # modified, we need to post process the result
2925 # modified, we need to post process the result
2942 if p2 != nullid:
2926 if p2 != nullid:
2943 if pmf is None:
2927 if pmf is None:
2944 # only need parent manifest in the merge case,
2928 # only need parent manifest in the merge case,
2945 # so do not read by default
2929 # so do not read by default
2946 pmf = repo[parent].manifest()
2930 pmf = repo[parent].manifest()
2947 mergeadd = dsmodified - set(pmf)
2931 mergeadd = dsmodified - set(pmf)
2948 dsadded |= mergeadd
2932 dsadded |= mergeadd
2949 dsmodified -= mergeadd
2933 dsmodified -= mergeadd
2950
2934
2951 # if f is a rename, update `names` to also revert the source
2935 # if f is a rename, update `names` to also revert the source
2952 cwd = repo.getcwd()
2936 cwd = repo.getcwd()
2953 for f in localchanges:
2937 for f in localchanges:
2954 src = repo.dirstate.copied(f)
2938 src = repo.dirstate.copied(f)
2955 # XXX should we check for rename down to target node?
2939 # XXX should we check for rename down to target node?
2956 if src and src not in names and repo.dirstate[src] == 'r':
2940 if src and src not in names and repo.dirstate[src] == 'r':
2957 dsremoved.add(src)
2941 dsremoved.add(src)
2958 names[src] = (repo.pathto(src, cwd), True)
2942 names[src] = (repo.pathto(src, cwd), True)
2959
2943
2960 # distinguish between files to forget and the others
2944 # distinguish between files to forget and the others
2961 added = set()
2945 added = set()
2962 for abs in dsadded:
2946 for abs in dsadded:
2963 if repo.dirstate[abs] != 'a':
2947 if repo.dirstate[abs] != 'a':
2964 added.add(abs)
2948 added.add(abs)
2965 dsadded -= added
2949 dsadded -= added
2966
2950
2967 for abs in deladded:
2951 for abs in deladded:
2968 if repo.dirstate[abs] == 'a':
2952 if repo.dirstate[abs] == 'a':
2969 dsadded.add(abs)
2953 dsadded.add(abs)
2970 deladded -= dsadded
2954 deladded -= dsadded
2971
2955
2972 # For files marked as removed, we check if an unknown file is present at
2956 # For files marked as removed, we check if an unknown file is present at
2973 # the same path. If such a file exists, it may need to be backed up.
2957 # the same path. If such a file exists, it may need to be backed up.
2974 # Making the distinction at this stage keeps the backup
2958 # Making the distinction at this stage keeps the backup
2975 # logic simpler.
2959 # logic simpler.
2976 removunk = set()
2960 removunk = set()
2977 for abs in removed:
2961 for abs in removed:
2978 target = repo.wjoin(abs)
2962 target = repo.wjoin(abs)
2979 if os.path.lexists(target):
2963 if os.path.lexists(target):
2980 removunk.add(abs)
2964 removunk.add(abs)
2981 removed -= removunk
2965 removed -= removunk
2982
2966
2983 dsremovunk = set()
2967 dsremovunk = set()
2984 for abs in dsremoved:
2968 for abs in dsremoved:
2985 target = repo.wjoin(abs)
2969 target = repo.wjoin(abs)
2986 if os.path.lexists(target):
2970 if os.path.lexists(target):
2987 dsremovunk.add(abs)
2971 dsremovunk.add(abs)
2988 dsremoved -= dsremovunk
2972 dsremoved -= dsremovunk
2989
2973
2990 # actions to be actually performed by revert
2974 # actions to be actually performed by revert
2991 # (<list of files>, <message>) tuple
2975 # (<list of files>, <message>) tuple
2992 actions = {'revert': ([], _('reverting %s\n')),
2976 actions = {'revert': ([], _('reverting %s\n')),
2993 'add': ([], _('adding %s\n')),
2977 'add': ([], _('adding %s\n')),
2994 'remove': ([], _('removing %s\n')),
2978 'remove': ([], _('removing %s\n')),
2995 'drop': ([], _('removing %s\n')),
2979 'drop': ([], _('removing %s\n')),
2996 'forget': ([], _('forgetting %s\n')),
2980 'forget': ([], _('forgetting %s\n')),
2997 'undelete': ([], _('undeleting %s\n')),
2981 'undelete': ([], _('undeleting %s\n')),
2998 'noop': (None, _('no changes needed to %s\n')),
2982 'noop': (None, _('no changes needed to %s\n')),
2999 'unknown': (None, _('file not managed: %s\n')),
2983 'unknown': (None, _('file not managed: %s\n')),
3000 }
2984 }
3001
2985
3002 # "constants" that convey the backup strategy.
2986 # "constants" that convey the backup strategy.
3003 # All set to `discard` if `no-backup` is set to avoid checking
2987 # All set to `discard` if `no-backup` is set to avoid checking
3004 # no_backup lower in the code.
2988 # no_backup lower in the code.
3005 # These values are ordered for comparison purposes
2989 # These values are ordered for comparison purposes
3006 backup = 2 # unconditionally do backup
2990 backup = 2 # unconditionally do backup
3007 check = 1 # check if the existing file differs from target
2991 check = 1 # check if the existing file differs from target
3008 discard = 0 # never do backup
2992 discard = 0 # never do backup
3009 if opts.get('no_backup'):
2993 if opts.get('no_backup'):
3010 backup = check = discard
2994 backup = check = discard
3011
2995
3012 backupanddel = actions['remove']
2996 backupanddel = actions['remove']
3013 if not opts.get('no_backup'):
2997 if not opts.get('no_backup'):
3014 backupanddel = actions['drop']
2998 backupanddel = actions['drop']
3015
2999
3016 disptable = (
3000 disptable = (
3017 # dispatch table:
3001 # dispatch table:
3018 # file state
3002 # file state
3019 # action
3003 # action
3020 # make backup
3004 # make backup
3021
3005
3022 ## Sets that will result in file changes on disk
3006 ## Sets that will result in file changes on disk
3023 # Modified compared to target, no local change
3007 # Modified compared to target, no local change
3024 (modified, actions['revert'], discard),
3008 (modified, actions['revert'], discard),
3025 # Modified compared to target, but local file is deleted
3009 # Modified compared to target, but local file is deleted
3026 (deleted, actions['revert'], discard),
3010 (deleted, actions['revert'], discard),
3027 # Modified compared to target, local change
3011 # Modified compared to target, local change
3028 (dsmodified, actions['revert'], backup),
3012 (dsmodified, actions['revert'], backup),
3029 # Added since target
3013 # Added since target
3030 (added, actions['remove'], discard),
3014 (added, actions['remove'], discard),
3031 # Added in working directory
3015 # Added in working directory
3032 (dsadded, actions['forget'], discard),
3016 (dsadded, actions['forget'], discard),
3033 # Added since target, have local modification
3017 # Added since target, have local modification
3034 (modadded, backupanddel, backup),
3018 (modadded, backupanddel, backup),
3035 # Added since target but file is missing in working directory
3019 # Added since target but file is missing in working directory
3036 (deladded, actions['drop'], discard),
3020 (deladded, actions['drop'], discard),
3037 # Removed since target, before working copy parent
3021 # Removed since target, before working copy parent
3038 (removed, actions['add'], discard),
3022 (removed, actions['add'], discard),
3039 # Same as `removed` but an unknown file exists at the same path
3023 # Same as `removed` but an unknown file exists at the same path
3040 (removunk, actions['add'], check),
3024 (removunk, actions['add'], check),
3041 # Removed since target, marked as such in working copy parent
3025 # Removed since target, marked as such in working copy parent
3042 (dsremoved, actions['undelete'], discard),
3026 (dsremoved, actions['undelete'], discard),
3043 # Same as `dsremoved` but an unknown file exists at the same path
3027 # Same as `dsremoved` but an unknown file exists at the same path
3044 (dsremovunk, actions['undelete'], check),
3028 (dsremovunk, actions['undelete'], check),
3045 ## the following sets do not result in any file changes
3029 ## the following sets do not result in any file changes
3046 # File with no modification
3030 # File with no modification
3047 (clean, actions['noop'], discard),
3031 (clean, actions['noop'], discard),
3048 # Existing file, not tracked anywhere
3032 # Existing file, not tracked anywhere
3049 (unknown, actions['unknown'], discard),
3033 (unknown, actions['unknown'], discard),
3050 )
3034 )
3051
3035
3052 for abs, (rel, exact) in sorted(names.items()):
3036 for abs, (rel, exact) in sorted(names.items()):
3053 # target file to be touched on disk (relative to cwd)
3037 # target file to be touched on disk (relative to cwd)
3054 target = repo.wjoin(abs)
3038 target = repo.wjoin(abs)
3055 # search the entry in the dispatch table.
3039 # search the entry in the dispatch table.
3056 # if the file is in any of these sets, it was touched in the working
3040 # if the file is in any of these sets, it was touched in the working
3057 # directory parent and we are sure it needs to be reverted.
3041 # directory parent and we are sure it needs to be reverted.
3058 for table, (xlist, msg), dobackup in disptable:
3042 for table, (xlist, msg), dobackup in disptable:
3059 if abs not in table:
3043 if abs not in table:
3060 continue
3044 continue
3061 if xlist is not None:
3045 if xlist is not None:
3062 xlist.append(abs)
3046 xlist.append(abs)
3063 if dobackup and (backup <= dobackup
3047 if dobackup and (backup <= dobackup
3064 or wctx[abs].cmp(ctx[abs])):
3048 or wctx[abs].cmp(ctx[abs])):
3065 bakname = "%s.orig" % rel
3049 bakname = "%s.orig" % rel
3066 ui.note(_('saving current version of %s as %s\n') %
3050 ui.note(_('saving current version of %s as %s\n') %
3067 (rel, bakname))
3051 (rel, bakname))
3068 if not opts.get('dry_run'):
3052 if not opts.get('dry_run'):
3069 if interactive:
3053 if interactive:
3070 util.copyfile(target, bakname)
3054 util.copyfile(target, bakname)
3071 else:
3055 else:
3072 util.rename(target, bakname)
3056 util.rename(target, bakname)
3073 if ui.verbose or not exact:
3057 if ui.verbose or not exact:
3074 if not isinstance(msg, basestring):
3058 if not isinstance(msg, basestring):
3075 msg = msg(abs)
3059 msg = msg(abs)
3076 ui.status(msg % rel)
3060 ui.status(msg % rel)
3077 elif exact:
3061 elif exact:
3078 ui.warn(msg % rel)
3062 ui.warn(msg % rel)
3079 break
3063 break
3080
3064
3081 if not opts.get('dry_run'):
3065 if not opts.get('dry_run'):
3082 needdata = ('revert', 'add', 'undelete')
3066 needdata = ('revert', 'add', 'undelete')
3083 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3067 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3084 _performrevert(repo, parents, ctx, actions, interactive)
3068 _performrevert(repo, parents, ctx, actions, interactive)
3085
3069
3086 if targetsubs:
3070 if targetsubs:
3087 # Revert the subrepos on the revert list
3071 # Revert the subrepos on the revert list
3088 for sub in targetsubs:
3072 for sub in targetsubs:
3089 try:
3073 try:
3090 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3074 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3091 except KeyError:
3075 except KeyError:
3092 raise util.Abort("subrepository '%s' does not exist in %s!"
3076 raise util.Abort("subrepository '%s' does not exist in %s!"
3093 % (sub, short(ctx.node())))
3077 % (sub, short(ctx.node())))
3094 finally:
3078 finally:
3095 wlock.release()
3079 wlock.release()
3096
3080
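A hedged usage sketch for revert(): the file name and target revision are made up, and the parents tuple is taken from the dirstate the way the built-in callers do.

    target = repo['tip']                      # revision to revert to (example)
    parents = repo.dirstate.parents()         # (p1, p2) nodes of the working copy
    cmdutil.revert(ui, repo, target, parents, 'path/to/file.py', no_backup=False)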
3097 def _revertprefetch(repo, ctx, *files):
3081 def _revertprefetch(repo, ctx, *files):
3098 """Let extensions that change the storage layer prefetch content"""
3082 """Let extensions that change the storage layer prefetch content"""
3099 pass
3083 pass
3100
3084
3101 def _performrevert(repo, parents, ctx, actions, interactive=False):
3085 def _performrevert(repo, parents, ctx, actions, interactive=False):
3102 """function that actually performs all the actions computed for revert
3086 """function that actually performs all the actions computed for revert
3103
3087
3104 This is an independent function to let extensions plug in and react to
3088 This is an independent function to let extensions plug in and react to
3105 the imminent revert.
3089 the imminent revert.
3106
3090
3107 Make sure you have the working directory locked when calling this function.
3091 Make sure you have the working directory locked when calling this function.
3108 """
3092 """
3109 parent, p2 = parents
3093 parent, p2 = parents
3110 node = ctx.node()
3094 node = ctx.node()
3111 def checkout(f):
3095 def checkout(f):
3112 fc = ctx[f]
3096 fc = ctx[f]
3113 repo.wwrite(f, fc.data(), fc.flags())
3097 repo.wwrite(f, fc.data(), fc.flags())
3114
3098
3115 audit_path = pathutil.pathauditor(repo.root)
3099 audit_path = pathutil.pathauditor(repo.root)
3116 for f in actions['forget'][0]:
3100 for f in actions['forget'][0]:
3117 repo.dirstate.drop(f)
3101 repo.dirstate.drop(f)
3118 for f in actions['remove'][0]:
3102 for f in actions['remove'][0]:
3119 audit_path(f)
3103 audit_path(f)
3120 try:
3104 try:
3121 util.unlinkpath(repo.wjoin(f))
3105 util.unlinkpath(repo.wjoin(f))
3122 except OSError:
3106 except OSError:
3123 pass
3107 pass
3124 repo.dirstate.remove(f)
3108 repo.dirstate.remove(f)
3125 for f in actions['drop'][0]:
3109 for f in actions['drop'][0]:
3126 audit_path(f)
3110 audit_path(f)
3127 repo.dirstate.remove(f)
3111 repo.dirstate.remove(f)
3128
3112
3129 normal = None
3113 normal = None
3130 if node == parent:
3114 if node == parent:
3131 # We're reverting to our parent. If possible, we'd like status
3115 # We're reverting to our parent. If possible, we'd like status
3132 # to report the file as clean. We have to use normallookup for
3116 # to report the file as clean. We have to use normallookup for
3133 # merges to avoid losing information about merged/dirty files.
3117 # merges to avoid losing information about merged/dirty files.
3134 if p2 != nullid:
3118 if p2 != nullid:
3135 normal = repo.dirstate.normallookup
3119 normal = repo.dirstate.normallookup
3136 else:
3120 else:
3137 normal = repo.dirstate.normal
3121 normal = repo.dirstate.normal
3138
3122
3139 newlyaddedandmodifiedfiles = set()
3123 newlyaddedandmodifiedfiles = set()
3140 if interactive:
3124 if interactive:
3141 # Prompt the user for changes to revert
3125 # Prompt the user for changes to revert
3142 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3126 torevert = [repo.wjoin(f) for f in actions['revert'][0]]
3143 m = scmutil.match(ctx, torevert, {})
3127 m = scmutil.match(ctx, torevert, {})
3144 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3128 diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
3145 diffopts.nodates = True
3129 diffopts.nodates = True
3146 diffopts.git = True
3130 diffopts.git = True
3147 reversehunks = repo.ui.configbool('experimental',
3131 reversehunks = repo.ui.configbool('experimental',
3148 'revertalternateinteractivemode',
3132 'revertalternateinteractivemode',
3149 True)
3133 True)
3150 if reversehunks:
3134 if reversehunks:
3151 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3135 diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
3152 else:
3136 else:
3153 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3137 diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
3154 originalchunks = patch.parsepatch(diff)
3138 originalchunks = patch.parsepatch(diff)
3155
3139
3156 try:
3140 try:
3157
3141
3158 chunks = recordfilter(repo.ui, originalchunks)
3142 chunks = recordfilter(repo.ui, originalchunks)
3159 if reversehunks:
3143 if reversehunks:
3160 chunks = patch.reversehunks(chunks)
3144 chunks = patch.reversehunks(chunks)
3161
3145
3162 except patch.PatchError as err:
3146 except patch.PatchError as err:
3163 raise util.Abort(_('error parsing patch: %s') % err)
3147 raise util.Abort(_('error parsing patch: %s') % err)
3164
3148
3165 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3149 newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
3166 # Apply changes
3150 # Apply changes
3167 fp = cStringIO.StringIO()
3151 fp = cStringIO.StringIO()
3168 for c in chunks:
3152 for c in chunks:
3169 c.write(fp)
3153 c.write(fp)
3170 dopatch = fp.tell()
3154 dopatch = fp.tell()
3171 fp.seek(0)
3155 fp.seek(0)
3172 if dopatch:
3156 if dopatch:
3173 try:
3157 try:
3174 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3158 patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
3175 except patch.PatchError as err:
3159 except patch.PatchError as err:
3176 raise util.Abort(str(err))
3160 raise util.Abort(str(err))
3177 del fp
3161 del fp
3178 else:
3162 else:
3179 for f in actions['revert'][0]:
3163 for f in actions['revert'][0]:
3180 checkout(f)
3164 checkout(f)
3181 if normal:
3165 if normal:
3182 normal(f)
3166 normal(f)
3183
3167
3184 for f in actions['add'][0]:
3168 for f in actions['add'][0]:
3185 # Don't check out modified files; they are already created by the diff
3169 # Don't check out modified files; they are already created by the diff
3186 if f not in newlyaddedandmodifiedfiles:
3170 if f not in newlyaddedandmodifiedfiles:
3187 checkout(f)
3171 checkout(f)
3188 repo.dirstate.add(f)
3172 repo.dirstate.add(f)
3189
3173
3190 normal = repo.dirstate.normallookup
3174 normal = repo.dirstate.normallookup
3191 if node == parent and p2 == nullid:
3175 if node == parent and p2 == nullid:
3192 normal = repo.dirstate.normal
3176 normal = repo.dirstate.normal
3193 for f in actions['undelete'][0]:
3177 for f in actions['undelete'][0]:
3194 checkout(f)
3178 checkout(f)
3195 normal(f)
3179 normal(f)
3196
3180
3197 copied = copies.pathcopies(repo[parent], ctx)
3181 copied = copies.pathcopies(repo[parent], ctx)
3198
3182
3199 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3183 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
3200 if f in copied:
3184 if f in copied:
3201 repo.dirstate.copy(copied[f], f)
3185 repo.dirstate.copy(copied[f], f)
3202
3186
3203 def command(table):
3187 def command(table):
3204 """Returns a function object to be used as a decorator for making commands.
3188 """Returns a function object to be used as a decorator for making commands.
3205
3189
3206 This function receives a command table as its argument. The table should
3190 This function receives a command table as its argument. The table should
3207 be a dict.
3191 be a dict.
3208
3192
3209 The returned function can be used as a decorator for adding commands
3193 The returned function can be used as a decorator for adding commands
3210 to that command table. This function accepts multiple arguments to define
3194 to that command table. This function accepts multiple arguments to define
3211 a command.
3195 a command.
3212
3196
3213 The first argument is the command name.
3197 The first argument is the command name.
3214
3198
3215 The options argument is an iterable of tuples defining command arguments.
3199 The options argument is an iterable of tuples defining command arguments.
3216 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3200 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
3217
3201
3218 The synopsis argument defines a short, one line summary of how to use the
3202 The synopsis argument defines a short, one line summary of how to use the
3219 command. This shows up in the help output.
3203 command. This shows up in the help output.
3220
3204
3221 The norepo argument defines whether the command does not require a
3205 The norepo argument defines whether the command does not require a
3222 local repository. Most commands operate against a repository, thus the
3206 local repository. Most commands operate against a repository, thus the
3223 default is False.
3207 default is False.
3224
3208
3225 The optionalrepo argument defines whether the command optionally requires
3209 The optionalrepo argument defines whether the command optionally requires
3226 a local repository.
3210 a local repository.
3227
3211
3228 The inferrepo argument defines whether to try to find a repository from the
3212 The inferrepo argument defines whether to try to find a repository from the
3229 command line arguments. If True, arguments will be examined for potential
3213 command line arguments. If True, arguments will be examined for potential
3230 repository locations. See ``findrepo()``. If a repository is found, it
3214 repository locations. See ``findrepo()``. If a repository is found, it
3231 will be used.
3215 will be used.
3232 """
3216 """
3233 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3217 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
3234 inferrepo=False):
3218 inferrepo=False):
3235 def decorator(func):
3219 def decorator(func):
3236 if synopsis:
3220 if synopsis:
3237 table[name] = func, list(options), synopsis
3221 table[name] = func, list(options), synopsis
3238 else:
3222 else:
3239 table[name] = func, list(options)
3223 table[name] = func, list(options)
3240
3224
3241 if norepo:
3225 if norepo:
3242 # Avoid import cycle.
3226 # Avoid import cycle.
3243 import commands
3227 import commands
3244 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3228 commands.norepo += ' %s' % ' '.join(parsealiases(name))
3245
3229
3246 if optionalrepo:
3230 if optionalrepo:
3247 import commands
3231 import commands
3248 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3232 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
3249
3233
3250 if inferrepo:
3234 if inferrepo:
3251 import commands
3235 import commands
3252 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3236 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
3253
3237
3254 return func
3238 return func
3255 return decorator
3239 return decorator
3256
3240
3257 return cmd
3241 return cmd
3258
3242
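A hedged sketch of how an extension typically uses the returned decorator; the command name, option, and body are invented for illustration, and the option tuple follows the fancyopts format mentioned in the docstring above.

    from mercurial import cmdutil
    from mercurial.i18n import _

    cmdtable = {}
    command = cmdutil.command(cmdtable)

    @command('hello', [('g', 'greeting', 'Hello', _('greeting to use'))],
             _('hg hello [-g TEXT]'), norepo=True)
    def hello(ui, **opts):
        # registered in cmdtable by the decorator; norepo means no repo argument
        ui.write("%s, world\n" % opts['greeting'])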
3259 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3243 # a list of (ui, repo, otherpeer, opts, missing) functions called by
3260 # commands.outgoing. "missing" is "missing" of the result of
3244 # commands.outgoing. "missing" is "missing" of the result of
3261 # "findcommonoutgoing()"
3245 # "findcommonoutgoing()"
3262 outgoinghooks = util.hooks()
3246 outgoinghooks = util.hooks()
3263
3247
3264 # a list of (ui, repo) functions called by commands.summary
3248 # a list of (ui, repo) functions called by commands.summary
3265 summaryhooks = util.hooks()
3249 summaryhooks = util.hooks()
3266
3250
3267 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3251 # a list of (ui, repo, opts, changes) functions called by commands.summary.
3268 #
3252 #
3269 # functions should return tuple of booleans below, if 'changes' is None:
3253 # functions should return tuple of booleans below, if 'changes' is None:
3270 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3254 # (whether-incomings-are-needed, whether-outgoings-are-needed)
3271 #
3255 #
3272 # otherwise, 'changes' is a tuple of tuples below:
3256 # otherwise, 'changes' is a tuple of tuples below:
3273 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3257 # - (sourceurl, sourcebranch, sourcepeer, incoming)
3274 # - (desturl, destbranch, destpeer, outgoing)
3258 # - (desturl, destbranch, destpeer, outgoing)
3275 summaryremotehooks = util.hooks()
3259 summaryremotehooks = util.hooks()
3276
3260
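A hedged sketch of registering one of these hooks from an extension, assuming util.hooks exposes an add(source, hook) method as used elsewhere in Mercurial; the extension name and message are invented.

    def summaryhook(ui, repo):
        # appended to the output of 'hg summary'
        ui.status('myext: nothing to report\n')

    def uisetup(ui):
        cmdutil.summaryhooks.add('myext', summaryhook)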
3277 # A list of state files kept by multistep operations like graft.
3261 # A list of state files kept by multistep operations like graft.
3278 # Since graft cannot be aborted, it is considered 'clearable' by update.
3262 # Since graft cannot be aborted, it is considered 'clearable' by update.
3279 # note: bisect is intentionally excluded
3263 # note: bisect is intentionally excluded
3280 # (state file, clearable, allowcommit, error, hint)
3264 # (state file, clearable, allowcommit, error, hint)
3281 unfinishedstates = [
3265 unfinishedstates = [
3282 ('graftstate', True, False, _('graft in progress'),
3266 ('graftstate', True, False, _('graft in progress'),
3283 _("use 'hg graft --continue' or 'hg update' to abort")),
3267 _("use 'hg graft --continue' or 'hg update' to abort")),
3284 ('updatestate', True, False, _('last update was interrupted'),
3268 ('updatestate', True, False, _('last update was interrupted'),
3285 _("use 'hg update' to get a consistent checkout"))
3269 _("use 'hg update' to get a consistent checkout"))
3286 ]
3270 ]
3287
3271
3288 def checkunfinished(repo, commit=False):
3272 def checkunfinished(repo, commit=False):
3289 '''Look for an unfinished multistep operation, like graft, and abort
3273 '''Look for an unfinished multistep operation, like graft, and abort
3290 if found. It's probably good to check this right before
3274 if found. It's probably good to check this right before
3291 bailifchanged().
3275 bailifchanged().
3292 '''
3276 '''
3293 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3277 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3294 if commit and allowcommit:
3278 if commit and allowcommit:
3295 continue
3279 continue
3296 if repo.vfs.exists(f):
3280 if repo.vfs.exists(f):
3297 raise util.Abort(msg, hint=hint)
3281 raise util.Abort(msg, hint=hint)
3298
3282
3299 def clearunfinished(repo):
3283 def clearunfinished(repo):
3300 '''Check for unfinished operations (as above), and clear the ones
3284 '''Check for unfinished operations (as above), and clear the ones
3301 that are clearable.
3285 that are clearable.
3302 '''
3286 '''
3303 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3287 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3304 if not clearable and repo.vfs.exists(f):
3288 if not clearable and repo.vfs.exists(f):
3305 raise util.Abort(msg, hint=hint)
3289 raise util.Abort(msg, hint=hint)
3306 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3290 for f, clearable, allowcommit, msg, hint in unfinishedstates:
3307 if clearable and repo.vfs.exists(f):
3291 if clearable and repo.vfs.exists(f):
3308 util.unlink(repo.join(f))
3292 util.unlink(repo.join(f))
3309
3293
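A hedged sketch of how an extension with its own multistep command plugs into this machinery; the state file name, messages, and flags are illustrative only.

    cmdutil.unfinishedstates.append(
        ('myextstate', False, False, _('myext operation in progress'),
         _("use 'hg myext --continue' or 'hg myext --abort'")))

    # later, before starting a new operation:
    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)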
3310 class dirstateguard(object):
3294 class dirstateguard(object):
3311 '''Restore dirstate at unexpected failure.
3295 '''Restore dirstate at unexpected failure.
3312
3296
3313 At the construction, this class does:
3297 At the construction, this class does:
3314
3298
3315 - write current ``repo.dirstate`` out, and
3299 - write current ``repo.dirstate`` out, and
3316 - save ``.hg/dirstate`` into the backup file
3300 - save ``.hg/dirstate`` into the backup file
3317
3301
3318 This restores ``.hg/dirstate`` from the backup file, if ``release()``
3302 This restores ``.hg/dirstate`` from the backup file, if ``release()``
3319 is invoked before ``close()``.
3303 is invoked before ``close()``.
3320
3304
3321 It just removes the backup file when ``close()`` is invoked before ``release()``.
3305 It just removes the backup file when ``close()`` is invoked before ``release()``.
3322 '''
3306 '''
3323
3307
3324 def __init__(self, repo, name):
3308 def __init__(self, repo, name):
3325 repo.dirstate.write()
3309 repo.dirstate.write()
3326 self._repo = repo
3310 self._repo = repo
3327 self._filename = 'dirstate.backup.%s.%d' % (name, id(self))
3311 self._filename = 'dirstate.backup.%s.%d' % (name, id(self))
3328 repo.vfs.write(self._filename, repo.vfs.tryread('dirstate'))
3312 repo.vfs.write(self._filename, repo.vfs.tryread('dirstate'))
3329 self._active = True
3313 self._active = True
3330 self._closed = False
3314 self._closed = False
3331
3315
3332 def __del__(self):
3316 def __del__(self):
3333 if self._active: # still active
3317 if self._active: # still active
3334 # this may occur, even if this class is used correctly:
3318 # this may occur, even if this class is used correctly:
3335 # for example, releasing other resources like transaction
3319 # for example, releasing other resources like transaction
3336 # may raise exception before ``dirstateguard.release`` in
3320 # may raise exception before ``dirstateguard.release`` in
3337 # ``release(tr, ....)``.
3321 # ``release(tr, ....)``.
3338 self._abort()
3322 self._abort()
3339
3323
3340 def close(self):
3324 def close(self):
3341 if not self._active: # already inactivated
3325 if not self._active: # already inactivated
3342 msg = (_("can't close already inactivated backup: %s")
3326 msg = (_("can't close already inactivated backup: %s")
3343 % self._filename)
3327 % self._filename)
3344 raise util.Abort(msg)
3328 raise util.Abort(msg)
3345
3329
3346 self._repo.vfs.unlink(self._filename)
3330 self._repo.vfs.unlink(self._filename)
3347 self._active = False
3331 self._active = False
3348 self._closed = True
3332 self._closed = True
3349
3333
3350 def _abort(self):
3334 def _abort(self):
3351 # this "invalidate()" prevents "wlock.release()" from writing
3335 # this "invalidate()" prevents "wlock.release()" from writing
3352 # changes of dirstate out after restoring to original status
3336 # changes of dirstate out after restoring to original status
3353 self._repo.dirstate.invalidate()
3337 self._repo.dirstate.invalidate()
3354
3338
3355 self._repo.vfs.rename(self._filename, 'dirstate')
3339 self._repo.vfs.rename(self._filename, 'dirstate')
3356 self._active = False
3340 self._active = False
3357
3341
3358 def release(self):
3342 def release(self):
3359 if not self._closed:
3343 if not self._closed:
3360 if not self._active: # already inactivated
3344 if not self._active: # already inactivated
3361 msg = (_("can't release already inactivated backup: %s")
3345 msg = (_("can't release already inactivated backup: %s")
3362 % self._filename)
3346 % self._filename)
3363 raise util.Abort(msg)
3347 raise util.Abort(msg)
3364 self._abort()
3348 self._abort()
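A hedged usage sketch for dirstateguard, mirroring the amend code earlier in this file; the operation name is arbitrary.

    dsguard = dirstateguard(repo, 'myoperation')
    try:
        # ... perform dirstate-mutating work here ...
        dsguard.close()            # success: just drop the backup
    finally:
        lockmod.release(dsguard)   # on failure, release() restores the backup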
@@ -1,1134 +1,1150 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import wdirrev
9 from mercurial.node import wdirrev
10 import util, error, osutil, revset, similar, encoding, phases
10 import util, error, osutil, revset, similar, encoding, phases
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile, shutil, stat
13 import os, errno, re, glob, tempfile, shutil, stat
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
23 class status(tuple):
23 class status(tuple):
24 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
24 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
25 and 'ignored' properties are only relevant to the working copy.
25 and 'ignored' properties are only relevant to the working copy.
26 '''
26 '''
27
27
28 __slots__ = ()
28 __slots__ = ()
29
29
30 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
30 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
31 clean):
31 clean):
32 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
32 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
33 ignored, clean))
33 ignored, clean))
34
34
35 @property
35 @property
36 def modified(self):
36 def modified(self):
37 '''files that have been modified'''
37 '''files that have been modified'''
38 return self[0]
38 return self[0]
39
39
40 @property
40 @property
41 def added(self):
41 def added(self):
42 '''files that have been added'''
42 '''files that have been added'''
43 return self[1]
43 return self[1]
44
44
45 @property
45 @property
46 def removed(self):
46 def removed(self):
47 '''files that have been removed'''
47 '''files that have been removed'''
48 return self[2]
48 return self[2]
49
49
50 @property
50 @property
51 def deleted(self):
51 def deleted(self):
52 '''files that are in the dirstate, but have been deleted from the
52 '''files that are in the dirstate, but have been deleted from the
53 working copy (aka "missing")
53 working copy (aka "missing")
54 '''
54 '''
55 return self[3]
55 return self[3]
56
56
57 @property
57 @property
58 def unknown(self):
58 def unknown(self):
59 '''files not in the dirstate that are not ignored'''
59 '''files not in the dirstate that are not ignored'''
60 return self[4]
60 return self[4]
61
61
62 @property
62 @property
63 def ignored(self):
63 def ignored(self):
64 '''files not in the dirstate that are ignored (by _dirignore())'''
64 '''files not in the dirstate that are ignored (by _dirignore())'''
65 return self[5]
65 return self[5]
66
66
67 @property
67 @property
68 def clean(self):
68 def clean(self):
69 '''files that have not been modified'''
69 '''files that have not been modified'''
70 return self[6]
70 return self[6]
71
71
72 def __repr__(self, *args, **kwargs):
72 def __repr__(self, *args, **kwargs):
73 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
73 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
74 'unknown=%r, ignored=%r, clean=%r>') % self)
74 'unknown=%r, ignored=%r, clean=%r>') % self)
75
75
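A brief hedged example: repo.status() returns an instance of this class, so callers can use either the named properties or plain tuple unpacking.

    st = repo.status()                 # scmutil.status instance
    for f in st.modified:
        ui.write('M %s\n' % f)
    modified, added, removed = st[:3]  # still behaves like a tuple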
76 def itersubrepos(ctx1, ctx2):
76 def itersubrepos(ctx1, ctx2):
77 """find subrepos in ctx1 or ctx2"""
77 """find subrepos in ctx1 or ctx2"""
78 # Create a (subpath, ctx) mapping where we prefer subpaths from
78 # Create a (subpath, ctx) mapping where we prefer subpaths from
79 # ctx1. The subpaths from ctx2 are important when the .hgsub file
79 # ctx1. The subpaths from ctx2 are important when the .hgsub file
80 # has been modified (in ctx2) but not yet committed (in ctx1).
80 # has been modified (in ctx2) but not yet committed (in ctx1).
81 subpaths = dict.fromkeys(ctx2.substate, ctx2)
81 subpaths = dict.fromkeys(ctx2.substate, ctx2)
82 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
82 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
83
83
84 missing = set()
84 missing = set()
85
85
86 for subpath in ctx2.substate:
86 for subpath in ctx2.substate:
87 if subpath not in ctx1.substate:
87 if subpath not in ctx1.substate:
88 del subpaths[subpath]
88 del subpaths[subpath]
89 missing.add(subpath)
89 missing.add(subpath)
90
90
91 for subpath, ctx in sorted(subpaths.iteritems()):
91 for subpath, ctx in sorted(subpaths.iteritems()):
92 yield subpath, ctx.sub(subpath)
92 yield subpath, ctx.sub(subpath)
93
93
94 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
94 # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
95 # status and diff will have an accurate result when it does
95 # status and diff will have an accurate result when it does
96 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
96 # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
97 # against itself.
97 # against itself.
98 for subpath in missing:
98 for subpath in missing:
99 yield subpath, ctx2.nullsub(subpath, ctx1)
99 yield subpath, ctx2.nullsub(subpath, ctx1)
100
100
101 def nochangesfound(ui, repo, excluded=None):
101 def nochangesfound(ui, repo, excluded=None):
102 '''Report no changes for push/pull, excluded is None or a list of
102 '''Report no changes for push/pull, excluded is None or a list of
103 nodes excluded from the push/pull.
103 nodes excluded from the push/pull.
104 '''
104 '''
105 secretlist = []
105 secretlist = []
106 if excluded:
106 if excluded:
107 for n in excluded:
107 for n in excluded:
108 if n not in repo:
108 if n not in repo:
109 # discovery should not have included the filtered revision,
109 # discovery should not have included the filtered revision,
110 # we have to explicitly exclude it until discovery is cleaned up.
110 # we have to explicitly exclude it until discovery is cleaned up.
111 continue
111 continue
112 ctx = repo[n]
112 ctx = repo[n]
113 if ctx.phase() >= phases.secret and not ctx.extinct():
113 if ctx.phase() >= phases.secret and not ctx.extinct():
114 secretlist.append(n)
114 secretlist.append(n)
115
115
116 if secretlist:
116 if secretlist:
117 ui.status(_("no changes found (ignored %d secret changesets)\n")
117 ui.status(_("no changes found (ignored %d secret changesets)\n")
118 % len(secretlist))
118 % len(secretlist))
119 else:
119 else:
120 ui.status(_("no changes found\n"))
120 ui.status(_("no changes found\n"))
121
121
122 def checknewlabel(repo, lbl, kind):
122 def checknewlabel(repo, lbl, kind):
123 # Do not use the "kind" parameter in ui output.
123 # Do not use the "kind" parameter in ui output.
124 # It makes strings difficult to translate.
124 # It makes strings difficult to translate.
125 if lbl in ['tip', '.', 'null']:
125 if lbl in ['tip', '.', 'null']:
126 raise util.Abort(_("the name '%s' is reserved") % lbl)
126 raise util.Abort(_("the name '%s' is reserved") % lbl)
127 for c in (':', '\0', '\n', '\r'):
127 for c in (':', '\0', '\n', '\r'):
128 if c in lbl:
128 if c in lbl:
129 raise util.Abort(_("%r cannot be used in a name") % c)
129 raise util.Abort(_("%r cannot be used in a name") % c)
130 try:
130 try:
131 int(lbl)
131 int(lbl)
132 raise util.Abort(_("cannot use an integer as a name"))
132 raise util.Abort(_("cannot use an integer as a name"))
133 except ValueError:
133 except ValueError:
134 pass
134 pass
135
135
136 def checkfilename(f):
136 def checkfilename(f):
137 '''Check that the filename f is an acceptable filename for a tracked file'''
137 '''Check that the filename f is an acceptable filename for a tracked file'''
138 if '\r' in f or '\n' in f:
138 if '\r' in f or '\n' in f:
139 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
139 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
140
140
141 def checkportable(ui, f):
141 def checkportable(ui, f):
142 '''Check if filename f is portable and warn or abort depending on config'''
142 '''Check if filename f is portable and warn or abort depending on config'''
143 checkfilename(f)
143 checkfilename(f)
144 abort, warn = checkportabilityalert(ui)
144 abort, warn = checkportabilityalert(ui)
145 if abort or warn:
145 if abort or warn:
146 msg = util.checkwinfilename(f)
146 msg = util.checkwinfilename(f)
147 if msg:
147 if msg:
148 msg = "%s: %r" % (msg, f)
148 msg = "%s: %r" % (msg, f)
149 if abort:
149 if abort:
150 raise util.Abort(msg)
150 raise util.Abort(msg)
151 ui.warn(_("warning: %s\n") % msg)
151 ui.warn(_("warning: %s\n") % msg)
152
152
153 def checkportabilityalert(ui):
153 def checkportabilityalert(ui):
154 '''check if the user's config requests nothing, a warning, or abort for
154 '''check if the user's config requests nothing, a warning, or abort for
155 non-portable filenames'''
155 non-portable filenames'''
156 val = ui.config('ui', 'portablefilenames', 'warn')
156 val = ui.config('ui', 'portablefilenames', 'warn')
157 lval = val.lower()
157 lval = val.lower()
158 bval = util.parsebool(val)
158 bval = util.parsebool(val)
159 abort = os.name == 'nt' or lval == 'abort'
159 abort = os.name == 'nt' or lval == 'abort'
160 warn = bval or lval == 'warn'
160 warn = bval or lval == 'warn'
161 if bval is None and not (warn or abort or lval == 'ignore'):
161 if bval is None and not (warn or abort or lval == 'ignore'):
162 raise error.ConfigError(
162 raise error.ConfigError(
163 _("ui.portablefilenames value is invalid ('%s')") % val)
163 _("ui.portablefilenames value is invalid ('%s')") % val)
164 return abort, warn
164 return abort, warn
165
165
166 class casecollisionauditor(object):
166 class casecollisionauditor(object):
167 def __init__(self, ui, abort, dirstate):
167 def __init__(self, ui, abort, dirstate):
168 self._ui = ui
168 self._ui = ui
169 self._abort = abort
169 self._abort = abort
170 allfiles = '\0'.join(dirstate._map)
170 allfiles = '\0'.join(dirstate._map)
171 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
171 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
172 self._dirstate = dirstate
172 self._dirstate = dirstate
173 # The purpose of _newfiles is so that we don't complain about
173 # The purpose of _newfiles is so that we don't complain about
174 # case collisions if someone were to call this object with the
174 # case collisions if someone were to call this object with the
175 # same filename twice.
175 # same filename twice.
176 self._newfiles = set()
176 self._newfiles = set()
177
177
178 def __call__(self, f):
178 def __call__(self, f):
179 if f in self._newfiles:
179 if f in self._newfiles:
180 return
180 return
181 fl = encoding.lower(f)
181 fl = encoding.lower(f)
182 if fl in self._loweredfiles and f not in self._dirstate:
182 if fl in self._loweredfiles and f not in self._dirstate:
183 msg = _('possible case-folding collision for %s') % f
183 msg = _('possible case-folding collision for %s') % f
184 if self._abort:
184 if self._abort:
185 raise util.Abort(msg)
185 raise util.Abort(msg)
186 self._ui.warn(_("warning: %s\n") % msg)
186 self._ui.warn(_("warning: %s\n") % msg)
187 self._loweredfiles.add(fl)
187 self._loweredfiles.add(fl)
188 self._newfiles.add(f)
188 self._newfiles.add(f)
189
189
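# Usage sketch (editor's addition, not part of this changeset): callers feed
# each filename they are about to add to the auditor, which warns or aborts
# when a name collides with an existing one only by case.  _checkadds() is a
# hypothetical helper, shown only to illustrate the call.
def _checkadds(ui, repo, files):
    audit = casecollisionauditor(ui, False, repo.dirstate)
    for f in files:
        audit(f)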
190 def filteredhash(repo, maxrev):
190 def filteredhash(repo, maxrev):
191 """build hash of filtered revisions in the current repoview.
191 """build hash of filtered revisions in the current repoview.
192
192
193 Multiple caches perform up-to-date validation by checking that the
193 Multiple caches perform up-to-date validation by checking that the
194 tiprev and tipnode stored in the cache file match the current repository.
194 tiprev and tipnode stored in the cache file match the current repository.
195 However, this is not sufficient for validating repoviews because the set
195 However, this is not sufficient for validating repoviews because the set
196 of revisions in the view may change without the repository tiprev and
196 of revisions in the view may change without the repository tiprev and
197 tipnode changing.
197 tipnode changing.
198
198
199 This function hashes all the revs filtered from the view and returns
199 This function hashes all the revs filtered from the view and returns
200 that SHA-1 digest.
200 that SHA-1 digest.
201 """
201 """
202 cl = repo.changelog
202 cl = repo.changelog
203 if not cl.filteredrevs:
203 if not cl.filteredrevs:
204 return None
204 return None
205 key = None
205 key = None
206 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
206 revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
207 if revs:
207 if revs:
208 s = util.sha1()
208 s = util.sha1()
209 for rev in revs:
209 for rev in revs:
210 s.update('%s;' % rev)
210 s.update('%s;' % rev)
211 key = s.digest()
211 key = s.digest()
212 return key
212 return key
213
213
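# Usage sketch (editor's addition): a cache keyed to a repoview can store this
# digest next to its tiprev/tipnode and compare it when loading.  The argument
# names below are hypothetical.
def _cachestillvalid(repo, cachedtiprev, cachedhash):
    # stale if the set of filtered revisions changed since the cache was written
    return filteredhash(repo, cachedtiprev) == cachedhash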
214 class abstractvfs(object):
214 class abstractvfs(object):
215 """Abstract base class; cannot be instantiated"""
215 """Abstract base class; cannot be instantiated"""
216
216
217 def __init__(self, *args, **kwargs):
217 def __init__(self, *args, **kwargs):
218 '''Prevent instantiation; don't call this from subclasses.'''
218 '''Prevent instantiation; don't call this from subclasses.'''
219 raise NotImplementedError('attempted instantiating ' + str(type(self)))
219 raise NotImplementedError('attempted instantiating ' + str(type(self)))
220
220
221 def tryread(self, path):
221 def tryread(self, path):
222 '''gracefully return an empty string for missing files'''
222 '''gracefully return an empty string for missing files'''
223 try:
223 try:
224 return self.read(path)
224 return self.read(path)
225 except IOError as inst:
225 except IOError as inst:
226 if inst.errno != errno.ENOENT:
226 if inst.errno != errno.ENOENT:
227 raise
227 raise
228 return ""
228 return ""
229
229
230 def tryreadlines(self, path, mode='rb'):
230 def tryreadlines(self, path, mode='rb'):
231 '''gracefully return an empty array for missing files'''
231 '''gracefully return an empty array for missing files'''
232 try:
232 try:
233 return self.readlines(path, mode=mode)
233 return self.readlines(path, mode=mode)
234 except IOError as inst:
234 except IOError as inst:
235 if inst.errno != errno.ENOENT:
235 if inst.errno != errno.ENOENT:
236 raise
236 raise
237 return []
237 return []
238
238
239 def open(self, path, mode="r", text=False, atomictemp=False,
239 def open(self, path, mode="r", text=False, atomictemp=False,
240 notindexed=False):
240 notindexed=False):
241 '''Open ``path`` file, which is relative to vfs root.
241 '''Open ``path`` file, which is relative to vfs root.
242
242
243 Newly created directories are marked as "not to be indexed by
243 Newly created directories are marked as "not to be indexed by
244 the content indexing service", if ``notindexed`` is specified
244 the content indexing service", if ``notindexed`` is specified
245 for "write" mode access.
245 for "write" mode access.
246 '''
246 '''
247 self.open = self.__call__
247 self.open = self.__call__
248 return self.__call__(path, mode, text, atomictemp, notindexed)
248 return self.__call__(path, mode, text, atomictemp, notindexed)
249
249
250 def read(self, path):
250 def read(self, path):
251 fp = self(path, 'rb')
251 fp = self(path, 'rb')
252 try:
252 try:
253 return fp.read()
253 return fp.read()
254 finally:
254 finally:
255 fp.close()
255 fp.close()
256
256
257 def readlines(self, path, mode='rb'):
257 def readlines(self, path, mode='rb'):
258 fp = self(path, mode=mode)
258 fp = self(path, mode=mode)
259 try:
259 try:
260 return fp.readlines()
260 return fp.readlines()
261 finally:
261 finally:
262 fp.close()
262 fp.close()
263
263
264 def write(self, path, data):
264 def write(self, path, data):
265 fp = self(path, 'wb')
265 fp = self(path, 'wb')
266 try:
266 try:
267 return fp.write(data)
267 return fp.write(data)
268 finally:
268 finally:
269 fp.close()
269 fp.close()
270
270
271 def writelines(self, path, data, mode='wb', notindexed=False):
271 def writelines(self, path, data, mode='wb', notindexed=False):
272 fp = self(path, mode=mode, notindexed=notindexed)
272 fp = self(path, mode=mode, notindexed=notindexed)
273 try:
273 try:
274 return fp.writelines(data)
274 return fp.writelines(data)
275 finally:
275 finally:
276 fp.close()
276 fp.close()
277
277
278 def append(self, path, data):
278 def append(self, path, data):
279 fp = self(path, 'ab')
279 fp = self(path, 'ab')
280 try:
280 try:
281 return fp.write(data)
281 return fp.write(data)
282 finally:
282 finally:
283 fp.close()
283 fp.close()
284
284
285 def basename(self, path):
285 def basename(self, path):
286 """return base element of a path (as os.path.basename would do)
286 """return base element of a path (as os.path.basename would do)
287
287
288 This exists to allow handling of strange encoding if needed."""
288 This exists to allow handling of strange encoding if needed."""
289 return os.path.basename(path)
289 return os.path.basename(path)
290
290
291 def chmod(self, path, mode):
291 def chmod(self, path, mode):
292 return os.chmod(self.join(path), mode)
292 return os.chmod(self.join(path), mode)
293
293
294 def dirname(self, path):
294 def dirname(self, path):
295 """return dirname element of a path (as os.path.dirname would do)
295 """return dirname element of a path (as os.path.dirname would do)
296
296
297 This exists to allow handling of strange encoding if needed."""
297 This exists to allow handling of strange encoding if needed."""
298 return os.path.dirname(path)
298 return os.path.dirname(path)
299
299
300 def exists(self, path=None):
300 def exists(self, path=None):
301 return os.path.exists(self.join(path))
301 return os.path.exists(self.join(path))
302
302
303 def fstat(self, fp):
303 def fstat(self, fp):
304 return util.fstat(fp)
304 return util.fstat(fp)
305
305
306 def isdir(self, path=None):
306 def isdir(self, path=None):
307 return os.path.isdir(self.join(path))
307 return os.path.isdir(self.join(path))
308
308
309 def isfile(self, path=None):
309 def isfile(self, path=None):
310 return os.path.isfile(self.join(path))
310 return os.path.isfile(self.join(path))
311
311
312 def islink(self, path=None):
312 def islink(self, path=None):
313 return os.path.islink(self.join(path))
313 return os.path.islink(self.join(path))
314
314
315 def reljoin(self, *paths):
315 def reljoin(self, *paths):
316 """join various elements of a path together (as os.path.join would do)
316 """join various elements of a path together (as os.path.join would do)
317
317
318 The vfs base is not injected so that paths stay relative. This exists
318 The vfs base is not injected so that paths stay relative. This exists
319 to allow handling of strange encoding if needed."""
319 to allow handling of strange encoding if needed."""
320 return os.path.join(*paths)
320 return os.path.join(*paths)
321
321
322 def split(self, path):
322 def split(self, path):
323 """split top-most element of a path (as os.path.split would do)
323 """split top-most element of a path (as os.path.split would do)
324
324
325 This exists to allow handling of strange encoding if needed."""
325 This exists to allow handling of strange encoding if needed."""
326 return os.path.split(path)
326 return os.path.split(path)
327
327
328 def lexists(self, path=None):
328 def lexists(self, path=None):
329 return os.path.lexists(self.join(path))
329 return os.path.lexists(self.join(path))
330
330
331 def lstat(self, path=None):
331 def lstat(self, path=None):
332 return os.lstat(self.join(path))
332 return os.lstat(self.join(path))
333
333
334 def listdir(self, path=None):
334 def listdir(self, path=None):
335 return os.listdir(self.join(path))
335 return os.listdir(self.join(path))
336
336
337 def makedir(self, path=None, notindexed=True):
337 def makedir(self, path=None, notindexed=True):
338 return util.makedir(self.join(path), notindexed)
338 return util.makedir(self.join(path), notindexed)
339
339
340 def makedirs(self, path=None, mode=None):
340 def makedirs(self, path=None, mode=None):
341 return util.makedirs(self.join(path), mode)
341 return util.makedirs(self.join(path), mode)
342
342
343 def makelock(self, info, path):
343 def makelock(self, info, path):
344 return util.makelock(info, self.join(path))
344 return util.makelock(info, self.join(path))
345
345
346 def mkdir(self, path=None):
346 def mkdir(self, path=None):
347 return os.mkdir(self.join(path))
347 return os.mkdir(self.join(path))
348
348
349 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
349 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
350 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
350 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
351 dir=self.join(dir), text=text)
351 dir=self.join(dir), text=text)
352 dname, fname = util.split(name)
352 dname, fname = util.split(name)
353 if dir:
353 if dir:
354 return fd, os.path.join(dir, fname)
354 return fd, os.path.join(dir, fname)
355 else:
355 else:
356 return fd, fname
356 return fd, fname
357
357
358 def readdir(self, path=None, stat=None, skip=None):
358 def readdir(self, path=None, stat=None, skip=None):
359 return osutil.listdir(self.join(path), stat, skip)
359 return osutil.listdir(self.join(path), stat, skip)
360
360
361 def readlock(self, path):
361 def readlock(self, path):
362 return util.readlock(self.join(path))
362 return util.readlock(self.join(path))
363
363
364 def rename(self, src, dst):
364 def rename(self, src, dst):
365 return util.rename(self.join(src), self.join(dst))
365 return util.rename(self.join(src), self.join(dst))
366
366
367 def readlink(self, path):
367 def readlink(self, path):
368 return os.readlink(self.join(path))
368 return os.readlink(self.join(path))
369
369
370 def removedirs(self, path=None):
370 def removedirs(self, path=None):
371 """Remove a leaf directory and all empty intermediate ones
371 """Remove a leaf directory and all empty intermediate ones
372 """
372 """
373 return util.removedirs(self.join(path))
373 return util.removedirs(self.join(path))
374
374
375 def rmtree(self, path=None, ignore_errors=False, forcibly=False):
375 def rmtree(self, path=None, ignore_errors=False, forcibly=False):
376 """Remove a directory tree recursively
376 """Remove a directory tree recursively
377
377
378 If ``forcibly``, this tries to remove READ-ONLY files, too.
378 If ``forcibly``, this tries to remove READ-ONLY files, too.
379 """
379 """
380 if forcibly:
380 if forcibly:
381 def onerror(function, path, excinfo):
381 def onerror(function, path, excinfo):
382 if function is not os.remove:
382 if function is not os.remove:
383 raise
383 raise
384 # read-only files cannot be unlinked under Windows
384 # read-only files cannot be unlinked under Windows
385 s = os.stat(path)
385 s = os.stat(path)
386 if (s.st_mode & stat.S_IWRITE) != 0:
386 if (s.st_mode & stat.S_IWRITE) != 0:
387 raise
387 raise
388 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
388 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
389 os.remove(path)
389 os.remove(path)
390 else:
390 else:
391 onerror = None
391 onerror = None
392 return shutil.rmtree(self.join(path),
392 return shutil.rmtree(self.join(path),
393 ignore_errors=ignore_errors, onerror=onerror)
393 ignore_errors=ignore_errors, onerror=onerror)
394
394
395 def setflags(self, path, l, x):
395 def setflags(self, path, l, x):
396 return util.setflags(self.join(path), l, x)
396 return util.setflags(self.join(path), l, x)
397
397
398 def stat(self, path=None):
398 def stat(self, path=None):
399 return os.stat(self.join(path))
399 return os.stat(self.join(path))
400
400
401 def unlink(self, path=None):
401 def unlink(self, path=None):
402 return util.unlink(self.join(path))
402 return util.unlink(self.join(path))
403
403
404 def unlinkpath(self, path=None, ignoremissing=False):
404 def unlinkpath(self, path=None, ignoremissing=False):
405 return util.unlinkpath(self.join(path), ignoremissing)
405 return util.unlinkpath(self.join(path), ignoremissing)
406
406
407 def utime(self, path=None, t=None):
407 def utime(self, path=None, t=None):
408 return os.utime(self.join(path), t)
408 return os.utime(self.join(path), t)
409
409
410 def walk(self, path=None, onerror=None):
410 def walk(self, path=None, onerror=None):
411 """Yield (dirpath, dirs, files) tuple for each directories under path
411 """Yield (dirpath, dirs, files) tuple for each directories under path
412
412
413 ``dirpath`` is relative to the root of this vfs. This
413 ``dirpath`` is relative to the root of this vfs. This
414 uses ``os.sep`` as the path separator, even if you specify a POSIX
414 uses ``os.sep`` as the path separator, even if you specify a POSIX
415 style ``path``.
415 style ``path``.
416
416
417 "The root of this vfs" is represented as empty ``dirpath``.
417 "The root of this vfs" is represented as empty ``dirpath``.
418 """
418 """
419 root = os.path.normpath(self.join(None))
419 root = os.path.normpath(self.join(None))
420 # when dirpath == root, dirpath[prefixlen:] becomes empty
420 # when dirpath == root, dirpath[prefixlen:] becomes empty
421 # because len(dirpath) < prefixlen.
421 # because len(dirpath) < prefixlen.
422 prefixlen = len(pathutil.normasprefix(root))
422 prefixlen = len(pathutil.normasprefix(root))
423 for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
423 for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
424 yield (dirpath[prefixlen:], dirs, files)
424 yield (dirpath[prefixlen:], dirs, files)
425
425
426 class vfs(abstractvfs):
426 class vfs(abstractvfs):
427 '''Operate files relative to a base directory
427 '''Operate files relative to a base directory
428
428
429 This class is used to hide the details of COW semantics and
429 This class is used to hide the details of COW semantics and
430 remote file access from higher level code.
430 remote file access from higher level code.
431 '''
431 '''
432 def __init__(self, base, audit=True, expandpath=False, realpath=False):
432 def __init__(self, base, audit=True, expandpath=False, realpath=False):
433 if expandpath:
433 if expandpath:
434 base = util.expandpath(base)
434 base = util.expandpath(base)
435 if realpath:
435 if realpath:
436 base = os.path.realpath(base)
436 base = os.path.realpath(base)
437 self.base = base
437 self.base = base
438 self._setmustaudit(audit)
438 self._setmustaudit(audit)
439 self.createmode = None
439 self.createmode = None
440 self._trustnlink = None
440 self._trustnlink = None
441
441
442 def _getmustaudit(self):
442 def _getmustaudit(self):
443 return self._audit
443 return self._audit
444
444
445 def _setmustaudit(self, onoff):
445 def _setmustaudit(self, onoff):
446 self._audit = onoff
446 self._audit = onoff
447 if onoff:
447 if onoff:
448 self.audit = pathutil.pathauditor(self.base)
448 self.audit = pathutil.pathauditor(self.base)
449 else:
449 else:
450 self.audit = util.always
450 self.audit = util.always
451
451
452 mustaudit = property(_getmustaudit, _setmustaudit)
452 mustaudit = property(_getmustaudit, _setmustaudit)
453
453
454 @util.propertycache
454 @util.propertycache
455 def _cansymlink(self):
455 def _cansymlink(self):
456 return util.checklink(self.base)
456 return util.checklink(self.base)
457
457
458 @util.propertycache
458 @util.propertycache
459 def _chmod(self):
459 def _chmod(self):
460 return util.checkexec(self.base)
460 return util.checkexec(self.base)
461
461
462 def _fixfilemode(self, name):
462 def _fixfilemode(self, name):
463 if self.createmode is None or not self._chmod:
463 if self.createmode is None or not self._chmod:
464 return
464 return
465 os.chmod(name, self.createmode & 0o666)
465 os.chmod(name, self.createmode & 0o666)
466
466
467 def __call__(self, path, mode="r", text=False, atomictemp=False,
467 def __call__(self, path, mode="r", text=False, atomictemp=False,
468 notindexed=False):
468 notindexed=False):
469 '''Open ``path`` file, which is relative to vfs root.
469 '''Open ``path`` file, which is relative to vfs root.
470
470
471 Newly created directories are marked as "not to be indexed by
471 Newly created directories are marked as "not to be indexed by
472 the content indexing service", if ``notindexed`` is specified
472 the content indexing service", if ``notindexed`` is specified
473 for "write" mode access.
473 for "write" mode access.
474 '''
474 '''
475 if self._audit:
475 if self._audit:
476 r = util.checkosfilename(path)
476 r = util.checkosfilename(path)
477 if r:
477 if r:
478 raise util.Abort("%s: %r" % (r, path))
478 raise util.Abort("%s: %r" % (r, path))
479 self.audit(path)
479 self.audit(path)
480 f = self.join(path)
480 f = self.join(path)
481
481
482 if not text and "b" not in mode:
482 if not text and "b" not in mode:
483 mode += "b" # for that other OS
483 mode += "b" # for that other OS
484
484
485 nlink = -1
485 nlink = -1
486 if mode not in ('r', 'rb'):
486 if mode not in ('r', 'rb'):
487 dirname, basename = util.split(f)
487 dirname, basename = util.split(f)
488 # If basename is empty, then the path is malformed because it points
488 # If basename is empty, then the path is malformed because it points
489 # to a directory. Let the posixfile() call below raise IOError.
489 # to a directory. Let the posixfile() call below raise IOError.
490 if basename:
490 if basename:
491 if atomictemp:
491 if atomictemp:
492 util.ensuredirs(dirname, self.createmode, notindexed)
492 util.ensuredirs(dirname, self.createmode, notindexed)
493 return util.atomictempfile(f, mode, self.createmode)
493 return util.atomictempfile(f, mode, self.createmode)
494 try:
494 try:
495 if 'w' in mode:
495 if 'w' in mode:
496 util.unlink(f)
496 util.unlink(f)
497 nlink = 0
497 nlink = 0
498 else:
498 else:
499 # nlinks() may behave differently for files on Windows
499 # nlinks() may behave differently for files on Windows
500 # shares if the file is open.
500 # shares if the file is open.
501 fd = util.posixfile(f)
501 fd = util.posixfile(f)
502 nlink = util.nlinks(f)
502 nlink = util.nlinks(f)
503 if nlink < 1:
503 if nlink < 1:
504 nlink = 2 # force mktempcopy (issue1922)
504 nlink = 2 # force mktempcopy (issue1922)
505 fd.close()
505 fd.close()
506 except (OSError, IOError) as e:
506 except (OSError, IOError) as e:
507 if e.errno != errno.ENOENT:
507 if e.errno != errno.ENOENT:
508 raise
508 raise
509 nlink = 0
509 nlink = 0
510 util.ensuredirs(dirname, self.createmode, notindexed)
510 util.ensuredirs(dirname, self.createmode, notindexed)
511 if nlink > 0:
511 if nlink > 0:
512 if self._trustnlink is None:
512 if self._trustnlink is None:
513 self._trustnlink = nlink > 1 or util.checknlink(f)
513 self._trustnlink = nlink > 1 or util.checknlink(f)
514 if nlink > 1 or not self._trustnlink:
514 if nlink > 1 or not self._trustnlink:
515 util.rename(util.mktempcopy(f), f)
515 util.rename(util.mktempcopy(f), f)
516 fp = util.posixfile(f, mode)
516 fp = util.posixfile(f, mode)
517 if nlink == 0:
517 if nlink == 0:
518 self._fixfilemode(f)
518 self._fixfilemode(f)
519 return fp
519 return fp
520
520
521 def symlink(self, src, dst):
521 def symlink(self, src, dst):
522 self.audit(dst)
522 self.audit(dst)
523 linkname = self.join(dst)
523 linkname = self.join(dst)
524 try:
524 try:
525 os.unlink(linkname)
525 os.unlink(linkname)
526 except OSError:
526 except OSError:
527 pass
527 pass
528
528
529 util.ensuredirs(os.path.dirname(linkname), self.createmode)
529 util.ensuredirs(os.path.dirname(linkname), self.createmode)
530
530
531 if self._cansymlink:
531 if self._cansymlink:
532 try:
532 try:
533 os.symlink(src, linkname)
533 os.symlink(src, linkname)
534 except OSError as err:
534 except OSError as err:
535 raise OSError(err.errno, _('could not symlink to %r: %s') %
535 raise OSError(err.errno, _('could not symlink to %r: %s') %
536 (src, err.strerror), linkname)
536 (src, err.strerror), linkname)
537 else:
537 else:
538 self.write(dst, src)
538 self.write(dst, src)
539
539
540 def join(self, path, *insidef):
540 def join(self, path, *insidef):
541 if path:
541 if path:
542 return os.path.join(self.base, path, *insidef)
542 return os.path.join(self.base, path, *insidef)
543 else:
543 else:
544 return self.base
544 return self.base
545
545
546 opener = vfs
546 opener = vfs
547
547
548 class auditvfs(object):
548 class auditvfs(object):
549 def __init__(self, vfs):
549 def __init__(self, vfs):
550 self.vfs = vfs
550 self.vfs = vfs
551
551
552 def _getmustaudit(self):
552 def _getmustaudit(self):
553 return self.vfs.mustaudit
553 return self.vfs.mustaudit
554
554
555 def _setmustaudit(self, onoff):
555 def _setmustaudit(self, onoff):
556 self.vfs.mustaudit = onoff
556 self.vfs.mustaudit = onoff
557
557
558 mustaudit = property(_getmustaudit, _setmustaudit)
558 mustaudit = property(_getmustaudit, _setmustaudit)
559
559
560 class filtervfs(abstractvfs, auditvfs):
560 class filtervfs(abstractvfs, auditvfs):
561 '''Wrapper vfs for filtering filenames with a function.'''
561 '''Wrapper vfs for filtering filenames with a function.'''
562
562
563 def __init__(self, vfs, filter):
563 def __init__(self, vfs, filter):
564 auditvfs.__init__(self, vfs)
564 auditvfs.__init__(self, vfs)
565 self._filter = filter
565 self._filter = filter
566
566
567 def __call__(self, path, *args, **kwargs):
567 def __call__(self, path, *args, **kwargs):
568 return self.vfs(self._filter(path), *args, **kwargs)
568 return self.vfs(self._filter(path), *args, **kwargs)
569
569
570 def join(self, path, *insidef):
570 def join(self, path, *insidef):
571 if path:
571 if path:
572 return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
572 return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
573 else:
573 else:
574 return self.vfs.join(path)
574 return self.vfs.join(path)
575
575
576 filteropener = filtervfs
576 filteropener = filtervfs
577
577
578 class readonlyvfs(abstractvfs, auditvfs):
578 class readonlyvfs(abstractvfs, auditvfs):
579 '''Wrapper vfs preventing any writing.'''
579 '''Wrapper vfs preventing any writing.'''
580
580
581 def __init__(self, vfs):
581 def __init__(self, vfs):
582 auditvfs.__init__(self, vfs)
582 auditvfs.__init__(self, vfs)
583
583
584 def __call__(self, path, mode='r', *args, **kw):
584 def __call__(self, path, mode='r', *args, **kw):
585 if mode not in ('r', 'rb'):
585 if mode not in ('r', 'rb'):
586 raise util.Abort('this vfs is read only')
586 raise util.Abort('this vfs is read only')
587 return self.vfs(path, mode, *args, **kw)
587 return self.vfs(path, mode, *args, **kw)
588
588
589 def join(self, path, *insidef):
589 def join(self, path, *insidef):
590 return self.vfs.join(path, *insidef)
590 return self.vfs.join(path, *insidef)
591
591
592 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
592 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
593 '''yield every hg repository under path, always recursively.
593 '''yield every hg repository under path, always recursively.
594 The recurse flag will only control recursion into repo working dirs'''
594 The recurse flag will only control recursion into repo working dirs'''
595 def errhandler(err):
595 def errhandler(err):
596 if err.filename == path:
596 if err.filename == path:
597 raise err
597 raise err
598 samestat = getattr(os.path, 'samestat', None)
598 samestat = getattr(os.path, 'samestat', None)
599 if followsym and samestat is not None:
599 if followsym and samestat is not None:
600 def adddir(dirlst, dirname):
600 def adddir(dirlst, dirname):
601 match = False
601 match = False
602 dirstat = os.stat(dirname)
602 dirstat = os.stat(dirname)
603 for lstdirstat in dirlst:
603 for lstdirstat in dirlst:
604 if samestat(dirstat, lstdirstat):
604 if samestat(dirstat, lstdirstat):
605 match = True
605 match = True
606 break
606 break
607 if not match:
607 if not match:
608 dirlst.append(dirstat)
608 dirlst.append(dirstat)
609 return not match
609 return not match
610 else:
610 else:
611 followsym = False
611 followsym = False
612
612
613 if (seen_dirs is None) and followsym:
613 if (seen_dirs is None) and followsym:
614 seen_dirs = []
614 seen_dirs = []
615 adddir(seen_dirs, path)
615 adddir(seen_dirs, path)
616 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
616 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
617 dirs.sort()
617 dirs.sort()
618 if '.hg' in dirs:
618 if '.hg' in dirs:
619 yield root # found a repository
619 yield root # found a repository
620 qroot = os.path.join(root, '.hg', 'patches')
620 qroot = os.path.join(root, '.hg', 'patches')
621 if os.path.isdir(os.path.join(qroot, '.hg')):
621 if os.path.isdir(os.path.join(qroot, '.hg')):
622 yield qroot # we have a patch queue repo here
622 yield qroot # we have a patch queue repo here
623 if recurse:
623 if recurse:
624 # avoid recursing inside the .hg directory
624 # avoid recursing inside the .hg directory
625 dirs.remove('.hg')
625 dirs.remove('.hg')
626 else:
626 else:
627 dirs[:] = [] # don't descend further
627 dirs[:] = [] # don't descend further
628 elif followsym:
628 elif followsym:
629 newdirs = []
629 newdirs = []
630 for d in dirs:
630 for d in dirs:
631 fname = os.path.join(root, d)
631 fname = os.path.join(root, d)
632 if adddir(seen_dirs, fname):
632 if adddir(seen_dirs, fname):
633 if os.path.islink(fname):
633 if os.path.islink(fname):
634 for hgname in walkrepos(fname, True, seen_dirs):
634 for hgname in walkrepos(fname, True, seen_dirs):
635 yield hgname
635 yield hgname
636 else:
636 else:
637 newdirs.append(d)
637 newdirs.append(d)
638 dirs[:] = newdirs
638 dirs[:] = newdirs
639
639
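# Usage sketch (editor's addition): walkrepos() yields the root of every
# repository found under a directory, plus any patch queue repository nested
# under .hg/patches.  _listrepos() is a hypothetical helper.
def _listrepos(ui, startdir):
    # follow symlinks; walkrepos() tracks visited directories to avoid loops
    for repopath in walkrepos(startdir, followsym=True):
        ui.write("%s\n" % repopath)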
640 def osrcpath():
640 def osrcpath():
641 '''return default os-specific hgrc search path'''
641 '''return default os-specific hgrc search path'''
642 path = []
642 path = []
643 defaultpath = os.path.join(util.datapath, 'default.d')
643 defaultpath = os.path.join(util.datapath, 'default.d')
644 if os.path.isdir(defaultpath):
644 if os.path.isdir(defaultpath):
645 for f, kind in osutil.listdir(defaultpath):
645 for f, kind in osutil.listdir(defaultpath):
646 if f.endswith('.rc'):
646 if f.endswith('.rc'):
647 path.append(os.path.join(defaultpath, f))
647 path.append(os.path.join(defaultpath, f))
648 path.extend(systemrcpath())
648 path.extend(systemrcpath())
649 path.extend(userrcpath())
649 path.extend(userrcpath())
650 path = [os.path.normpath(f) for f in path]
650 path = [os.path.normpath(f) for f in path]
651 return path
651 return path
652
652
653 _rcpath = None
653 _rcpath = None
654
654
655 def rcpath():
655 def rcpath():
656 '''return hgrc search path. if env var HGRCPATH is set, use it.
656 '''return hgrc search path. if env var HGRCPATH is set, use it.
657 for each item in path, if directory, use files ending in .rc,
657 for each item in path, if directory, use files ending in .rc,
658 else use item.
658 else use item.
659 make HGRCPATH empty to only look in .hg/hgrc of current repo.
659 make HGRCPATH empty to only look in .hg/hgrc of current repo.
660 if no HGRCPATH, use default os-specific path.'''
660 if no HGRCPATH, use default os-specific path.'''
661 global _rcpath
661 global _rcpath
662 if _rcpath is None:
662 if _rcpath is None:
663 if 'HGRCPATH' in os.environ:
663 if 'HGRCPATH' in os.environ:
664 _rcpath = []
664 _rcpath = []
665 for p in os.environ['HGRCPATH'].split(os.pathsep):
665 for p in os.environ['HGRCPATH'].split(os.pathsep):
666 if not p:
666 if not p:
667 continue
667 continue
668 p = util.expandpath(p)
668 p = util.expandpath(p)
669 if os.path.isdir(p):
669 if os.path.isdir(p):
670 for f, kind in osutil.listdir(p):
670 for f, kind in osutil.listdir(p):
671 if f.endswith('.rc'):
671 if f.endswith('.rc'):
672 _rcpath.append(os.path.join(p, f))
672 _rcpath.append(os.path.join(p, f))
673 else:
673 else:
674 _rcpath.append(p)
674 _rcpath.append(p)
675 else:
675 else:
676 _rcpath = osrcpath()
676 _rcpath = osrcpath()
677 return _rcpath
677 return _rcpath
678
678
679 def intrev(rev):
679 def intrev(rev):
680 """Return integer for a given revision that can be used in comparison or
680 """Return integer for a given revision that can be used in comparison or
681 arithmetic operation"""
681 arithmetic operation"""
682 if rev is None:
682 if rev is None:
683 return wdirrev
683 return wdirrev
684 return rev
684 return rev
685
685
686 def revsingle(repo, revspec, default='.'):
686 def revsingle(repo, revspec, default='.'):
687 if not revspec and revspec != 0:
687 if not revspec and revspec != 0:
688 return repo[default]
688 return repo[default]
689
689
690 l = revrange(repo, [revspec])
690 l = revrange(repo, [revspec])
691 if not l:
691 if not l:
692 raise util.Abort(_('empty revision set'))
692 raise util.Abort(_('empty revision set'))
693 return repo[l.last()]
693 return repo[l.last()]
694
694
695 def _pairspec(revspec):
695 def _pairspec(revspec):
696 tree = revset.parse(revspec)
696 tree = revset.parse(revspec)
697 tree = revset.optimize(tree, True)[1] # fix up "x^:y" -> "(x^):y"
697 tree = revset.optimize(tree, True)[1] # fix up "x^:y" -> "(x^):y"
698 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
698 return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
699
699
700 def revpair(repo, revs):
700 def revpair(repo, revs):
701 if not revs:
701 if not revs:
702 return repo.dirstate.p1(), None
702 return repo.dirstate.p1(), None
703
703
704 l = revrange(repo, revs)
704 l = revrange(repo, revs)
705
705
706 if not l:
706 if not l:
707 first = second = None
707 first = second = None
708 elif l.isascending():
708 elif l.isascending():
709 first = l.min()
709 first = l.min()
710 second = l.max()
710 second = l.max()
711 elif l.isdescending():
711 elif l.isdescending():
712 first = l.max()
712 first = l.max()
713 second = l.min()
713 second = l.min()
714 else:
714 else:
715 first = l.first()
715 first = l.first()
716 second = l.last()
716 second = l.last()
717
717
718 if first is None:
718 if first is None:
719 raise util.Abort(_('empty revision range'))
719 raise util.Abort(_('empty revision range'))
720
720
721 # if top-level is range expression, the result must always be a pair
721 # if top-level is range expression, the result must always be a pair
722 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
722 if first == second and len(revs) == 1 and not _pairspec(revs[0]):
723 return repo.lookup(first), None
723 return repo.lookup(first), None
724
724
725 return repo.lookup(first), repo.lookup(second)
725 return repo.lookup(first), repo.lookup(second)
726
726
727 def revrange(repo, revs):
727 def revrange(repo, revs):
728 """Yield revision as strings from a list of revision specifications."""
728 """Yield revision as strings from a list of revision specifications."""
729 allspecs = []
729 allspecs = []
730 for spec in revs:
730 for spec in revs:
731 if isinstance(spec, int):
731 if isinstance(spec, int):
732 spec = revset.formatspec('rev(%d)', spec)
732 spec = revset.formatspec('rev(%d)', spec)
733 allspecs.append(spec)
733 allspecs.append(spec)
734 m = revset.matchany(repo.ui, allspecs, repo)
734 m = revset.matchany(repo.ui, allspecs, repo)
735 return m(repo)
735 return m(repo)
736
736
737 def meaningfulparents(repo, ctx):
738 """Return list of meaningful (or all if debug) parentrevs for rev.
739
740 For merges (two non-nullrev revisions) both parents are meaningful.
741 Otherwise the first parent revision is considered meaningful if it
742 is not the preceding revision.
743 """
744 parents = ctx.parents()
745 if len(parents) > 1:
746 return parents
747 if repo.ui.debugflag:
748 return [parents[0], repo['null']]
749 if parents[0].rev() >= intrev(ctx.rev()) - 1:
750 return []
751 return parents
752
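# Usage sketch (editor's addition): per the commit message, this helper is
# meant to replace changeset_printer._meaningful_parentrevs().  _showparents()
# below is a hypothetical caller, shown only to illustrate the behaviour.
def _showparents(ui, repo, ctx):
    # for linear history (first parent == rev - 1) this returns an empty
    # list and nothing is printed; under --debug a non-merge also reports
    # the null revision as its second parent
    for pctx in meaningfulparents(repo, ctx):
        ui.write("parent:      %d:%s\n" % (pctx.rev(), pctx))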
737 def expandpats(pats):
753 def expandpats(pats):
738 '''Expand bare globs when running on windows.
754 '''Expand bare globs when running on windows.
739 On posix we assume it has already been done by sh.'''
755 On posix we assume it has already been done by sh.'''
740 if not util.expandglobs:
756 if not util.expandglobs:
741 return list(pats)
757 return list(pats)
742 ret = []
758 ret = []
743 for kindpat in pats:
759 for kindpat in pats:
744 kind, pat = matchmod._patsplit(kindpat, None)
760 kind, pat = matchmod._patsplit(kindpat, None)
745 if kind is None:
761 if kind is None:
746 try:
762 try:
747 globbed = glob.glob(pat)
763 globbed = glob.glob(pat)
748 except re.error:
764 except re.error:
749 globbed = [pat]
765 globbed = [pat]
750 if globbed:
766 if globbed:
751 ret.extend(globbed)
767 ret.extend(globbed)
752 continue
768 continue
753 ret.append(kindpat)
769 ret.append(kindpat)
754 return ret
770 return ret
755
771
756 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
772 def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
757 badfn=None):
773 badfn=None):
758 '''Return a matcher and the patterns that were used.
774 '''Return a matcher and the patterns that were used.
759 The matcher will warn about bad matches, unless an alternate badfn callback
775 The matcher will warn about bad matches, unless an alternate badfn callback
760 is provided.'''
776 is provided.'''
761 if pats == ("",):
777 if pats == ("",):
762 pats = []
778 pats = []
763 if opts is None:
779 if opts is None:
764 opts = {}
780 opts = {}
765 if not globbed and default == 'relpath':
781 if not globbed and default == 'relpath':
766 pats = expandpats(pats or [])
782 pats = expandpats(pats or [])
767
783
768 def bad(f, msg):
784 def bad(f, msg):
769 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
785 ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
770
786
771 if badfn is None:
787 if badfn is None:
772 badfn = bad
788 badfn = bad
773
789
774 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
790 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
775 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
791 default, listsubrepos=opts.get('subrepos'), badfn=badfn)
776
792
777 if m.always():
793 if m.always():
778 pats = []
794 pats = []
779 return m, pats
795 return m, pats
780
796
781 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
797 def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
782 badfn=None):
798 badfn=None):
783 '''Return a matcher that will warn about bad matches.'''
799 '''Return a matcher that will warn about bad matches.'''
784 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
800 return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
785
801
786 def matchall(repo):
802 def matchall(repo):
787 '''Return a matcher that will efficiently match everything.'''
803 '''Return a matcher that will efficiently match everything.'''
788 return matchmod.always(repo.root, repo.getcwd())
804 return matchmod.always(repo.root, repo.getcwd())
789
805
790 def matchfiles(repo, files, badfn=None):
806 def matchfiles(repo, files, badfn=None):
791 '''Return a matcher that will efficiently match exactly these files.'''
807 '''Return a matcher that will efficiently match exactly these files.'''
792 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
808 return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
793
809
794 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
810 def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
795 if opts is None:
811 if opts is None:
796 opts = {}
812 opts = {}
797 m = matcher
813 m = matcher
798 if dry_run is None:
814 if dry_run is None:
799 dry_run = opts.get('dry_run')
815 dry_run = opts.get('dry_run')
800 if similarity is None:
816 if similarity is None:
801 similarity = float(opts.get('similarity') or 0)
817 similarity = float(opts.get('similarity') or 0)
802
818
803 ret = 0
819 ret = 0
804 join = lambda f: os.path.join(prefix, f)
820 join = lambda f: os.path.join(prefix, f)
805
821
806 def matchessubrepo(matcher, subpath):
822 def matchessubrepo(matcher, subpath):
807 if matcher.exact(subpath):
823 if matcher.exact(subpath):
808 return True
824 return True
809 for f in matcher.files():
825 for f in matcher.files():
810 if f.startswith(subpath):
826 if f.startswith(subpath):
811 return True
827 return True
812 return False
828 return False
813
829
814 wctx = repo[None]
830 wctx = repo[None]
815 for subpath in sorted(wctx.substate):
831 for subpath in sorted(wctx.substate):
816 if opts.get('subrepos') or matchessubrepo(m, subpath):
832 if opts.get('subrepos') or matchessubrepo(m, subpath):
817 sub = wctx.sub(subpath)
833 sub = wctx.sub(subpath)
818 try:
834 try:
819 submatch = matchmod.narrowmatcher(subpath, m)
835 submatch = matchmod.narrowmatcher(subpath, m)
820 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
836 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
821 ret = 1
837 ret = 1
822 except error.LookupError:
838 except error.LookupError:
823 repo.ui.status(_("skipping missing subrepository: %s\n")
839 repo.ui.status(_("skipping missing subrepository: %s\n")
824 % join(subpath))
840 % join(subpath))
825
841
826 rejected = []
842 rejected = []
827 def badfn(f, msg):
843 def badfn(f, msg):
828 if f in m.files():
844 if f in m.files():
829 m.bad(f, msg)
845 m.bad(f, msg)
830 rejected.append(f)
846 rejected.append(f)
831
847
832 badmatch = matchmod.badmatch(m, badfn)
848 badmatch = matchmod.badmatch(m, badfn)
833 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
849 added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
834 badmatch)
850 badmatch)
835
851
836 unknownset = set(unknown + forgotten)
852 unknownset = set(unknown + forgotten)
837 toprint = unknownset.copy()
853 toprint = unknownset.copy()
838 toprint.update(deleted)
854 toprint.update(deleted)
839 for abs in sorted(toprint):
855 for abs in sorted(toprint):
840 if repo.ui.verbose or not m.exact(abs):
856 if repo.ui.verbose or not m.exact(abs):
841 if abs in unknownset:
857 if abs in unknownset:
842 status = _('adding %s\n') % m.uipath(abs)
858 status = _('adding %s\n') % m.uipath(abs)
843 else:
859 else:
844 status = _('removing %s\n') % m.uipath(abs)
860 status = _('removing %s\n') % m.uipath(abs)
845 repo.ui.status(status)
861 repo.ui.status(status)
846
862
847 renames = _findrenames(repo, m, added + unknown, removed + deleted,
863 renames = _findrenames(repo, m, added + unknown, removed + deleted,
848 similarity)
864 similarity)
849
865
850 if not dry_run:
866 if not dry_run:
851 _markchanges(repo, unknown + forgotten, deleted, renames)
867 _markchanges(repo, unknown + forgotten, deleted, renames)
852
868
853 for f in rejected:
869 for f in rejected:
854 if f in m.files():
870 if f in m.files():
855 return 1
871 return 1
856 return ret
872 return ret
857
873
858 def marktouched(repo, files, similarity=0.0):
874 def marktouched(repo, files, similarity=0.0):
859 '''Assert that files have somehow been operated upon. Files are relative to
875 '''Assert that files have somehow been operated upon. Files are relative to
860 the repo root.'''
876 the repo root.'''
861 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
877 m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
862 rejected = []
878 rejected = []
863
879
864 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
880 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
865
881
866 if repo.ui.verbose:
882 if repo.ui.verbose:
867 unknownset = set(unknown + forgotten)
883 unknownset = set(unknown + forgotten)
868 toprint = unknownset.copy()
884 toprint = unknownset.copy()
869 toprint.update(deleted)
885 toprint.update(deleted)
870 for abs in sorted(toprint):
886 for abs in sorted(toprint):
871 if abs in unknownset:
887 if abs in unknownset:
872 status = _('adding %s\n') % abs
888 status = _('adding %s\n') % abs
873 else:
889 else:
874 status = _('removing %s\n') % abs
890 status = _('removing %s\n') % abs
875 repo.ui.status(status)
891 repo.ui.status(status)
876
892
877 renames = _findrenames(repo, m, added + unknown, removed + deleted,
893 renames = _findrenames(repo, m, added + unknown, removed + deleted,
878 similarity)
894 similarity)
879
895
880 _markchanges(repo, unknown + forgotten, deleted, renames)
896 _markchanges(repo, unknown + forgotten, deleted, renames)
881
897
882 for f in rejected:
898 for f in rejected:
883 if f in m.files():
899 if f in m.files():
884 return 1
900 return 1
885 return 0
901 return 0
886
902
887 def _interestingfiles(repo, matcher):
903 def _interestingfiles(repo, matcher):
888 '''Walk dirstate with matcher, looking for files that addremove would care
904 '''Walk dirstate with matcher, looking for files that addremove would care
889 about.
905 about.
890
906
891 This is different from dirstate.status because it doesn't care about
907 This is different from dirstate.status because it doesn't care about
892 whether files are modified or clean.'''
908 whether files are modified or clean.'''
893 added, unknown, deleted, removed, forgotten = [], [], [], [], []
909 added, unknown, deleted, removed, forgotten = [], [], [], [], []
894 audit_path = pathutil.pathauditor(repo.root)
910 audit_path = pathutil.pathauditor(repo.root)
895
911
896 ctx = repo[None]
912 ctx = repo[None]
897 dirstate = repo.dirstate
913 dirstate = repo.dirstate
898 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
914 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
899 full=False)
915 full=False)
900 for abs, st in walkresults.iteritems():
916 for abs, st in walkresults.iteritems():
901 dstate = dirstate[abs]
917 dstate = dirstate[abs]
902 if dstate == '?' and audit_path.check(abs):
918 if dstate == '?' and audit_path.check(abs):
903 unknown.append(abs)
919 unknown.append(abs)
904 elif dstate != 'r' and not st:
920 elif dstate != 'r' and not st:
905 deleted.append(abs)
921 deleted.append(abs)
906 elif dstate == 'r' and st:
922 elif dstate == 'r' and st:
907 forgotten.append(abs)
923 forgotten.append(abs)
908 # for finding renames
924 # for finding renames
909 elif dstate == 'r' and not st:
925 elif dstate == 'r' and not st:
910 removed.append(abs)
926 removed.append(abs)
911 elif dstate == 'a':
927 elif dstate == 'a':
912 added.append(abs)
928 added.append(abs)
913
929
914 return added, unknown, deleted, removed, forgotten
930 return added, unknown, deleted, removed, forgotten
915
931
916 def _findrenames(repo, matcher, added, removed, similarity):
932 def _findrenames(repo, matcher, added, removed, similarity):
917 '''Find renames from removed files to added ones.'''
933 '''Find renames from removed files to added ones.'''
918 renames = {}
934 renames = {}
919 if similarity > 0:
935 if similarity > 0:
920 for old, new, score in similar.findrenames(repo, added, removed,
936 for old, new, score in similar.findrenames(repo, added, removed,
921 similarity):
937 similarity):
922 if (repo.ui.verbose or not matcher.exact(old)
938 if (repo.ui.verbose or not matcher.exact(old)
923 or not matcher.exact(new)):
939 or not matcher.exact(new)):
924 repo.ui.status(_('recording removal of %s as rename to %s '
940 repo.ui.status(_('recording removal of %s as rename to %s '
925 '(%d%% similar)\n') %
941 '(%d%% similar)\n') %
926 (matcher.rel(old), matcher.rel(new),
942 (matcher.rel(old), matcher.rel(new),
927 score * 100))
943 score * 100))
928 renames[new] = old
944 renames[new] = old
929 return renames
945 return renames
930
946
931 def _markchanges(repo, unknown, deleted, renames):
947 def _markchanges(repo, unknown, deleted, renames):
932 '''Marks the files in unknown as added, the files in deleted as removed,
948 '''Marks the files in unknown as added, the files in deleted as removed,
933 and the files in renames as copied.'''
949 and the files in renames as copied.'''
934 wctx = repo[None]
950 wctx = repo[None]
935 wlock = repo.wlock()
951 wlock = repo.wlock()
936 try:
952 try:
937 wctx.forget(deleted)
953 wctx.forget(deleted)
938 wctx.add(unknown)
954 wctx.add(unknown)
939 for new, old in renames.iteritems():
955 for new, old in renames.iteritems():
940 wctx.copy(old, new)
956 wctx.copy(old, new)
941 finally:
957 finally:
942 wlock.release()
958 wlock.release()
943
959
944 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
960 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
945 """Update the dirstate to reflect the intent of copying src to dst. For
961 """Update the dirstate to reflect the intent of copying src to dst. For
946 different reasons it might not end with dst being marked as copied from src.
962 different reasons it might not end with dst being marked as copied from src.
947 """
963 """
948 origsrc = repo.dirstate.copied(src) or src
964 origsrc = repo.dirstate.copied(src) or src
949 if dst == origsrc: # copying back a copy?
965 if dst == origsrc: # copying back a copy?
950 if repo.dirstate[dst] not in 'mn' and not dryrun:
966 if repo.dirstate[dst] not in 'mn' and not dryrun:
951 repo.dirstate.normallookup(dst)
967 repo.dirstate.normallookup(dst)
952 else:
968 else:
953 if repo.dirstate[origsrc] == 'a' and origsrc == src:
969 if repo.dirstate[origsrc] == 'a' and origsrc == src:
954 if not ui.quiet:
970 if not ui.quiet:
955 ui.warn(_("%s has not been committed yet, so no copy "
971 ui.warn(_("%s has not been committed yet, so no copy "
956 "data will be stored for %s.\n")
972 "data will be stored for %s.\n")
957 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
973 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
958 if repo.dirstate[dst] in '?r' and not dryrun:
974 if repo.dirstate[dst] in '?r' and not dryrun:
959 wctx.add([dst])
975 wctx.add([dst])
960 elif not dryrun:
976 elif not dryrun:
961 wctx.copy(origsrc, dst)
977 wctx.copy(origsrc, dst)
962
978
963 def readrequires(opener, supported):
979 def readrequires(opener, supported):
964 '''Reads and parses .hg/requires and checks if all entries found
980 '''Reads and parses .hg/requires and checks if all entries found
965 are in the list of supported features.'''
981 are in the list of supported features.'''
966 requirements = set(opener.read("requires").splitlines())
982 requirements = set(opener.read("requires").splitlines())
967 missings = []
983 missings = []
968 for r in requirements:
984 for r in requirements:
969 if r not in supported:
985 if r not in supported:
970 if not r or not r[0].isalnum():
986 if not r or not r[0].isalnum():
971 raise error.RequirementError(_(".hg/requires file is corrupt"))
987 raise error.RequirementError(_(".hg/requires file is corrupt"))
972 missings.append(r)
988 missings.append(r)
973 missings.sort()
989 missings.sort()
974 if missings:
990 if missings:
975 raise error.RequirementError(
991 raise error.RequirementError(
976 _("repository requires features unknown to this Mercurial: %s")
992 _("repository requires features unknown to this Mercurial: %s")
977 % " ".join(missings),
993 % " ".join(missings),
978 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
994 hint=_("see https://mercurial-scm.org/wiki/MissingRequirement"
979 " for more information"))
995 " for more information"))
980 return requirements
996 return requirements
981
997
982 def writerequires(opener, requirements):
998 def writerequires(opener, requirements):
983 reqfile = opener("requires", "w")
999 reqfile = opener("requires", "w")
984 for r in sorted(requirements):
1000 for r in sorted(requirements):
985 reqfile.write("%s\n" % r)
1001 reqfile.write("%s\n" % r)
986 reqfile.close()
1002 reqfile.close()
987
1003
988 class filecachesubentry(object):
1004 class filecachesubentry(object):
989 def __init__(self, path, stat):
1005 def __init__(self, path, stat):
990 self.path = path
1006 self.path = path
991 self.cachestat = None
1007 self.cachestat = None
992 self._cacheable = None
1008 self._cacheable = None
993
1009
994 if stat:
1010 if stat:
995 self.cachestat = filecachesubentry.stat(self.path)
1011 self.cachestat = filecachesubentry.stat(self.path)
996
1012
997 if self.cachestat:
1013 if self.cachestat:
998 self._cacheable = self.cachestat.cacheable()
1014 self._cacheable = self.cachestat.cacheable()
999 else:
1015 else:
1000 # None means we don't know yet
1016 # None means we don't know yet
1001 self._cacheable = None
1017 self._cacheable = None
1002
1018
1003 def refresh(self):
1019 def refresh(self):
1004 if self.cacheable():
1020 if self.cacheable():
1005 self.cachestat = filecachesubentry.stat(self.path)
1021 self.cachestat = filecachesubentry.stat(self.path)
1006
1022
1007 def cacheable(self):
1023 def cacheable(self):
1008 if self._cacheable is not None:
1024 if self._cacheable is not None:
1009 return self._cacheable
1025 return self._cacheable
1010
1026
1011 # we don't know yet, assume it is for now
1027 # we don't know yet, assume it is for now
1012 return True
1028 return True
1013
1029
1014 def changed(self):
1030 def changed(self):
1015 # no point in going further if we can't cache it
1031 # no point in going further if we can't cache it
1016 if not self.cacheable():
1032 if not self.cacheable():
1017 return True
1033 return True
1018
1034
1019 newstat = filecachesubentry.stat(self.path)
1035 newstat = filecachesubentry.stat(self.path)
1020
1036
1021 # we may not know if it's cacheable yet, check again now
1037 # we may not know if it's cacheable yet, check again now
1022 if newstat and self._cacheable is None:
1038 if newstat and self._cacheable is None:
1023 self._cacheable = newstat.cacheable()
1039 self._cacheable = newstat.cacheable()
1024
1040
1025 # check again
1041 # check again
1026 if not self._cacheable:
1042 if not self._cacheable:
1027 return True
1043 return True
1028
1044
1029 if self.cachestat != newstat:
1045 if self.cachestat != newstat:
1030 self.cachestat = newstat
1046 self.cachestat = newstat
1031 return True
1047 return True
1032 else:
1048 else:
1033 return False
1049 return False
1034
1050
1035 @staticmethod
1051 @staticmethod
1036 def stat(path):
1052 def stat(path):
1037 try:
1053 try:
1038 return util.cachestat(path)
1054 return util.cachestat(path)
1039 except OSError as e:
1055 except OSError as e:
1040 if e.errno != errno.ENOENT:
1056 if e.errno != errno.ENOENT:
1041 raise
1057 raise
1042
1058
1043 class filecacheentry(object):
1059 class filecacheentry(object):
1044 def __init__(self, paths, stat=True):
1060 def __init__(self, paths, stat=True):
1045 self._entries = []
1061 self._entries = []
1046 for path in paths:
1062 for path in paths:
1047 self._entries.append(filecachesubentry(path, stat))
1063 self._entries.append(filecachesubentry(path, stat))
1048
1064
1049 def changed(self):
1065 def changed(self):
1050 '''true if any entry has changed'''
1066 '''true if any entry has changed'''
1051 for entry in self._entries:
1067 for entry in self._entries:
1052 if entry.changed():
1068 if entry.changed():
1053 return True
1069 return True
1054 return False
1070 return False
1055
1071
1056 def refresh(self):
1072 def refresh(self):
1057 for entry in self._entries:
1073 for entry in self._entries:
1058 entry.refresh()
1074 entry.refresh()
1059
1075
1060 class filecache(object):
1076 class filecache(object):
1061 '''A property like decorator that tracks files under .hg/ for updates.
1077 '''A property like decorator that tracks files under .hg/ for updates.
1062
1078
1063 Records stat info when called in _filecache.
1079 Records stat info when called in _filecache.
1064
1080
1065 On subsequent calls, compares old stat info with new info, and recreates the
1081 On subsequent calls, compares old stat info with new info, and recreates the
1066 object when any of the files changes, updating the new stat info in
1082 object when any of the files changes, updating the new stat info in
1067 _filecache.
1083 _filecache.
1068
1084
1069 Mercurial either atomically renames or appends files under .hg,
1085 Mercurial either atomically renames or appends files under .hg,
1070 so to ensure the cache is reliable we need the filesystem to be able
1086 so to ensure the cache is reliable we need the filesystem to be able
1071 to tell us if a file has been replaced. If it can't, we fall back to
1087 to tell us if a file has been replaced. If it can't, we fall back to
1072 recreating the object on every call (essentially the same behavior as
1088 recreating the object on every call (essentially the same behavior as
1073 propertycache).
1089 propertycache).
1074
1090
1075 '''
1091 '''
1076 def __init__(self, *paths):
1092 def __init__(self, *paths):
1077 self.paths = paths
1093 self.paths = paths
1078
1094
1079 def join(self, obj, fname):
1095 def join(self, obj, fname):
1080 """Used to compute the runtime path of a cached file.
1096 """Used to compute the runtime path of a cached file.
1081
1097
1082 Users should subclass filecache and provide their own version of this
1098 Users should subclass filecache and provide their own version of this
1083 function to call the appropriate join function on 'obj' (an instance
1099 function to call the appropriate join function on 'obj' (an instance
1084 of the class whose member function was decorated).
1100 of the class whose member function was decorated).
1085 """
1101 """
1086 return obj.join(fname)
1102 return obj.join(fname)
1087
1103
1088 def __call__(self, func):
1104 def __call__(self, func):
1089 self.func = func
1105 self.func = func
1090 self.name = func.__name__
1106 self.name = func.__name__
1091 return self
1107 return self
1092
1108
1093 def __get__(self, obj, type=None):
1109 def __get__(self, obj, type=None):
1094 # do we need to check if the file changed?
1110 # do we need to check if the file changed?
1095 if self.name in obj.__dict__:
1111 if self.name in obj.__dict__:
1096 assert self.name in obj._filecache, self.name
1112 assert self.name in obj._filecache, self.name
1097 return obj.__dict__[self.name]
1113 return obj.__dict__[self.name]
1098
1114
1099 entry = obj._filecache.get(self.name)
1115 entry = obj._filecache.get(self.name)
1100
1116
1101 if entry:
1117 if entry:
1102 if entry.changed():
1118 if entry.changed():
1103 entry.obj = self.func(obj)
1119 entry.obj = self.func(obj)
1104 else:
1120 else:
1105 paths = [self.join(obj, path) for path in self.paths]
1121 paths = [self.join(obj, path) for path in self.paths]
1106
1122
1107 # We stat -before- creating the object so our cache doesn't lie if
1123 # We stat -before- creating the object so our cache doesn't lie if
1108 # a writer modified between the time we read and stat
1124 # a writer modified between the time we read and stat
1109 entry = filecacheentry(paths, True)
1125 entry = filecacheentry(paths, True)
1110 entry.obj = self.func(obj)
1126 entry.obj = self.func(obj)
1111
1127
1112 obj._filecache[self.name] = entry
1128 obj._filecache[self.name] = entry
1113
1129
1114 obj.__dict__[self.name] = entry.obj
1130 obj.__dict__[self.name] = entry.obj
1115 return entry.obj
1131 return entry.obj
1116
1132
1117 def __set__(self, obj, value):
1133 def __set__(self, obj, value):
1118 if self.name not in obj._filecache:
1134 if self.name not in obj._filecache:
1119 # we add an entry for the missing value because X in __dict__
1135 # we add an entry for the missing value because X in __dict__
1120 # implies X in _filecache
1136 # implies X in _filecache
1121 paths = [self.join(obj, path) for path in self.paths]
1137 paths = [self.join(obj, path) for path in self.paths]
1122 ce = filecacheentry(paths, False)
1138 ce = filecacheentry(paths, False)
1123 obj._filecache[self.name] = ce
1139 obj._filecache[self.name] = ce
1124 else:
1140 else:
1125 ce = obj._filecache[self.name]
1141 ce = obj._filecache[self.name]
1126
1142
1127 ce.obj = value # update cached copy
1143 ce.obj = value # update cached copy
1128 obj.__dict__[self.name] = value # update copy returned by obj.x
1144 obj.__dict__[self.name] = value # update copy returned by obj.x
1129
1145
1130 def __delete__(self, obj):
1146 def __delete__(self, obj):
1131 try:
1147 try:
1132 del obj.__dict__[self.name]
1148 del obj.__dict__[self.name]
1133 except KeyError:
1149 except KeyError:
1134 raise AttributeError(self.name)
1150 raise AttributeError(self.name)
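# Usage sketch (editor's addition, not part of this changeset): filecache is
# used as a decorator on properties of an object that exposes join() and a
# _filecache dict; subclasses override join() when the tracked file lives
# somewhere other than directly under .hg/.  Both classes below are
# hypothetical illustrations, not Mercurial APIs.
class _storecache(filecache):
    '''filecache variant resolving names via a (hypothetical) obj.sjoin()'''
    def join(self, obj, fname):
        return obj.sjoin(fname)

class _cacheduser(object):
    def __init__(self, vfs):
        self._filecache = {}        # consulted by filecache.__get__/__set__
        self.opener = vfs

    def join(self, fname):
        return self.opener.join(fname)

    def sjoin(self, fname):
        return self.opener.join('store', fname)

    @filecache('bookmarks')
    def bookmarksdata(self):
        # re-read .hg/bookmarks only when its stat information changes
        return self.opener.tryread('bookmarks')

    @_storecache('00changelog.i')
    def changelogdata(self):
        # tracked at .hg/store/00changelog.i thanks to the overridden join()
        return self.opener.tryread('store/00changelog.i')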