##// END OF EJS Templates
workingctx: use normal dirs() instead of dirstate.dirs()...
Durham Goode -
r24213:e0c1328d default
parent child Browse files
Show More
@@ -1,2963 +1,2965 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, errno, re, tempfile
10 import os, sys, errno, re, tempfile
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
11 import util, scmutil, templater, patch, error, templatekw, revlog, copies
12 import match as matchmod
12 import match as matchmod
13 import context, repair, graphmod, revset, phases, obsolete, pathutil
13 import context, repair, graphmod, revset, phases, obsolete, pathutil
14 import changelog
14 import changelog
15 import bookmarks
15 import bookmarks
16 import encoding
16 import encoding
17 import lock as lockmod
17 import lock as lockmod
18
18
def parsealiases(cmd):
    """Split a command table key into its list of alias names.

    Table keys may carry a leading '^' marker and '|'-separated
    aliases, e.g. '^log|history' -> ['log', 'history'].
    """
    stripped = cmd.lstrip("^")
    return stripped.split("|")
21
21
def findpossible(cmd, table, strict=False):
    """Map every command matching 'cmd' to (aliases, command table entry).

    Returns a dict of cmd -> (aliases, table entry) for each matching
    command.  Debug commands (or their aliases) are returned only when
    no normal command matches.  With strict=False, an alias prefix is
    accepted as a match.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    for entry in keys:
        aliases = parsealiases(entry)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            # accept a prefix of any alias
            for alias in aliases:
                if alias.startswith(cmd):
                    found = alias
                    break
        if found is None:
            continue
        if aliases[0].startswith("debug") or found.startswith("debug"):
            debugchoice[found] = (aliases, table[entry])
        else:
            choice[found] = (aliases, table[entry])

    # debug commands are offered only as a last resort
    if not choice and debugchoice:
        choice = debugchoice

    return choice
57
57
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice = findpossible(cmd, table, strict)

    # unique resolution to an exact alias
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice))

    if choice:
        return list(choice.values())[0]

    raise error.UnknownCommand(cmd)
74
74
def findrepo(p):
    """Walk up from directory 'p' looking for a '.hg' directory.

    Returns the repository root, or None when the filesystem root is
    reached without finding one.
    """
    while True:
        if os.path.isdir(os.path.join(p, ".hg")):
            return p
        parent = os.path.dirname(p)
        if parent == p:
            return None
        p = parent
82
82
def bailifchanged(repo):
    """Abort unless the working directory is clean.

    Aborts on an outstanding merge (second parent set), on any
    modified/added/removed/deleted file, and on any dirty subrepo.
    """
    if repo.dirstate.p2() != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    modified, added, removed, deleted = repo.status()[:4]
    if any((modified, added, removed, deleted)):
        raise util.Abort(_('uncommitted changes'))
    ctx = repo[None]
    for subpath in sorted(ctx.substate):
        if ctx.sub(subpath).dirty():
            raise util.Abort(_("uncommitted changes in subrepo %s") % subpath)
93
93
94 def logmessage(ui, opts):
94 def logmessage(ui, opts):
95 """ get the log message according to -m and -l option """
95 """ get the log message according to -m and -l option """
96 message = opts.get('message')
96 message = opts.get('message')
97 logfile = opts.get('logfile')
97 logfile = opts.get('logfile')
98
98
99 if message and logfile:
99 if message and logfile:
100 raise util.Abort(_('options --message and --logfile are mutually '
100 raise util.Abort(_('options --message and --logfile are mutually '
101 'exclusive'))
101 'exclusive'))
102 if not message and logfile:
102 if not message and logfile:
103 try:
103 try:
104 if logfile == '-':
104 if logfile == '-':
105 message = ui.fin.read()
105 message = ui.fin.read()
106 else:
106 else:
107 message = '\n'.join(util.readfile(logfile).splitlines())
107 message = '\n'.join(util.readfile(logfile).splitlines())
108 except IOError, inst:
108 except IOError, inst:
109 raise util.Abort(_("can't read commit message '%s': %s") %
109 raise util.Abort(_("can't read commit message '%s': %s") %
110 (logfile, inst.strerror))
110 (logfile, inst.strerror))
111 return message
111 return message
112
112
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        merging = ctxorbool
    else:
        # a changectx: a merge has more than one parent
        merging = len(ctxorbool.parents()) > 1
    suffix = ".merge" if merging else ".normal"
    return baseformname + suffix
129
129
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function called with the edited commit message
    (= 'description' of the new changeset) just after editing but
    before the empty-ness check; it returns the actual text to store.

    'extramsg' is an extra message shown in the editor instead of the
    'Leave message empty to abort commit' line ('HG: ' prefix and EOL
    are added automatically).

    'editform' is a dot-separated list of names distinguishing the
    purpose of the commit text editing.

    Returns 'commitforceeditor' regardless of 'edit' whenever
    'finishdesc' or 'extramsg' is given, because those are specific to
    usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def editor(r, c, s):
            return commitforceeditor(r, c, s,
                                     finishdesc=finishdesc,
                                     extramsg=extramsg,
                                     editform=editform)
        return editor
    if editform:
        def editor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return editor
    return commiteditor
160
160
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    raw = opts.get('limit')
    # an absent or empty/zero limit means "no limit"
    if not raw:
        return None
    try:
        value = int(raw)
    except ValueError:
        raise util.Abort(_('limit must be a positive integer'))
    if value <= 0:
        raise util.Abort(_('limit must be positive'))
    return value
174
174
175 def makefilename(repo, pat, node, desc=None,
175 def makefilename(repo, pat, node, desc=None,
176 total=None, seqno=None, revwidth=None, pathname=None):
176 total=None, seqno=None, revwidth=None, pathname=None):
177 node_expander = {
177 node_expander = {
178 'H': lambda: hex(node),
178 'H': lambda: hex(node),
179 'R': lambda: str(repo.changelog.rev(node)),
179 'R': lambda: str(repo.changelog.rev(node)),
180 'h': lambda: short(node),
180 'h': lambda: short(node),
181 'm': lambda: re.sub('[^\w]', '_', str(desc))
181 'm': lambda: re.sub('[^\w]', '_', str(desc))
182 }
182 }
183 expander = {
183 expander = {
184 '%': lambda: '%',
184 '%': lambda: '%',
185 'b': lambda: os.path.basename(repo.root),
185 'b': lambda: os.path.basename(repo.root),
186 }
186 }
187
187
188 try:
188 try:
189 if node:
189 if node:
190 expander.update(node_expander)
190 expander.update(node_expander)
191 if node:
191 if node:
192 expander['r'] = (lambda:
192 expander['r'] = (lambda:
193 str(repo.changelog.rev(node)).zfill(revwidth or 0))
193 str(repo.changelog.rev(node)).zfill(revwidth or 0))
194 if total is not None:
194 if total is not None:
195 expander['N'] = lambda: str(total)
195 expander['N'] = lambda: str(total)
196 if seqno is not None:
196 if seqno is not None:
197 expander['n'] = lambda: str(seqno)
197 expander['n'] = lambda: str(seqno)
198 if total is not None and seqno is not None:
198 if total is not None and seqno is not None:
199 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
199 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
200 if pathname is not None:
200 if pathname is not None:
201 expander['s'] = lambda: os.path.basename(pathname)
201 expander['s'] = lambda: os.path.basename(pathname)
202 expander['d'] = lambda: os.path.dirname(pathname) or '.'
202 expander['d'] = lambda: os.path.dirname(pathname) or '.'
203 expander['p'] = lambda: pathname
203 expander['p'] = lambda: pathname
204
204
205 newname = []
205 newname = []
206 patlen = len(pat)
206 patlen = len(pat)
207 i = 0
207 i = 0
208 while i < patlen:
208 while i < patlen:
209 c = pat[i]
209 c = pat[i]
210 if c == '%':
210 if c == '%':
211 i += 1
211 i += 1
212 c = pat[i]
212 c = pat[i]
213 c = expander[c]()
213 c = expander[c]()
214 newname.append(c)
214 newname.append(c)
215 i += 1
215 i += 1
216 return ''.join(newname)
216 return ''.join(newname)
217 except KeyError, inst:
217 except KeyError, inst:
218 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
218 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
219 inst.args[0])
219 inst.args[0])
220
220
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open (or wrap) a file object for the output pattern 'pat'.

    An empty pattern or '-' selects stdout (for writable modes) or
    stdin, duplicated when the stream has a real file descriptor.  A
    pattern that is already a suitable file-like object is returned
    as-is.  Otherwise the pattern is expanded with makefilename() and
    opened; 'modemap' can override the per-file mode and is updated so
    that later writes to the same expanded name append instead of
    truncating.
    """
    writable = mode not in ('r', 'rb')

    if not pat or pat == '-':
        if writable:
            fp = repo.ui.fout
        else:
            fp = repo.ui.fin
        if util.safehasattr(fp, 'fileno'):
            return os.fdopen(os.dup(fp.fileno()), mode)
        # this fp can't be duped properly, so hand back a wrapper whose
        # close() is a no-op; callers then can't close the real stream
        class wrappedfileobj(object):
            noop = lambda x: None
            def __init__(self, f):
                self.f = f
            def __getattr__(self, attr):
                if attr == 'close':
                    return self.noop
                return getattr(self.f, attr)
        return wrappedfileobj(fp)
    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            modemap[fn] = 'ab'
    return open(fn, mode)
255
255
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']

    # validate the option/argument combination first
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl or mf:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest '
                    'without a repository')
    if msg:
        raise util.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif mf:
            r = repo.manifest
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    # NOTE: an empty revlog is falsy and falls through to the
    # file-based fallback below, same as no revlog at all
    if r:
        return r
    if not file_:
        raise error.CommandError(cmd, _('invalid arguments'))
    if not os.path.isfile(file_):
        raise util.Abort(_("revlog '%s' not found") % file_)
    return revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
                         file_[:-2] + ".i")
290
290
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, with rename=True, move) the files matched by 'pats'.

    The last element of 'pats' is the destination; the rest are
    sources.  Warnings are emitted (not raised) for per-file problems;
    the return value is True when any file failed to copy.
    Recognized opts: 'after', 'dry_run', 'force'.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}  # abstarget -> abssrc, used to detect collisions
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # expand one source pattern into (abs, rel, exact) tuples,
        # warning about unmanaged or removed files named exactly
        srcs = []
        # with --after a removed ('r') file is an acceptable source
        badstates = after and '?' or '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # copy/move one file; returns True (only) on a hard failure
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                # case-only rename on a case-insensitive filesystem
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            # --after only records the copy/move; the target must
            # already be present in the working directory
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # rename through a temporary name so only the case
                    # of the target changes
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many stripped sources already exist
                    # under dest; the higher-scoring strip length wins
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
516
516
517 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
517 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
518 runargs=None, appendpid=False):
518 runargs=None, appendpid=False):
519 '''Run a command as a service.'''
519 '''Run a command as a service.'''
520
520
521 def writepid(pid):
521 def writepid(pid):
522 if opts['pid_file']:
522 if opts['pid_file']:
523 mode = appendpid and 'a' or 'w'
523 mode = appendpid and 'a' or 'w'
524 fp = open(opts['pid_file'], mode)
524 fp = open(opts['pid_file'], mode)
525 fp.write(str(pid) + '\n')
525 fp.write(str(pid) + '\n')
526 fp.close()
526 fp.close()
527
527
528 if opts['daemon'] and not opts['daemon_pipefds']:
528 if opts['daemon'] and not opts['daemon_pipefds']:
529 # Signal child process startup with file removal
529 # Signal child process startup with file removal
530 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
530 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
531 os.close(lockfd)
531 os.close(lockfd)
532 try:
532 try:
533 if not runargs:
533 if not runargs:
534 runargs = util.hgcmd() + sys.argv[1:]
534 runargs = util.hgcmd() + sys.argv[1:]
535 runargs.append('--daemon-pipefds=%s' % lockpath)
535 runargs.append('--daemon-pipefds=%s' % lockpath)
536 # Don't pass --cwd to the child process, because we've already
536 # Don't pass --cwd to the child process, because we've already
537 # changed directory.
537 # changed directory.
538 for i in xrange(1, len(runargs)):
538 for i in xrange(1, len(runargs)):
539 if runargs[i].startswith('--cwd='):
539 if runargs[i].startswith('--cwd='):
540 del runargs[i]
540 del runargs[i]
541 break
541 break
542 elif runargs[i].startswith('--cwd'):
542 elif runargs[i].startswith('--cwd'):
543 del runargs[i:i + 2]
543 del runargs[i:i + 2]
544 break
544 break
545 def condfn():
545 def condfn():
546 return not os.path.exists(lockpath)
546 return not os.path.exists(lockpath)
547 pid = util.rundetached(runargs, condfn)
547 pid = util.rundetached(runargs, condfn)
548 if pid < 0:
548 if pid < 0:
549 raise util.Abort(_('child process failed to start'))
549 raise util.Abort(_('child process failed to start'))
550 writepid(pid)
550 writepid(pid)
551 finally:
551 finally:
552 try:
552 try:
553 os.unlink(lockpath)
553 os.unlink(lockpath)
554 except OSError, e:
554 except OSError, e:
555 if e.errno != errno.ENOENT:
555 if e.errno != errno.ENOENT:
556 raise
556 raise
557 if parentfn:
557 if parentfn:
558 return parentfn(pid)
558 return parentfn(pid)
559 else:
559 else:
560 return
560 return
561
561
562 if initfn:
562 if initfn:
563 initfn()
563 initfn()
564
564
565 if not opts['daemon']:
565 if not opts['daemon']:
566 writepid(os.getpid())
566 writepid(os.getpid())
567
567
568 if opts['daemon_pipefds']:
568 if opts['daemon_pipefds']:
569 lockpath = opts['daemon_pipefds']
569 lockpath = opts['daemon_pipefds']
570 try:
570 try:
571 os.setsid()
571 os.setsid()
572 except AttributeError:
572 except AttributeError:
573 pass
573 pass
574 os.unlink(lockpath)
574 os.unlink(lockpath)
575 util.hidewindow()
575 util.hidewindow()
576 sys.stdout.flush()
576 sys.stdout.flush()
577 sys.stderr.flush()
577 sys.stderr.flush()
578
578
579 nullfd = os.open(os.devnull, os.O_RDWR)
579 nullfd = os.open(os.devnull, os.O_RDWR)
580 logfilefd = nullfd
580 logfilefd = nullfd
581 if logfile:
581 if logfile:
582 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
582 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
583 os.dup2(nullfd, 0)
583 os.dup2(nullfd, 0)
584 os.dup2(logfilefd, 1)
584 os.dup2(logfilefd, 1)
585 os.dup2(logfilefd, 2)
585 os.dup2(logfilefd, 2)
586 if nullfd not in (0, 1, 2):
586 if nullfd not in (0, 1, 2):
587 os.close(nullfd)
587 os.close(nullfd)
588 if logfile and logfilefd not in (0, 1, 2):
588 if logfile and logfilefd not in (0, 1, 2):
589 os.close(logfilefd)
589 os.close(logfilefd)
590
590
591 if runfn:
591 if runfn:
592 return runfn()
592 return runfn()
593
593
594 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
594 def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
595 """Utility function used by commands.import to import a single patch
595 """Utility function used by commands.import to import a single patch
596
596
597 This function is explicitly defined here to help the evolve extension to
597 This function is explicitly defined here to help the evolve extension to
598 wrap this part of the import logic.
598 wrap this part of the import logic.
599
599
600 The API is currently a bit ugly because it a simple code translation from
600 The API is currently a bit ugly because it a simple code translation from
601 the import command. Feel free to make it better.
601 the import command. Feel free to make it better.
602
602
603 :hunk: a patch (as a binary string)
603 :hunk: a patch (as a binary string)
604 :parents: nodes that will be parent of the created commit
604 :parents: nodes that will be parent of the created commit
605 :opts: the full dict of option passed to the import command
605 :opts: the full dict of option passed to the import command
606 :msgs: list to save commit message to.
606 :msgs: list to save commit message to.
607 (used in case we need to save it when failing)
607 (used in case we need to save it when failing)
608 :updatefunc: a function that update a repo to a given node
608 :updatefunc: a function that update a repo to a given node
609 updatefunc(<repo>, <node>)
609 updatefunc(<repo>, <node>)
610 """
610 """
611 tmpname, message, user, date, branch, nodeid, p1, p2 = \
611 tmpname, message, user, date, branch, nodeid, p1, p2 = \
612 patch.extract(ui, hunk)
612 patch.extract(ui, hunk)
613
613
614 update = not opts.get('bypass')
614 update = not opts.get('bypass')
615 strip = opts["strip"]
615 strip = opts["strip"]
616 sim = float(opts.get('similarity') or 0)
616 sim = float(opts.get('similarity') or 0)
617 if not tmpname:
617 if not tmpname:
618 return (None, None, False)
618 return (None, None, False)
619 msg = _('applied to working directory')
619 msg = _('applied to working directory')
620
620
621 rejects = False
621 rejects = False
622
622
623 try:
623 try:
624 cmdline_message = logmessage(ui, opts)
624 cmdline_message = logmessage(ui, opts)
625 if cmdline_message:
625 if cmdline_message:
626 # pickup the cmdline msg
626 # pickup the cmdline msg
627 message = cmdline_message
627 message = cmdline_message
628 elif message:
628 elif message:
629 # pickup the patch msg
629 # pickup the patch msg
630 message = message.strip()
630 message = message.strip()
631 else:
631 else:
632 # launch the editor
632 # launch the editor
633 message = None
633 message = None
634 ui.debug('message:\n%s\n' % message)
634 ui.debug('message:\n%s\n' % message)
635
635
636 if len(parents) == 1:
636 if len(parents) == 1:
637 parents.append(repo[nullid])
637 parents.append(repo[nullid])
638 if opts.get('exact'):
638 if opts.get('exact'):
639 if not nodeid or not p1:
639 if not nodeid or not p1:
640 raise util.Abort(_('not a Mercurial patch'))
640 raise util.Abort(_('not a Mercurial patch'))
641 p1 = repo[p1]
641 p1 = repo[p1]
642 p2 = repo[p2 or nullid]
642 p2 = repo[p2 or nullid]
643 elif p2:
643 elif p2:
644 try:
644 try:
645 p1 = repo[p1]
645 p1 = repo[p1]
646 p2 = repo[p2]
646 p2 = repo[p2]
647 # Without any options, consider p2 only if the
647 # Without any options, consider p2 only if the
648 # patch is being applied on top of the recorded
648 # patch is being applied on top of the recorded
649 # first parent.
649 # first parent.
650 if p1 != parents[0]:
650 if p1 != parents[0]:
651 p1 = parents[0]
651 p1 = parents[0]
652 p2 = repo[nullid]
652 p2 = repo[nullid]
653 except error.RepoError:
653 except error.RepoError:
654 p1, p2 = parents
654 p1, p2 = parents
655 if p2.node() == nullid:
655 if p2.node() == nullid:
656 ui.warn(_("warning: import the patch as a normal revision\n"
656 ui.warn(_("warning: import the patch as a normal revision\n"
657 "(use --exact to import the patch as a merge)\n"))
657 "(use --exact to import the patch as a merge)\n"))
658 else:
658 else:
659 p1, p2 = parents
659 p1, p2 = parents
660
660
661 n = None
661 n = None
662 if update:
662 if update:
663 repo.dirstate.beginparentchange()
663 repo.dirstate.beginparentchange()
664 if p1 != parents[0]:
664 if p1 != parents[0]:
665 updatefunc(repo, p1.node())
665 updatefunc(repo, p1.node())
666 if p2 != parents[1]:
666 if p2 != parents[1]:
667 repo.setparents(p1.node(), p2.node())
667 repo.setparents(p1.node(), p2.node())
668
668
669 if opts.get('exact') or opts.get('import_branch'):
669 if opts.get('exact') or opts.get('import_branch'):
670 repo.dirstate.setbranch(branch or 'default')
670 repo.dirstate.setbranch(branch or 'default')
671
671
672 partial = opts.get('partial', False)
672 partial = opts.get('partial', False)
673 files = set()
673 files = set()
674 try:
674 try:
675 patch.patch(ui, repo, tmpname, strip=strip, files=files,
675 patch.patch(ui, repo, tmpname, strip=strip, files=files,
676 eolmode=None, similarity=sim / 100.0)
676 eolmode=None, similarity=sim / 100.0)
677 except patch.PatchError, e:
677 except patch.PatchError, e:
678 if not partial:
678 if not partial:
679 raise util.Abort(str(e))
679 raise util.Abort(str(e))
680 if partial:
680 if partial:
681 rejects = True
681 rejects = True
682
682
683 files = list(files)
683 files = list(files)
684 if opts.get('no_commit'):
684 if opts.get('no_commit'):
685 if message:
685 if message:
686 msgs.append(message)
686 msgs.append(message)
687 else:
687 else:
688 if opts.get('exact') or p2:
688 if opts.get('exact') or p2:
689 # If you got here, you either use --force and know what
689 # If you got here, you either use --force and know what
690 # you are doing or used --exact or a merge patch while
690 # you are doing or used --exact or a merge patch while
691 # being updated to its first parent.
691 # being updated to its first parent.
692 m = None
692 m = None
693 else:
693 else:
694 m = scmutil.matchfiles(repo, files or [])
694 m = scmutil.matchfiles(repo, files or [])
695 editform = mergeeditform(repo[None], 'import.normal')
695 editform = mergeeditform(repo[None], 'import.normal')
696 if opts.get('exact'):
696 if opts.get('exact'):
697 editor = None
697 editor = None
698 else:
698 else:
699 editor = getcommiteditor(editform=editform, **opts)
699 editor = getcommiteditor(editform=editform, **opts)
700 n = repo.commit(message, opts.get('user') or user,
700 n = repo.commit(message, opts.get('user') or user,
701 opts.get('date') or date, match=m,
701 opts.get('date') or date, match=m,
702 editor=editor, force=partial)
702 editor=editor, force=partial)
703 repo.dirstate.endparentchange()
703 repo.dirstate.endparentchange()
704 else:
704 else:
705 if opts.get('exact') or opts.get('import_branch'):
705 if opts.get('exact') or opts.get('import_branch'):
706 branch = branch or 'default'
706 branch = branch or 'default'
707 else:
707 else:
708 branch = p1.branch()
708 branch = p1.branch()
709 store = patch.filestore()
709 store = patch.filestore()
710 try:
710 try:
711 files = set()
711 files = set()
712 try:
712 try:
713 patch.patchrepo(ui, repo, p1, store, tmpname, strip,
713 patch.patchrepo(ui, repo, p1, store, tmpname, strip,
714 files, eolmode=None)
714 files, eolmode=None)
715 except patch.PatchError, e:
715 except patch.PatchError, e:
716 raise util.Abort(str(e))
716 raise util.Abort(str(e))
717 if opts.get('exact'):
717 if opts.get('exact'):
718 editor = None
718 editor = None
719 else:
719 else:
720 editor = getcommiteditor(editform='import.bypass')
720 editor = getcommiteditor(editform='import.bypass')
721 memctx = context.makememctx(repo, (p1.node(), p2.node()),
721 memctx = context.makememctx(repo, (p1.node(), p2.node()),
722 message,
722 message,
723 opts.get('user') or user,
723 opts.get('user') or user,
724 opts.get('date') or date,
724 opts.get('date') or date,
725 branch, files, store,
725 branch, files, store,
726 editor=editor)
726 editor=editor)
727 n = memctx.commit()
727 n = memctx.commit()
728 finally:
728 finally:
729 store.close()
729 store.close()
730 if opts.get('exact') and opts.get('no_commit'):
730 if opts.get('exact') and opts.get('no_commit'):
731 # --exact with --no-commit is still useful in that it does merge
731 # --exact with --no-commit is still useful in that it does merge
732 # and branch bits
732 # and branch bits
733 ui.warn(_("warning: can't check exact import with --no-commit\n"))
733 ui.warn(_("warning: can't check exact import with --no-commit\n"))
734 elif opts.get('exact') and hex(n) != nodeid:
734 elif opts.get('exact') and hex(n) != nodeid:
735 raise util.Abort(_('patch is damaged or loses information'))
735 raise util.Abort(_('patch is damaged or loses information'))
736 if n:
736 if n:
737 # i18n: refers to a short changeset id
737 # i18n: refers to a short changeset id
738 msg = _('created %s') % short(n)
738 msg = _('created %s') % short(n)
739 return (msg, n, rejects)
739 return (msg, n, rejects)
740 finally:
740 finally:
741 os.unlink(tmpname)
741 os.unlink(tmpname)
742
742
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.

    :revs: revisions to export (each written with a "# HG changeset patch"
           header followed by the diff against its first parent)
    :template: filename template expanded per revision by makefileobj;
               an empty template (with no fp) writes to the ui
    :fp: optional file object to write all patches to
    :switch_parent: diff against the second parent of merges instead of
                    the first
    :opts: diff options passed through to patch.diffui
    '''

    total = len(revs)
    # Width of the widest revision number, for zero-padding in templates.
    revwidth = max([len(str(rev)) for rev in revs])
    filemode = {}

    def single(rev, seqno, fp):
        # Write one revision as a patch to fp (or a file derived from
        # template, or the ui when neither is available).
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        shouldclose = False
        if not fp and len(template) > 0:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='wb',
                             modemap=filemode)
            if fp != template:
                # we opened a real file here, so close it when done
                shouldclose = True
        if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        if not fp:
            write = repo.ui.write
        else:
            def write(s, **kw):
                fp.write(s)

        write("# HG changeset patch\n")
        write("# User %s\n" % ctx.user())
        write("# Date %d %d\n" % ctx.date())
        write("# %s\n" % util.datestr(ctx.date()))
        if branch and branch != 'default':
            write("# Branch %s\n" % branch)
        write("# Node ID %s\n" % hex(node))
        write("# Parent  %s\n" % hex(prev))
        if len(parents) > 1:
            write("# Parent  %s\n" % hex(parents[1]))
        write(ctx.description().rstrip())
        write("\n\n")

        for chunk, label in patch.diffui(repo, prev, node, opts=opts):
            write(chunk, label=label)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)
800
800
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   listsubrepos=False):
    '''show diff or diffstat.

    Writes either a full diff (stat=False) or a diffstat summary
    (stat=True) between node1 and node2 to fp, or to the ui when fp is
    None.  With listsubrepos, the diff of each subrepo present in either
    context is appended as well.
    '''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            # drop any label keyword; plain file objects don't take it
            fp.write(s)

    if stat:
        # diffstat only needs the file/hunk structure, not context lines
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width,
                                             git=diffopts.git):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.narrowmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
843
843
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        # matchfn selects the files whose diff/diffstat to show (or None)
        self.matchfn = matchfn
        self.diffopts = diffopts
        # per-rev buffered output, emitted (in caller-chosen order) by flush()
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, rev):
        # Emit any buffered header/hunk for rev; return 1 if a hunk was
        # written, 0 otherwise.  A header identical to the last one
        # written is suppressed.
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        # In buffered mode, capture the rendered changeset into self.hunk
        # keyed by rev (with color labels preserved) instead of writing it.
        if self.buffered:
            self.ui.pushbuffer()
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)),
                          label='log.node')
            return

        log = self.repo.changelog
        date = util.datestr(ctx.date())

        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset:   %d:%s\n") % (rev, hexfunc(changenode)),
                      label='log.changeset changeset.%s' % ctx.phasestr())

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch:      %s\n") % branch,
                          label='log.branch')

        for name, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if name == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase:       %s\n") % _(ctx.phasestr()),
                          label='log.phase')
        for parent in parents:
            label = 'log.parent changeset.%s' % self.repo[parent[0]].phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent:      %d:%s\n") % parent,
                          label=label)

        if self.ui.debugflag:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest:    %d:%s\n") %
                          (self.repo.manifest.rev(mnode), hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user:        %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date:        %s\n") % date,
                      label='log.date')

        if self.ui.debugflag:
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files:       %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies:      %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra:       %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary:     %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(changenode, matchfn)

    def showpatch(self, node, matchfn):
        # Write the diffstat and/or diff for node, depending on the
        # --stat/--patch options recorded in self.diffopts.
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            prev = self.repo.changelog.parents(node)[0]
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents
1025
1025
1026 class jsonchangeset(changeset_printer):
1026 class jsonchangeset(changeset_printer):
1027 '''format changeset information.'''
1027 '''format changeset information.'''
1028
1028
1029 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1029 def __init__(self, ui, repo, matchfn, diffopts, buffered):
1030 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1030 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1031 self.cache = {}
1031 self.cache = {}
1032 self._first = True
1032 self._first = True
1033
1033
1034 def close(self):
1034 def close(self):
1035 if not self._first:
1035 if not self._first:
1036 self.ui.write("\n]\n")
1036 self.ui.write("\n]\n")
1037 else:
1037 else:
1038 self.ui.write("[]\n")
1038 self.ui.write("[]\n")
1039
1039
1040 def _show(self, ctx, copies, matchfn, props):
1040 def _show(self, ctx, copies, matchfn, props):
1041 '''show a single changeset or file revision'''
1041 '''show a single changeset or file revision'''
1042 hexnode = hex(ctx.node())
1042 hexnode = hex(ctx.node())
1043 rev = ctx.rev()
1043 rev = ctx.rev()
1044 j = encoding.jsonescape
1044 j = encoding.jsonescape
1045
1045
1046 if self._first:
1046 if self._first:
1047 self.ui.write("[\n {")
1047 self.ui.write("[\n {")
1048 self._first = False
1048 self._first = False
1049 else:
1049 else:
1050 self.ui.write(",\n {")
1050 self.ui.write(",\n {")
1051
1051
1052 if self.ui.quiet:
1052 if self.ui.quiet:
1053 self.ui.write('\n "rev": %d' % rev)
1053 self.ui.write('\n "rev": %d' % rev)
1054 self.ui.write(',\n "node": "%s"' % hexnode)
1054 self.ui.write(',\n "node": "%s"' % hexnode)
1055 self.ui.write('\n }')
1055 self.ui.write('\n }')
1056 return
1056 return
1057
1057
1058 self.ui.write('\n "rev": %d' % rev)
1058 self.ui.write('\n "rev": %d' % rev)
1059 self.ui.write(',\n "node": "%s"' % hexnode)
1059 self.ui.write(',\n "node": "%s"' % hexnode)
1060 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1060 self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
1061 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1061 self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
1062 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1062 self.ui.write(',\n "user": "%s"' % j(ctx.user()))
1063 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1063 self.ui.write(',\n "date": [%d, %d]' % ctx.date())
1064 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1064 self.ui.write(',\n "desc": "%s"' % j(ctx.description()))
1065
1065
1066 self.ui.write(',\n "bookmarks": [%s]' %
1066 self.ui.write(',\n "bookmarks": [%s]' %
1067 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1067 ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
1068 self.ui.write(',\n "tags": [%s]' %
1068 self.ui.write(',\n "tags": [%s]' %
1069 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1069 ", ".join('"%s"' % j(t) for t in ctx.tags()))
1070 self.ui.write(',\n "parents": [%s]' %
1070 self.ui.write(',\n "parents": [%s]' %
1071 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1071 ", ".join('"%s"' % c.hex() for c in ctx.parents()))
1072
1072
1073 if self.ui.debugflag:
1073 if self.ui.debugflag:
1074 self.ui.write(',\n "manifest": "%s"' % hex(ctx.manifestnode()))
1074 self.ui.write(',\n "manifest": "%s"' % hex(ctx.manifestnode()))
1075
1075
1076 self.ui.write(',\n "extra": {%s}' %
1076 self.ui.write(',\n "extra": {%s}' %
1077 ", ".join('"%s": "%s"' % (j(k), j(v))
1077 ", ".join('"%s": "%s"' % (j(k), j(v))
1078 for k, v in ctx.extra().items()))
1078 for k, v in ctx.extra().items()))
1079
1079
1080 files = ctx.p1().status(ctx)
1080 files = ctx.p1().status(ctx)
1081 self.ui.write(',\n "modified": [%s]' %
1081 self.ui.write(',\n "modified": [%s]' %
1082 ", ".join('"%s"' % j(f) for f in files[0]))
1082 ", ".join('"%s"' % j(f) for f in files[0]))
1083 self.ui.write(',\n "added": [%s]' %
1083 self.ui.write(',\n "added": [%s]' %
1084 ", ".join('"%s"' % j(f) for f in files[1]))
1084 ", ".join('"%s"' % j(f) for f in files[1]))
1085 self.ui.write(',\n "removed": [%s]' %
1085 self.ui.write(',\n "removed": [%s]' %
1086 ", ".join('"%s"' % j(f) for f in files[2]))
1086 ", ".join('"%s"' % j(f) for f in files[2]))
1087
1087
1088 elif self.ui.verbose:
1088 elif self.ui.verbose:
1089 self.ui.write(',\n "files": [%s]' %
1089 self.ui.write(',\n "files": [%s]' %
1090 ", ".join('"%s"' % j(f) for f in ctx.files()))
1090 ", ".join('"%s"' % j(f) for f in ctx.files()))
1091
1091
1092 if copies:
1092 if copies:
1093 self.ui.write(',\n "copies": {%s}' %
1093 self.ui.write(',\n "copies": {%s}' %
1094 ", ".join('"%s": "%s"' % (j(k), j(v))
1094 ", ".join('"%s": "%s"' % (j(k), j(v))
1095 for k, v in copies))
1095 for k, v in copies))
1096
1096
1097 matchfn = self.matchfn
1097 matchfn = self.matchfn
1098 if matchfn:
1098 if matchfn:
1099 stat = self.diffopts.get('stat')
1099 stat = self.diffopts.get('stat')
1100 diff = self.diffopts.get('patch')
1100 diff = self.diffopts.get('patch')
1101 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1101 diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
1102 node, prev = ctx.node(), ctx.p1().node()
1102 node, prev = ctx.node(), ctx.p1().node()
1103 if stat:
1103 if stat:
1104 self.ui.pushbuffer()
1104 self.ui.pushbuffer()
1105 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1105 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1106 match=matchfn, stat=True)
1106 match=matchfn, stat=True)
1107 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1107 self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
1108 if diff:
1108 if diff:
1109 self.ui.pushbuffer()
1109 self.ui.pushbuffer()
1110 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1110 diffordiffstat(self.ui, self.repo, diffopts, prev, node,
1111 match=matchfn, stat=False)
1111 match=matchfn, stat=False)
1112 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1112 self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))
1113
1113
1114 self.ui.write("\n }")
1114 self.ui.write("\n }")
1115
1115
1116 class changeset_templater(changeset_printer):
1116 class changeset_templater(changeset_printer):
1117 '''format changeset information.'''
1117 '''format changeset information.'''
1118
1118
1119 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1119 def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
1120 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1120 changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
1121 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1121 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
1122 defaulttempl = {
1122 defaulttempl = {
1123 'parent': '{rev}:{node|formatnode} ',
1123 'parent': '{rev}:{node|formatnode} ',
1124 'manifest': '{rev}:{node|formatnode}',
1124 'manifest': '{rev}:{node|formatnode}',
1125 'file_copy': '{name} ({source})',
1125 'file_copy': '{name} ({source})',
1126 'extra': '{key}={value|stringescape}'
1126 'extra': '{key}={value|stringescape}'
1127 }
1127 }
1128 # filecopy is preserved for compatibility reasons
1128 # filecopy is preserved for compatibility reasons
1129 defaulttempl['filecopy'] = defaulttempl['file_copy']
1129 defaulttempl['filecopy'] = defaulttempl['file_copy']
1130 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1130 self.t = templater.templater(mapfile, {'formatnode': formatnode},
1131 cache=defaulttempl)
1131 cache=defaulttempl)
1132 if tmpl:
1132 if tmpl:
1133 self.t.cache['changeset'] = tmpl
1133 self.t.cache['changeset'] = tmpl
1134
1134
1135 self.cache = {}
1135 self.cache = {}
1136
1136
1137 def _meaningful_parentrevs(self, ctx):
1137 def _meaningful_parentrevs(self, ctx):
1138 """Return list of meaningful (or all if debug) parentrevs for rev.
1138 """Return list of meaningful (or all if debug) parentrevs for rev.
1139 """
1139 """
1140 parents = ctx.parents()
1140 parents = ctx.parents()
1141 if len(parents) > 1:
1141 if len(parents) > 1:
1142 return parents
1142 return parents
1143 if self.ui.debugflag:
1143 if self.ui.debugflag:
1144 return [parents[0], self.repo['null']]
1144 return [parents[0], self.repo['null']]
1145 if parents[0].rev() >= ctx.rev() - 1:
1145 if parents[0].rev() >= ctx.rev() - 1:
1146 return []
1146 return []
1147 return parents
1147 return parents
1148
1148
1149 def _show(self, ctx, copies, matchfn, props):
1149 def _show(self, ctx, copies, matchfn, props):
1150 '''show a single changeset or file revision'''
1150 '''show a single changeset or file revision'''
1151
1151
1152 showlist = templatekw.showlist
1152 showlist = templatekw.showlist
1153
1153
1154 # showparents() behaviour depends on ui trace level which
1154 # showparents() behaviour depends on ui trace level which
1155 # causes unexpected behaviours at templating level and makes
1155 # causes unexpected behaviours at templating level and makes
1156 # it harder to extract it in a standalone function. Its
1156 # it harder to extract it in a standalone function. Its
1157 # behaviour cannot be changed so leave it here for now.
1157 # behaviour cannot be changed so leave it here for now.
1158 def showparents(**args):
1158 def showparents(**args):
1159 ctx = args['ctx']
1159 ctx = args['ctx']
1160 parents = [[('rev', p.rev()),
1160 parents = [[('rev', p.rev()),
1161 ('node', p.hex()),
1161 ('node', p.hex()),
1162 ('phase', p.phasestr())]
1162 ('phase', p.phasestr())]
1163 for p in self._meaningful_parentrevs(ctx)]
1163 for p in self._meaningful_parentrevs(ctx)]
1164 return showlist('parent', parents, **args)
1164 return showlist('parent', parents, **args)
1165
1165
1166 props = props.copy()
1166 props = props.copy()
1167 props.update(templatekw.keywords)
1167 props.update(templatekw.keywords)
1168 props['parents'] = showparents
1168 props['parents'] = showparents
1169 props['templ'] = self.t
1169 props['templ'] = self.t
1170 props['ctx'] = ctx
1170 props['ctx'] = ctx
1171 props['repo'] = self.repo
1171 props['repo'] = self.repo
1172 props['revcache'] = {'copies': copies}
1172 props['revcache'] = {'copies': copies}
1173 props['cache'] = self.cache
1173 props['cache'] = self.cache
1174
1174
1175 # find correct templates for current mode
1175 # find correct templates for current mode
1176
1176
1177 tmplmodes = [
1177 tmplmodes = [
1178 (True, None),
1178 (True, None),
1179 (self.ui.verbose, 'verbose'),
1179 (self.ui.verbose, 'verbose'),
1180 (self.ui.quiet, 'quiet'),
1180 (self.ui.quiet, 'quiet'),
1181 (self.ui.debugflag, 'debug'),
1181 (self.ui.debugflag, 'debug'),
1182 ]
1182 ]
1183
1183
1184 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
1184 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
1185 for mode, postfix in tmplmodes:
1185 for mode, postfix in tmplmodes:
1186 for type in types:
1186 for type in types:
1187 cur = postfix and ('%s_%s' % (type, postfix)) or type
1187 cur = postfix and ('%s_%s' % (type, postfix)) or type
1188 if mode and cur in self.t:
1188 if mode and cur in self.t:
1189 types[type] = cur
1189 types[type] = cur
1190
1190
1191 try:
1191 try:
1192
1192
1193 # write header
1193 # write header
1194 if types['header']:
1194 if types['header']:
1195 h = templater.stringify(self.t(types['header'], **props))
1195 h = templater.stringify(self.t(types['header'], **props))
1196 if self.buffered:
1196 if self.buffered:
1197 self.header[ctx.rev()] = h
1197 self.header[ctx.rev()] = h
1198 else:
1198 else:
1199 if self.lastheader != h:
1199 if self.lastheader != h:
1200 self.lastheader = h
1200 self.lastheader = h
1201 self.ui.write(h)
1201 self.ui.write(h)
1202
1202
1203 # write changeset metadata, then patch if requested
1203 # write changeset metadata, then patch if requested
1204 key = types['changeset']
1204 key = types['changeset']
1205 self.ui.write(templater.stringify(self.t(key, **props)))
1205 self.ui.write(templater.stringify(self.t(key, **props)))
1206 self.showpatch(ctx.node(), matchfn)
1206 self.showpatch(ctx.node(), matchfn)
1207
1207
1208 if types['footer']:
1208 if types['footer']:
1209 if not self.footer:
1209 if not self.footer:
1210 self.footer = templater.stringify(self.t(types['footer'],
1210 self.footer = templater.stringify(self.t(types['footer'],
1211 **props))
1211 **props))
1212
1212
1213 except KeyError, inst:
1213 except KeyError, inst:
1214 msg = _("%s: no key named '%s'")
1214 msg = _("%s: no key named '%s'")
1215 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1215 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
1216 except SyntaxError, inst:
1216 except SyntaxError, inst:
1217 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1217 raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
1218
1218
def gettemplate(ui, tmpl, style):
    """
    Find the template matching the given template spec or style.

    Returns a (template, mapfile) pair; exactly one of the two is set
    (or both are None when nothing was specified anywhere).
    """

    # ui settings
    if not tmpl and not style: # template are stronger than style
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            try:
                tmpl = templater.parsestring(tmpl)
            except SyntaxError:
                # not a quoted string: take it literally
                tmpl = templater.parsestring(tmpl, quoted=False)
            return tmpl, None
        else:
            style = util.expandpath(ui.config('ui', 'style', ''))

    if not tmpl and style:
        mapfile = style
        # a bare style name is resolved against the shipped template path
        if not os.path.split(mapfile)[0]:
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return None, mapfile

    if not tmpl:
        return None, None

    # looks like a literal template?
    if '{' in tmpl:
        return tmpl, None

    # perhaps a stock style?
    if not os.path.split(tmpl)[0]:
        mapname = (templater.templatepath('map-cmdline.' + tmpl)
                   or templater.templatepath(tmpl))
        if mapname and os.path.isfile(mapname):
            return None, mapname

    # perhaps it's a reference to [templates]
    t = ui.config('templates', tmpl)
    if t:
        try:
            tmpl = templater.parsestring(t)
        except SyntaxError:
            tmpl = templater.parsestring(t, quoted=False)
        return tmpl, None

    if tmpl == 'list':
        ui.write(_("available styles: %s\n") % templater.stylelist())
        raise util.Abort(_("specify a template"))

    # perhaps it's a path to a map or a template
    if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
        # is it a mapfile for a style?
        if os.path.basename(tmpl).startswith("map-"):
            return None, os.path.realpath(tmpl)
        # read the template file, closing the handle explicitly instead of
        # leaking it (the original used a bare open(tmpl).read())
        fp = open(tmpl)
        try:
            tmpl = fp.read()
        finally:
            fp.close()
        return tmpl, None

    # constant string?
    return tmpl, None

1283 def show_changeset(ui, repo, opts, buffered=False):
1283 def show_changeset(ui, repo, opts, buffered=False):
1284 """show one changeset using template or regular display.
1284 """show one changeset using template or regular display.
1285
1285
1286 Display format will be the first non-empty hit of:
1286 Display format will be the first non-empty hit of:
1287 1. option 'template'
1287 1. option 'template'
1288 2. option 'style'
1288 2. option 'style'
1289 3. [ui] setting 'logtemplate'
1289 3. [ui] setting 'logtemplate'
1290 4. [ui] setting 'style'
1290 4. [ui] setting 'style'
1291 If all of these values are either the unset or the empty string,
1291 If all of these values are either the unset or the empty string,
1292 regular display via changeset_printer() is done.
1292 regular display via changeset_printer() is done.
1293 """
1293 """
1294 # options
1294 # options
1295 matchfn = None
1295 matchfn = None
1296 if opts.get('patch') or opts.get('stat'):
1296 if opts.get('patch') or opts.get('stat'):
1297 matchfn = scmutil.matchall(repo)
1297 matchfn = scmutil.matchall(repo)
1298
1298
1299 if opts.get('template') == 'json':
1299 if opts.get('template') == 'json':
1300 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1300 return jsonchangeset(ui, repo, matchfn, opts, buffered)
1301
1301
1302 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1302 tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
1303
1303
1304 if not tmpl and not mapfile:
1304 if not tmpl and not mapfile:
1305 return changeset_printer(ui, repo, matchfn, opts, buffered)
1305 return changeset_printer(ui, repo, matchfn, opts, buffered)
1306
1306
1307 try:
1307 try:
1308 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1308 t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
1309 buffered)
1309 buffered)
1310 except SyntaxError, inst:
1310 except SyntaxError, inst:
1311 raise util.Abort(inst.args[0])
1311 raise util.Abort(inst.args[0])
1312 return t
1312 return t
1313
1313
def showmarker(ui, marker):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    # precursor node, then each successor node, space separated
    ui.write(hex(marker.precnode()))
    for succ in marker.succnodes():
        ui.write(' ' + hex(succ))
    ui.write(' %X ' % marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
    ui.write('(%s) ' % util.datestr(marker.date()))
    # metadata minus the date, which was already printed above
    ui.write('{%s}' % (', '.join('%r: %r' % (k, v) for k, v in
                                 sorted(marker.metadata().items())
                                 if k != 'date')))
    ui.write('\n')

def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    datematch = util.matchdate(date)
    matcher = scmutil.matchall(repo)
    # rev -> date tuple, filled in by the prepare hook below
    matched = {}

    def prep(ctx, fns):
        when = ctx.date()
        if datematch(when[0]):
            matched[ctx.rev()] = when

    # walkchangerevs yields tipmost-first, so the first hit wins
    for ctx in walkchangerevs(repo, matcher, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in matched:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(matched[rev])))
            return str(rev)

    raise util.Abort(_("revision matching date not found"))

def increasingwindows(windowsize=8, sizelimit=512):
    """Yield window sizes forever, doubling until sizelimit is reached.

    Produces windowsize, 2*windowsize, ... capped once a yielded value
    is >= sizelimit (that value then repeats indefinitely).
    """
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size *= 2

class FileWalkError(Exception):
    """Raised when file history cannot be walked using filelogs alone."""

def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)

    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        entries = []
        for idx in xrange(0, last + 1):
            linkrev = filelog.linkrev(idx)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = [filelog.linkrev(p)
                              for p in filelog.parentrevs(idx)
                              if p != nullrev]
            n = filelog.node(idx)
            entries.append((linkrev, parentlinkrevs,
                            follow and filelog.renamed(n)))

        return reversed(entries)

    def iterfiles():
        # explicitly named files first, then any copy sources discovered
        # while walking (appended to `copies` by the loop below)
        pctx = repo['.']
        for filename in match.files():
            if not follow:
                yield filename, None
            else:
                if filename not in pctx:
                    raise util.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted

1460 def walkchangerevs(repo, match, opts, prepare):
1460 def walkchangerevs(repo, match, opts, prepare):
1461 '''Iterate over files and the revs in which they changed.
1461 '''Iterate over files and the revs in which they changed.
1462
1462
1463 Callers most commonly need to iterate backwards over the history
1463 Callers most commonly need to iterate backwards over the history
1464 in which they are interested. Doing so has awful (quadratic-looking)
1464 in which they are interested. Doing so has awful (quadratic-looking)
1465 performance, so we use iterators in a "windowed" way.
1465 performance, so we use iterators in a "windowed" way.
1466
1466
1467 We walk a window of revisions in the desired order. Within the
1467 We walk a window of revisions in the desired order. Within the
1468 window, we first walk forwards to gather data, then in the desired
1468 window, we first walk forwards to gather data, then in the desired
1469 order (usually backwards) to display it.
1469 order (usually backwards) to display it.
1470
1470
1471 This function returns an iterator yielding contexts. Before
1471 This function returns an iterator yielding contexts. Before
1472 yielding each context, the iterator will first call the prepare
1472 yielding each context, the iterator will first call the prepare
1473 function on each context in the window in forward order.'''
1473 function on each context in the window in forward order.'''
1474
1474
1475 follow = opts.get('follow') or opts.get('follow_first')
1475 follow = opts.get('follow') or opts.get('follow_first')
1476 revs = _logrevs(repo, opts)
1476 revs = _logrevs(repo, opts)
1477 if not revs:
1477 if not revs:
1478 return []
1478 return []
1479 wanted = set()
1479 wanted = set()
1480 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1480 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1481 fncache = {}
1481 fncache = {}
1482 change = repo.changectx
1482 change = repo.changectx
1483
1483
1484 # First step is to fill wanted, the set of revisions that we want to yield.
1484 # First step is to fill wanted, the set of revisions that we want to yield.
1485 # When it does not induce extra cost, we also fill fncache for revisions in
1485 # When it does not induce extra cost, we also fill fncache for revisions in
1486 # wanted: a cache of filenames that were changed (ctx.files()) and that
1486 # wanted: a cache of filenames that were changed (ctx.files()) and that
1487 # match the file filtering conditions.
1487 # match the file filtering conditions.
1488
1488
1489 if not slowpath and not match.files():
1489 if not slowpath and not match.files():
1490 # No files, no patterns. Display all revs.
1490 # No files, no patterns. Display all revs.
1491 wanted = revs
1491 wanted = revs
1492
1492
1493 if not slowpath and match.files():
1493 if not slowpath and match.files():
1494 # We only have to read through the filelog to find wanted revisions
1494 # We only have to read through the filelog to find wanted revisions
1495
1495
1496 try:
1496 try:
1497 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1497 wanted = walkfilerevs(repo, match, follow, revs, fncache)
1498 except FileWalkError:
1498 except FileWalkError:
1499 slowpath = True
1499 slowpath = True
1500
1500
1501 # We decided to fall back to the slowpath because at least one
1501 # We decided to fall back to the slowpath because at least one
1502 # of the paths was not a file. Check to see if at least one of them
1502 # of the paths was not a file. Check to see if at least one of them
1503 # existed in history, otherwise simply return
1503 # existed in history, otherwise simply return
1504 for path in match.files():
1504 for path in match.files():
1505 if path == '.' or path in repo.store:
1505 if path == '.' or path in repo.store:
1506 break
1506 break
1507 else:
1507 else:
1508 return []
1508 return []
1509
1509
1510 if slowpath:
1510 if slowpath:
1511 # We have to read the changelog to match filenames against
1511 # We have to read the changelog to match filenames against
1512 # changed files
1512 # changed files
1513
1513
1514 if follow:
1514 if follow:
1515 raise util.Abort(_('can only follow copies/renames for explicit '
1515 raise util.Abort(_('can only follow copies/renames for explicit '
1516 'filenames'))
1516 'filenames'))
1517
1517
1518 # The slow path checks files modified in every changeset.
1518 # The slow path checks files modified in every changeset.
1519 # This is really slow on large repos, so compute the set lazily.
1519 # This is really slow on large repos, so compute the set lazily.
1520 class lazywantedset(object):
1520 class lazywantedset(object):
1521 def __init__(self):
1521 def __init__(self):
1522 self.set = set()
1522 self.set = set()
1523 self.revs = set(revs)
1523 self.revs = set(revs)
1524
1524
1525 # No need to worry about locality here because it will be accessed
1525 # No need to worry about locality here because it will be accessed
1526 # in the same order as the increasing window below.
1526 # in the same order as the increasing window below.
1527 def __contains__(self, value):
1527 def __contains__(self, value):
1528 if value in self.set:
1528 if value in self.set:
1529 return True
1529 return True
1530 elif not value in self.revs:
1530 elif not value in self.revs:
1531 return False
1531 return False
1532 else:
1532 else:
1533 self.revs.discard(value)
1533 self.revs.discard(value)
1534 ctx = change(value)
1534 ctx = change(value)
1535 matches = filter(match, ctx.files())
1535 matches = filter(match, ctx.files())
1536 if matches:
1536 if matches:
1537 fncache[value] = matches
1537 fncache[value] = matches
1538 self.set.add(value)
1538 self.set.add(value)
1539 return True
1539 return True
1540 return False
1540 return False
1541
1541
1542 def discard(self, value):
1542 def discard(self, value):
1543 self.revs.discard(value)
1543 self.revs.discard(value)
1544 self.set.discard(value)
1544 self.set.discard(value)
1545
1545
1546 wanted = lazywantedset()
1546 wanted = lazywantedset()
1547
1547
1548 class followfilter(object):
1548 class followfilter(object):
1549 def __init__(self, onlyfirst=False):
1549 def __init__(self, onlyfirst=False):
1550 self.startrev = nullrev
1550 self.startrev = nullrev
1551 self.roots = set()
1551 self.roots = set()
1552 self.onlyfirst = onlyfirst
1552 self.onlyfirst = onlyfirst
1553
1553
1554 def match(self, rev):
1554 def match(self, rev):
1555 def realparents(rev):
1555 def realparents(rev):
1556 if self.onlyfirst:
1556 if self.onlyfirst:
1557 return repo.changelog.parentrevs(rev)[0:1]
1557 return repo.changelog.parentrevs(rev)[0:1]
1558 else:
1558 else:
1559 return filter(lambda x: x != nullrev,
1559 return filter(lambda x: x != nullrev,
1560 repo.changelog.parentrevs(rev))
1560 repo.changelog.parentrevs(rev))
1561
1561
1562 if self.startrev == nullrev:
1562 if self.startrev == nullrev:
1563 self.startrev = rev
1563 self.startrev = rev
1564 return True
1564 return True
1565
1565
1566 if rev > self.startrev:
1566 if rev > self.startrev:
1567 # forward: all descendants
1567 # forward: all descendants
1568 if not self.roots:
1568 if not self.roots:
1569 self.roots.add(self.startrev)
1569 self.roots.add(self.startrev)
1570 for parent in realparents(rev):
1570 for parent in realparents(rev):
1571 if parent in self.roots:
1571 if parent in self.roots:
1572 self.roots.add(rev)
1572 self.roots.add(rev)
1573 return True
1573 return True
1574 else:
1574 else:
1575 # backwards: all parents
1575 # backwards: all parents
1576 if not self.roots:
1576 if not self.roots:
1577 self.roots.update(realparents(self.startrev))
1577 self.roots.update(realparents(self.startrev))
1578 if rev in self.roots:
1578 if rev in self.roots:
1579 self.roots.remove(rev)
1579 self.roots.remove(rev)
1580 self.roots.update(realparents(rev))
1580 self.roots.update(realparents(rev))
1581 return True
1581 return True
1582
1582
1583 return False
1583 return False
1584
1584
1585 # it might be worthwhile to do this in the iterator if the rev range
1585 # it might be worthwhile to do this in the iterator if the rev range
1586 # is descending and the prune args are all within that range
1586 # is descending and the prune args are all within that range
1587 for rev in opts.get('prune', ()):
1587 for rev in opts.get('prune', ()):
1588 rev = repo[rev].rev()
1588 rev = repo[rev].rev()
1589 ff = followfilter()
1589 ff = followfilter()
1590 stop = min(revs[0], revs[-1])
1590 stop = min(revs[0], revs[-1])
1591 for x in xrange(rev, stop - 1, -1):
1591 for x in xrange(rev, stop - 1, -1):
1592 if ff.match(x):
1592 if ff.match(x):
1593 wanted = wanted - [x]
1593 wanted = wanted - [x]
1594
1594
1595 # Now that wanted is correctly initialized, we can iterate over the
1595 # Now that wanted is correctly initialized, we can iterate over the
1596 # revision range, yielding only revisions in wanted.
1596 # revision range, yielding only revisions in wanted.
    def iterate():
        """Generator yielding the changectx of each wanted revision.

        Revisions are consumed from 'revs' in progressively larger
        windows; within a window, prepare() is invoked in ascending
        (changelog) order while results are yielded in input order.
        """
        # Build the per-revision predicate: with --follow and no file
        # patterns, chain a followfilter in front of the wanted set.
        if follow and not match.files():
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                try:
                    rev = it.next()
                    if want(rev):
                        nrevs.append(rev)
                except (StopIteration):
                    stopiteration = True
                    break
            # Call the preparation callback in changelog order so
            # caches (filelogs, manifests) are read sequentially.
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    # No cached file list: lazily filter ctx.files()
                    # through the matcher.
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break
1633
1633
1634 return iterate()
1634 return iterate()
1635
1635
def _makefollowlogfilematcher(repo, files, followfirst):
    """Return a rev -> matcher callable for --patch/--stat with --follow.

    'files' must be repo-root-relative paths; 'followfirst' limits the
    ancestry walk to first parents.
    """
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating linkrevs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    fcacheready = [False]
    pctx = repo['.']

    def populate():
        # Map each linkrev to the names of the file and its ancestors
        # touched at that revision.
        for fn in files:
            for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
                for c in i:
                    fcache.setdefault(c.linkrev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher
1662
1662
1663 def _makenofollowlogfilematcher(repo, pats, opts):
1663 def _makenofollowlogfilematcher(repo, pats, opts):
1664 '''hook for extensions to override the filematcher for non-follow cases'''
1664 '''hook for extensions to override the filematcher for non-follow cases'''
1665 return None
1665 return None
1666
1666
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # Translation table: log option name -> (revset template, joiner
    # used when the option value is a list).
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    # 0/1 index into the (plain, firstparent) template pairs below.
    followfirst = opts.get('follow_first') and 1 or 0
    # --follow with FILE behaviour depends on revs...
    it = iter(revs)
    startrev = it.next()
    try:
        followdescendants = startrev < it.next()
    except (StopIteration):
        followdescendants = False

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    pctx = repo[None]
    match, pats = scmutil.matchandpats(pctx, pats, opts)
    slowpath = match.anypats() or (match.files() and opts.get('removed'))
    if not slowpath:
        # Verify every plain file pattern has a filelog; otherwise we
        # must fall back to scanning changesets (the slow path).
        for f in match.files():
            if follow and f not in pctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise util.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # Assemble the final revset: translate each surviving option via
    # opt2revset and AND everything together.
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
1817
1817
def _logrevs(repo, opts):
    """Resolve the revision set a log-like command should visit.

    Honours --rev when given; otherwise derives a default that depends
    on --follow/--follow-first.
    """
    # Default --rev value depends on --follow but --follow behaviour
    # depends on revisions resolved from --rev...
    following = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        return scmutil.revrange(repo, opts['rev'])
    if following:
        if repo.dirstate.p1() == nullid:
            # Working directory has no parent: nothing to follow.
            return revset.baseset()
        return repo.revs('reverse(:.)')
    everything = revset.spanset(repo)
    everything.reverse()
    return everything
1832
1832
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        revs.sort(reverse=True)
    if expr:
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        revs.reverse()
        matcher = revset.match(repo.ui, expr)
        # Revset matches can reorder revisions. "A or B" typically
        # returns the revision matching A then the revision matching B.
        # Sort again to fix that.
        revs = matcher(repo, revs)
        revs.sort(reverse=True)
    if limit is not None:
        # Keep only the first 'limit' revisions (--limit).
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = revset.baseset(limitedrevs)

    return revs, expr, filematcher
1869
1869
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        if not opts.get('rev'):
            revs.reverse()
        matcher = revset.match(repo.ui, expr)
        # Revset matches can reorder revisions. "A or B" typically
        # returns the revision matching A then the revision matching B.
        # Sort again to fix that.
        revs = matcher(repo, revs)
        if not opts.get('rev'):
            revs.sort(reverse=True)
    if limit is not None:
        # Keep only the first 'limit' revisions (--limit). Use the same
        # enumerate-based truncation as getgraphlogrevs() instead of a
        # hand-rolled counter/it.next() loop; both keep min(limit, len)
        # revisions.
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = revset.baseset(limitedrevs)

    return revs, expr, filematcher
1908
1908
def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
                 filematcher=None):
    """Render the revisions of 'dag' as an ASCII graph.

    'showparents' lists nodes drawn as '@' (working dir parents);
    obsolete changesets are drawn as 'x', everything else as 'o'.
    'edgefn' converts each rendered entry into graph edges for
    graphmod.ascii(); 'getrenamed'/'filematcher' optionally supply
    copy information and per-revision file filtering.
    """
    seen, state = [], graphmod.asciistate()
    for rev, type, ctx, parents in dag:
        # Pick the node glyph for this changeset.
        char = 'o'
        if ctx.node() in showparents:
            char = '@'
        elif ctx.obsolete():
            char = 'x'
        copies = None
        if getrenamed and ctx.rev():
            # Collect (dest, source) rename pairs for --copies.
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # The displayer buffered its output; pull it back as lines for
        # the graph renderer (dropping a trailing empty line).
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(rev)
        edges = edgefn(type, char, lines, seen, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()
1937
1937
def graphlog(ui, repo, *pats, **opts):
    """Run the graph variant of 'hg log' (-G/--graph)."""
    # Parameters are identical to log command ones
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        # Bound the rename search to the highest requested rev + 1,
        # when --rev limits the displayed range.
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    showparents = [ctx.node() for ctx in repo[None].parents()]
    displaygraph(ui, revdag, displayer, showparents,
                 graphmod.asciiedges, getrenamed, filematcher)
1953
1953
def checkunsupportedgraphflags(pats, opts):
    """Abort when an option incompatible with -G/--graph is active."""
    incompatible = ["newest_first"]
    for op in incompatible:
        if opts.get(op):
            raise util.Abort(_("-G/--graph option is incompatible with --%s")
                             % op.replace("_", "-"))
1959
1959
def graphrevs(repo, nodes, opts):
    """Reverse 'nodes' in place, honour --limit, and return a graphmod
    node iterator over the result."""
    cap = loglimit(opts)
    nodes.reverse()
    if cap is not None:
        nodes = nodes[:cap]
    return graphmod.nodes(repo, nodes)
1966
1966
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule files matching 'match' for addition (hg add backend).

    'prefix' is the path prefix for nested-repo output; 'explicitonly'
    restricts additions to exactly-named files. Returns the list of
    files that could not be added.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # Intercept the matcher's bad-file callback so failures are both
    # recorded and still reported through the original handler.
    oldbad = match.bad
    match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        # Audit file names for case collisions on request.
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
    for f in wctx.walk(match):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    # Recurse into subrepositories (always; --subrepos only controls
    # whether non-exact matches are added there too).
    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            if opts.get('subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2003
2003
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking files matching 'match' without deleting them.

    Returns (bad, forgot): files that could not be forgotten and files
    that were successfully forgotten (both prefixed for subrepos).
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # Intercept the matcher's bad-file callback so failures are both
    # recorded and still reported through the original handler.
    oldbad = match.bad
    match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
    wctx = repo[None]
    forgot = []
    s = repo.status(match=match, clean=True)
    # modified + added + deleted + clean, i.e. every tracked match.
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        # Explicitly-named files that are not tracked are errors.
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2045
2045
def remove(ui, repo, m, prefix, after, force, subrepos):
    """Unversion (and by default delete) the files matched by `m`.

    `after` records already-deleted files instead of unlinking them,
    `force` removes even modified/added files, `subrepos` recurses into
    every subrepository.  `prefix` is prepended to reported paths when
    invoked for a subrepo.  Returns 0 on success, 1 if any file was
    skipped with a warning.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    def matchessubrepo(matcher, subpath):
        # True if the matcher names the subrepo itself or a path inside it.
        # (hoisted out of the loop below: it depends only on its arguments)
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            if f.startswith(subpath):
                return True
        return False

    for subpath in sorted(wctx.substate):
        if subrepos or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.removefiles(submatch, prefix, after, force, subrepos):
                    ret = 1
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % join(subpath))

    # warn about failure to delete explicit files/dirs
    deleteddirs = scmutil.dirs(deleted)
    for f in m.files():
        def insubrepo():
            for subpath in wctx.substate:
                if f.startswith(subpath):
                    return True
            return False

        # directories containing deleted files count as tracked dirs
        isdir = f in deleteddirs or f in wctx.dirs()
        if f in repo.dirstate or isdir or f == '.' or insubrepo():
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                ui.warn(_('not removing %s: no tracked files\n')
                        % m.rel(f))
            else:
                ui.warn(_('not removing %s: file is untracked\n')
                        % m.rel(f))
            # missing files will generate a warning elsewhere
            ret = 1

    # pick the files to forget/unlink ('toremove' replaces a local that
    # used to be named 'list', shadowing the builtin)
    if force:
        toremove = modified + deleted + clean + added
    elif after:
        toremove = deleted
        for f in modified + added + clean:
            ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
            ret = 1
    else:
        toremove = deleted + clean
        for f in modified:
            ui.warn(_('not removing %s: file is modified (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            ui.warn(_('not removing %s: file has been marked for add'
                      ' (use forget to undo)\n') % m.rel(f))
            ret = 1

    for f in sorted(toremove):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    wlock = repo.wlock()
    try:
        if not after:
            for f in toremove:
                if f in added:
                    continue # we never unlink added files on remove
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(toremove)
    finally:
        wlock.release()

    return ret
2128
2130
def cat(ui, repo, ctx, matcher, prefix, **opts):
    """Write out the contents of the files matched in changeset `ctx`.

    Output goes through `makefileobj` (honoring the --output option);
    `prefix` is prepended to reported paths when called for a subrepo.
    Returns 0 if at least one file was written, 1 otherwise.
    """
    err = 1

    def write(path):
        fp = makefileobj(repo, opts.get('output'), ctx.node(),
                         pathname=os.path.join(prefix, path))
        data = ctx[path].data()
        if opts.get('decode'):
            data = repo.wwritedata(path, data)
        fp.write(data)
        fp.close()

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mf = repo.manifest
        mfnode = ctx._changeset[0]
        if mf.find(mfnode, file)[0]:
            write(file)
            return 0

    # Don't warn about "missing" files that are really in subrepos
    bad = matcher.bad

    def badfn(path, msg):
        # swallow "missing" complaints for paths under a subrepository;
        # delegate everything else to the original callback
        for subpath in ctx.substate:
            if path.startswith(subpath):
                return
        bad(path, msg)

    matcher.bad = badfn

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    # restore the original bad-callback before recursing into subrepos
    matcher.bad = bad

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, matcher)

            # sub.cat returns nonzero on failure; any success clears err
            if not sub.cat(submatch, os.path.join(prefix, sub._path),
                           **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2181
2183
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    if opts.get('date'):
        opts['date'] = util.parsedate(opts.get('date'))
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove') and scmutil.addremove(repo, matcher, "", opts) != 0:
        raise util.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2198
2200
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Replace changeset `old` with a new one absorbing working-dir changes.

    A temporary intermediate commit records the working directory changes,
    then a memctx combining `old` and that commit is committed on top of
    old's parent.  Bookmarks are moved to the result, and the superseded
    changesets are either obsoleted (when createmarkers is enabled) or
    stripped.

    Returns the node of the amended changeset, or old's node when nothing
    changed.
    """
    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete._enabled:
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction('amend')
        try:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            currentbookmark = repo._bookmarkcurrent
            try:
                # detach the active bookmark so the temporary commit does
                # not drag it along
                repo._bookmarkcurrent = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarkcurrent = currentbookmark
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            # |          from working dir to go into amending commit
            # |          (or a workingctx if there were no changes)
            # |
            # old      o - changeset to amend
            # |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                if old.p2:
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())

                def samefile(f):
                    # True when f is identical (data and flags) in ctx and
                    # base, or absent from both
                    if f in ctx.manifest():
                        a = ctx.filectx(f)
                        if f in base.manifest():
                            b = base.filectx(f)
                            return (not a.cmp(b)
                                    and a.flags() == b.flags())
                        else:
                            return False
                    else:
                        return f not in base.manifest()
                files = [f for f in files if not samefile(f)]

                def filectxfn(repo, ctx_, path):
                    # serve file contents from the intermediate commit;
                    # None signals a removed file to memctx
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

            user = opts.get('user') or old.user()
            date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                # always restore the configured new-commit phase
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        marks[bm] = newid
                    marks.write()
            #commit the whole amend process
            createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
            if createmarkers and newid != old.node():
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs)
            tr.close()
        finally:
            tr.release()
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        if newid is None:
            # the amend did not complete; drop any in-memory dirstate changes
            repo.dirstate.invalidate()
        lockmod.release(lock, wlock)
    return newid
2393
2395
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's own description, or open the editor when it is empty."""
    desc = ctx.description()
    if not desc:
        return commitforceeditor(repo, ctx, subs, editform=editform)
    return desc
2398
2400
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform=''):
    """Always open the editor to obtain a commit message for `ctx`.

    `editform` selects the most specific `committemplate` configuration
    (e.g. 'changeset.commit.amend' falls back to 'changeset.commit', then
    'changeset'); `finishdesc` post-processes the edited text; `extramsg`
    replaces the default abort-instruction line.

    Raises util.Abort when the resulting message is empty.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # look for the most specific committemplate config, dropping the least
    # significant editform component on each miss; the for-else falls back
    # to the plain built-in text when no template matched
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = os.getcwd()
    os.chdir(repo.root)
    text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
    # strip the HG: helper lines the user did not remove
    text = re.sub("(?m)^HG:.*(\n|$)", "", text)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise util.Abort(_("empty commit message"))

    return text
2428
2430
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the commit editor text from a `committemplate` template.

    `tmpl` is the template string selected by commitforceeditor;
    `extramsg` is the trailing instruction line passed to the template.
    Raises util.Abort on a template syntax error.
    """
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    try:
        t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
    except SyntaxError, inst:
        raise util.Abort(inst.args[0])

    # expose every committemplate.* option to the template engine, except
    # 'changeset' itself which is the entry point being rendered
    for k, v in repo.ui.configitems('committemplate'):
        if k != 'changeset':
            t.t.cache[k] = v

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    # capture the rendered template instead of writing it to the terminal
    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2448
2450
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default (non-templated) commit editor text for `ctx`."""
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()

    lines = []
    add = lines.append
    if ctx.description():
        add(ctx.description())
    add("")
    add("") # Empty line between message and comments.
    add(_("HG: Enter commit message."
          " Lines beginning with 'HG:' are removed."))
    add("HG: %s" % extramsg)
    add("HG: --")
    add(_("HG: user: %s") % ctx.user())
    if ctx.p2():
        add(_("HG: branch merge"))
    if ctx.branch():
        add(_("HG: branch '%s'") % ctx.branch())
    if bookmarks.iscurrent(repo):
        add(_("HG: bookmark '%s'") % repo._bookmarkcurrent)
    for s in subs:
        add(_("HG: subrepo %s") % s)
    for f in added:
        add(_("HG: added %s") % f)
    for f in modified:
        add(_("HG: changed %s") % f)
    for f in removed:
        add(_("HG: removed %s") % f)
    if not (added or modified or removed):
        add(_("HG: no files changed"))
    add("")

    return "\n".join(lines)
2476
2478
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Report status messages after committing `node` on `branch`.

    Prints 'created new head' when appropriate, notes reopened closed
    branch heads, and echoes the committed changeset in verbose/debug
    mode.  `bheads` is the list of branch head nodes before the commit;
    `opts` are the commit command options.
    """
    if opts is None:
        # was a mutable default argument ({}); use a None sentinel so no
        # dict instance is shared between calls
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        # Par Msg Comment
        # N N  y  additional topo root
        #
        # B N  y  additional branch root
        # C N  y  additional topo head
        # H N  n  usual case
        #
        # B B  y  weird additional branch root
        # C B  y  branch merge
        # H B  n  merge with named branch
        #
        # C C  y  additional head from merge
        # C H  n  merge with a head
        #
        # H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2522
2524
2523 def revert(ui, repo, ctx, parents, *pats, **opts):
2525 def revert(ui, repo, ctx, parents, *pats, **opts):
2524 parent, p2 = parents
2526 parent, p2 = parents
2525 node = ctx.node()
2527 node = ctx.node()
2526
2528
2527 mf = ctx.manifest()
2529 mf = ctx.manifest()
2528 if node == p2:
2530 if node == p2:
2529 parent = p2
2531 parent = p2
2530 if node == parent:
2532 if node == parent:
2531 pmf = mf
2533 pmf = mf
2532 else:
2534 else:
2533 pmf = None
2535 pmf = None
2534
2536
2535 # need all matching names in dirstate and manifest of target rev,
2537 # need all matching names in dirstate and manifest of target rev,
2536 # so have to walk both. do not print errors if files exist in one
2538 # so have to walk both. do not print errors if files exist in one
2537 # but not other.
2539 # but not other.
2538
2540
2539 # `names` is a mapping for all elements in working copy and target revision
2541 # `names` is a mapping for all elements in working copy and target revision
2540 # The mapping is in the form:
2542 # The mapping is in the form:
2541 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2543 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2542 names = {}
2544 names = {}
2543
2545
2544 wlock = repo.wlock()
2546 wlock = repo.wlock()
2545 try:
2547 try:
2546 ## filling of the `names` mapping
2548 ## filling of the `names` mapping
2547 # walk dirstate to fill `names`
2549 # walk dirstate to fill `names`
2548
2550
2549 m = scmutil.match(repo[None], pats, opts)
2551 m = scmutil.match(repo[None], pats, opts)
2550 if not m.always() or node != parent:
2552 if not m.always() or node != parent:
2551 m.bad = lambda x, y: False
2553 m.bad = lambda x, y: False
2552 for abs in repo.walk(m):
2554 for abs in repo.walk(m):
2553 names[abs] = m.rel(abs), m.exact(abs)
2555 names[abs] = m.rel(abs), m.exact(abs)
2554
2556
2555 # walk target manifest to fill `names`
2557 # walk target manifest to fill `names`
2556
2558
2557 def badfn(path, msg):
2559 def badfn(path, msg):
2558 if path in names:
2560 if path in names:
2559 return
2561 return
2560 if path in ctx.substate:
2562 if path in ctx.substate:
2561 return
2563 return
2562 path_ = path + '/'
2564 path_ = path + '/'
2563 for f in names:
2565 for f in names:
2564 if f.startswith(path_):
2566 if f.startswith(path_):
2565 return
2567 return
2566 ui.warn("%s: %s\n" % (m.rel(path), msg))
2568 ui.warn("%s: %s\n" % (m.rel(path), msg))
2567
2569
2568 m = scmutil.match(ctx, pats, opts)
2570 m = scmutil.match(ctx, pats, opts)
2569 m.bad = badfn
2571 m.bad = badfn
2570 for abs in ctx.walk(m):
2572 for abs in ctx.walk(m):
2571 if abs not in names:
2573 if abs not in names:
2572 names[abs] = m.rel(abs), m.exact(abs)
2574 names[abs] = m.rel(abs), m.exact(abs)
2573
2575
2574 # Find status of all file in `names`.
2576 # Find status of all file in `names`.
2575 m = scmutil.matchfiles(repo, names)
2577 m = scmutil.matchfiles(repo, names)
2576
2578
2577 changes = repo.status(node1=node, match=m,
2579 changes = repo.status(node1=node, match=m,
2578 unknown=True, ignored=True, clean=True)
2580 unknown=True, ignored=True, clean=True)
2579 else:
2581 else:
2580 changes = repo.status(match=m)
2582 changes = repo.status(match=m)
2581 for kind in changes:
2583 for kind in changes:
2582 for abs in kind:
2584 for abs in kind:
2583 names[abs] = m.rel(abs), m.exact(abs)
2585 names[abs] = m.rel(abs), m.exact(abs)
2584
2586
2585 m = scmutil.matchfiles(repo, names)
2587 m = scmutil.matchfiles(repo, names)
2586
2588
2587 modified = set(changes.modified)
2589 modified = set(changes.modified)
2588 added = set(changes.added)
2590 added = set(changes.added)
2589 removed = set(changes.removed)
2591 removed = set(changes.removed)
2590 _deleted = set(changes.deleted)
2592 _deleted = set(changes.deleted)
2591 unknown = set(changes.unknown)
2593 unknown = set(changes.unknown)
2592 unknown.update(changes.ignored)
2594 unknown.update(changes.ignored)
2593 clean = set(changes.clean)
2595 clean = set(changes.clean)
2594 modadded = set()
2596 modadded = set()
2595
2597
2596 # split between files known in target manifest and the others
2598 # split between files known in target manifest and the others
2597 smf = set(mf)
2599 smf = set(mf)
2598
2600
2599 # determine the exact nature of the deleted changesets
2601 # determine the exact nature of the deleted changesets
2600 deladded = _deleted - smf
2602 deladded = _deleted - smf
2601 deleted = _deleted - deladded
2603 deleted = _deleted - deladded
2602
2604
2603 # We need to account for the state of the file in the dirstate,
2605 # We need to account for the state of the file in the dirstate,
2604 # even when we revert against something else than parent. This will
2606 # even when we revert against something else than parent. This will
2605 # slightly alter the behavior of revert (doing back up or not, delete
2607 # slightly alter the behavior of revert (doing back up or not, delete
2606 # or just forget etc).
2608 # or just forget etc).
2607 if parent == node:
2609 if parent == node:
2608 dsmodified = modified
2610 dsmodified = modified
2609 dsadded = added
2611 dsadded = added
2610 dsremoved = removed
2612 dsremoved = removed
2611 # store all local modifications, useful later for rename detection
2613 # store all local modifications, useful later for rename detection
2612 localchanges = dsmodified | dsadded
2614 localchanges = dsmodified | dsadded
2613 modified, added, removed = set(), set(), set()
2615 modified, added, removed = set(), set(), set()
2614 else:
2616 else:
2615 changes = repo.status(node1=parent, match=m)
2617 changes = repo.status(node1=parent, match=m)
2616 dsmodified = set(changes.modified)
2618 dsmodified = set(changes.modified)
2617 dsadded = set(changes.added)
2619 dsadded = set(changes.added)
2618 dsremoved = set(changes.removed)
2620 dsremoved = set(changes.removed)
2619 # store all local modifications, useful later for rename detection
2621 # store all local modifications, useful later for rename detection
2620 localchanges = dsmodified | dsadded
2622 localchanges = dsmodified | dsadded
2621
2623
2622 # only take into account for removes between wc and target
2624 # only take into account for removes between wc and target
2623 clean |= dsremoved - removed
2625 clean |= dsremoved - removed
2624 dsremoved &= removed
2626 dsremoved &= removed
2625 # distinct between dirstate remove and other
2627 # distinct between dirstate remove and other
2626 removed -= dsremoved
2628 removed -= dsremoved
2627
2629
2628 modadded = added & dsmodified
2630 modadded = added & dsmodified
2629 added -= modadded
2631 added -= modadded
2630
2632
2631 # tell newly modified apart.
2633 # tell newly modified apart.
2632 dsmodified &= modified
2634 dsmodified &= modified
2633 dsmodified |= modified & dsadded # dirstate added may needs backup
2635 dsmodified |= modified & dsadded # dirstate added may needs backup
2634 modified -= dsmodified
2636 modified -= dsmodified
2635
2637
2636 # We need to wait for some post-processing to update this set
2638 # We need to wait for some post-processing to update this set
2637 # before making the distinction. The dirstate will be used for
2639 # before making the distinction. The dirstate will be used for
2638 # that purpose.
2640 # that purpose.
2639 dsadded = added
2641 dsadded = added
2640
2642
2641 # in case of merge, files that are actually added can be reported as
2643 # in case of merge, files that are actually added can be reported as
2642 # modified, we need to post process the result
2644 # modified, we need to post process the result
2643 if p2 != nullid:
2645 if p2 != nullid:
2644 if pmf is None:
2646 if pmf is None:
2645 # only need parent manifest in the merge case,
2647 # only need parent manifest in the merge case,
2646 # so do not read by default
2648 # so do not read by default
2647 pmf = repo[parent].manifest()
2649 pmf = repo[parent].manifest()
2648 mergeadd = dsmodified - set(pmf)
2650 mergeadd = dsmodified - set(pmf)
2649 dsadded |= mergeadd
2651 dsadded |= mergeadd
2650 dsmodified -= mergeadd
2652 dsmodified -= mergeadd
2651
2653
2652 # if f is a rename, update `names` to also revert the source
2654 # if f is a rename, update `names` to also revert the source
2653 cwd = repo.getcwd()
2655 cwd = repo.getcwd()
2654 for f in localchanges:
2656 for f in localchanges:
2655 src = repo.dirstate.copied(f)
2657 src = repo.dirstate.copied(f)
2656 # XXX should we check for rename down to target node?
2658 # XXX should we check for rename down to target node?
2657 if src and src not in names and repo.dirstate[src] == 'r':
2659 if src and src not in names and repo.dirstate[src] == 'r':
2658 dsremoved.add(src)
2660 dsremoved.add(src)
2659 names[src] = (repo.pathto(src, cwd), True)
2661 names[src] = (repo.pathto(src, cwd), True)
2660
2662
2661 # distinguish between file to forget and the other
2663 # distinguish between file to forget and the other
2662 added = set()
2664 added = set()
2663 for abs in dsadded:
2665 for abs in dsadded:
2664 if repo.dirstate[abs] != 'a':
2666 if repo.dirstate[abs] != 'a':
2665 added.add(abs)
2667 added.add(abs)
2666 dsadded -= added
2668 dsadded -= added
2667
2669
2668 for abs in deladded:
2670 for abs in deladded:
2669 if repo.dirstate[abs] == 'a':
2671 if repo.dirstate[abs] == 'a':
2670 dsadded.add(abs)
2672 dsadded.add(abs)
2671 deladded -= dsadded
2673 deladded -= dsadded
2672
2674
2673 # For files marked as removed, we check if an unknown file is present at
2675 # For files marked as removed, we check if an unknown file is present at
2674 # the same path. If a such file exists it may need to be backed up.
2676 # the same path. If a such file exists it may need to be backed up.
2675 # Making the distinction at this stage helps have simpler backup
2677 # Making the distinction at this stage helps have simpler backup
2676 # logic.
2678 # logic.
2677 removunk = set()
2679 removunk = set()
2678 for abs in removed:
2680 for abs in removed:
2679 target = repo.wjoin(abs)
2681 target = repo.wjoin(abs)
2680 if os.path.lexists(target):
2682 if os.path.lexists(target):
2681 removunk.add(abs)
2683 removunk.add(abs)
2682 removed -= removunk
2684 removed -= removunk
2683
2685
2684 dsremovunk = set()
2686 dsremovunk = set()
2685 for abs in dsremoved:
2687 for abs in dsremoved:
2686 target = repo.wjoin(abs)
2688 target = repo.wjoin(abs)
2687 if os.path.lexists(target):
2689 if os.path.lexists(target):
2688 dsremovunk.add(abs)
2690 dsremovunk.add(abs)
2689 dsremoved -= dsremovunk
2691 dsremoved -= dsremovunk
2690
2692
2691 # action to be actually performed by revert
2693 # action to be actually performed by revert
2692 # (<list of file>, message>) tuple
2694 # (<list of file>, message>) tuple
2693 actions = {'revert': ([], _('reverting %s\n')),
2695 actions = {'revert': ([], _('reverting %s\n')),
2694 'add': ([], _('adding %s\n')),
2696 'add': ([], _('adding %s\n')),
2695 'remove': ([], _('removing %s\n')),
2697 'remove': ([], _('removing %s\n')),
2696 'drop': ([], _('removing %s\n')),
2698 'drop': ([], _('removing %s\n')),
2697 'forget': ([], _('forgetting %s\n')),
2699 'forget': ([], _('forgetting %s\n')),
2698 'undelete': ([], _('undeleting %s\n')),
2700 'undelete': ([], _('undeleting %s\n')),
2699 'noop': (None, _('no changes needed to %s\n')),
2701 'noop': (None, _('no changes needed to %s\n')),
2700 'unknown': (None, _('file not managed: %s\n')),
2702 'unknown': (None, _('file not managed: %s\n')),
2701 }
2703 }
2702
2704
2703 # "constant" that convey the backup strategy.
2705 # "constant" that convey the backup strategy.
2704 # All set to `discard` if `no-backup` is set do avoid checking
2706 # All set to `discard` if `no-backup` is set do avoid checking
2705 # no_backup lower in the code.
2707 # no_backup lower in the code.
2706 # These values are ordered for comparison purposes
2708 # These values are ordered for comparison purposes
2707 backup = 2 # unconditionally do backup
2709 backup = 2 # unconditionally do backup
2708 check = 1 # check if the existing file differs from target
2710 check = 1 # check if the existing file differs from target
2709 discard = 0 # never do backup
2711 discard = 0 # never do backup
2710 if opts.get('no_backup'):
2712 if opts.get('no_backup'):
2711 backup = check = discard
2713 backup = check = discard
2712
2714
2713 backupanddel = actions['remove']
2715 backupanddel = actions['remove']
2714 if not opts.get('no_backup'):
2716 if not opts.get('no_backup'):
2715 backupanddel = actions['drop']
2717 backupanddel = actions['drop']
2716
2718
2717 disptable = (
2719 disptable = (
2718 # dispatch table:
2720 # dispatch table:
2719 # file state
2721 # file state
2720 # action
2722 # action
2721 # make backup
2723 # make backup
2722
2724
2723 ## Sets that results that will change file on disk
2725 ## Sets that results that will change file on disk
2724 # Modified compared to target, no local change
2726 # Modified compared to target, no local change
2725 (modified, actions['revert'], discard),
2727 (modified, actions['revert'], discard),
2726 # Modified compared to target, but local file is deleted
2728 # Modified compared to target, but local file is deleted
2727 (deleted, actions['revert'], discard),
2729 (deleted, actions['revert'], discard),
2728 # Modified compared to target, local change
2730 # Modified compared to target, local change
2729 (dsmodified, actions['revert'], backup),
2731 (dsmodified, actions['revert'], backup),
2730 # Added since target
2732 # Added since target
2731 (added, actions['remove'], discard),
2733 (added, actions['remove'], discard),
2732 # Added in working directory
2734 # Added in working directory
2733 (dsadded, actions['forget'], discard),
2735 (dsadded, actions['forget'], discard),
2734 # Added since target, have local modification
2736 # Added since target, have local modification
2735 (modadded, backupanddel, backup),
2737 (modadded, backupanddel, backup),
2736 # Added since target but file is missing in working directory
2738 # Added since target but file is missing in working directory
2737 (deladded, actions['drop'], discard),
2739 (deladded, actions['drop'], discard),
2738 # Removed since target, before working copy parent
2740 # Removed since target, before working copy parent
2739 (removed, actions['add'], discard),
2741 (removed, actions['add'], discard),
2740 # Same as `removed` but an unknown file exists at the same path
2742 # Same as `removed` but an unknown file exists at the same path
2741 (removunk, actions['add'], check),
2743 (removunk, actions['add'], check),
2742 # Removed since targe, marked as such in working copy parent
2744 # Removed since targe, marked as such in working copy parent
2743 (dsremoved, actions['undelete'], discard),
2745 (dsremoved, actions['undelete'], discard),
2744 # Same as `dsremoved` but an unknown file exists at the same path
2746 # Same as `dsremoved` but an unknown file exists at the same path
2745 (dsremovunk, actions['undelete'], check),
2747 (dsremovunk, actions['undelete'], check),
2746 ## the following sets does not result in any file changes
2748 ## the following sets does not result in any file changes
2747 # File with no modification
2749 # File with no modification
2748 (clean, actions['noop'], discard),
2750 (clean, actions['noop'], discard),
2749 # Existing file, not tracked anywhere
2751 # Existing file, not tracked anywhere
2750 (unknown, actions['unknown'], discard),
2752 (unknown, actions['unknown'], discard),
2751 )
2753 )
2752
2754
2753 wctx = repo[None]
2755 wctx = repo[None]
2754 for abs, (rel, exact) in sorted(names.items()):
2756 for abs, (rel, exact) in sorted(names.items()):
2755 # target file to be touch on disk (relative to cwd)
2757 # target file to be touch on disk (relative to cwd)
2756 target = repo.wjoin(abs)
2758 target = repo.wjoin(abs)
2757 # search the entry in the dispatch table.
2759 # search the entry in the dispatch table.
2758 # if the file is in any of these sets, it was touched in the working
2760 # if the file is in any of these sets, it was touched in the working
2759 # directory parent and we are sure it needs to be reverted.
2761 # directory parent and we are sure it needs to be reverted.
2760 for table, (xlist, msg), dobackup in disptable:
2762 for table, (xlist, msg), dobackup in disptable:
2761 if abs not in table:
2763 if abs not in table:
2762 continue
2764 continue
2763 if xlist is not None:
2765 if xlist is not None:
2764 xlist.append(abs)
2766 xlist.append(abs)
2765 if dobackup and (backup <= dobackup
2767 if dobackup and (backup <= dobackup
2766 or wctx[abs].cmp(ctx[abs])):
2768 or wctx[abs].cmp(ctx[abs])):
2767 bakname = "%s.orig" % rel
2769 bakname = "%s.orig" % rel
2768 ui.note(_('saving current version of %s as %s\n') %
2770 ui.note(_('saving current version of %s as %s\n') %
2769 (rel, bakname))
2771 (rel, bakname))
2770 if not opts.get('dry_run'):
2772 if not opts.get('dry_run'):
2771 util.rename(target, bakname)
2773 util.rename(target, bakname)
2772 if ui.verbose or not exact:
2774 if ui.verbose or not exact:
2773 if not isinstance(msg, basestring):
2775 if not isinstance(msg, basestring):
2774 msg = msg(abs)
2776 msg = msg(abs)
2775 ui.status(msg % rel)
2777 ui.status(msg % rel)
2776 elif exact:
2778 elif exact:
2777 ui.warn(msg % rel)
2779 ui.warn(msg % rel)
2778 break
2780 break
2779
2781
2780
2782
2781 if not opts.get('dry_run'):
2783 if not opts.get('dry_run'):
2782 needdata = ('revert', 'add', 'undelete')
2784 needdata = ('revert', 'add', 'undelete')
2783 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
2785 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
2784
2786
2785 _performrevert(repo, parents, ctx, actions)
2787 _performrevert(repo, parents, ctx, actions)
2786
2788
2787 # get the list of subrepos that must be reverted
2789 # get the list of subrepos that must be reverted
2788 subrepomatch = scmutil.match(ctx, pats, opts)
2790 subrepomatch = scmutil.match(ctx, pats, opts)
2789 targetsubs = sorted(s for s in ctx.substate if subrepomatch(s))
2791 targetsubs = sorted(s for s in ctx.substate if subrepomatch(s))
2790
2792
2791 if targetsubs:
2793 if targetsubs:
2792 # Revert the subrepos on the revert list
2794 # Revert the subrepos on the revert list
2793 for sub in targetsubs:
2795 for sub in targetsubs:
2794 ctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
2796 ctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
2795 finally:
2797 finally:
2796 wlock.release()
2798 wlock.release()
2797
2799
2798 def _revertprefetch(repo, ctx, *files):
2800 def _revertprefetch(repo, ctx, *files):
2799 """Let extension changing the storage layer prefetch content"""
2801 """Let extension changing the storage layer prefetch content"""
2800 pass
2802 pass
2801
2803
2802 def _performrevert(repo, parents, ctx, actions):
2804 def _performrevert(repo, parents, ctx, actions):
2803 """function that actually perform all the actions computed for revert
2805 """function that actually perform all the actions computed for revert
2804
2806
2805 This is an independent function to let extension to plug in and react to
2807 This is an independent function to let extension to plug in and react to
2806 the imminent revert.
2808 the imminent revert.
2807
2809
2808 Make sure you have the working directory locked when calling this function.
2810 Make sure you have the working directory locked when calling this function.
2809 """
2811 """
2810 parent, p2 = parents
2812 parent, p2 = parents
2811 node = ctx.node()
2813 node = ctx.node()
2812 def checkout(f):
2814 def checkout(f):
2813 fc = ctx[f]
2815 fc = ctx[f]
2814 repo.wwrite(f, fc.data(), fc.flags())
2816 repo.wwrite(f, fc.data(), fc.flags())
2815
2817
2816 audit_path = pathutil.pathauditor(repo.root)
2818 audit_path = pathutil.pathauditor(repo.root)
2817 for f in actions['forget'][0]:
2819 for f in actions['forget'][0]:
2818 repo.dirstate.drop(f)
2820 repo.dirstate.drop(f)
2819 for f in actions['remove'][0]:
2821 for f in actions['remove'][0]:
2820 audit_path(f)
2822 audit_path(f)
2821 util.unlinkpath(repo.wjoin(f))
2823 util.unlinkpath(repo.wjoin(f))
2822 repo.dirstate.remove(f)
2824 repo.dirstate.remove(f)
2823 for f in actions['drop'][0]:
2825 for f in actions['drop'][0]:
2824 audit_path(f)
2826 audit_path(f)
2825 repo.dirstate.remove(f)
2827 repo.dirstate.remove(f)
2826
2828
2827 normal = None
2829 normal = None
2828 if node == parent:
2830 if node == parent:
2829 # We're reverting to our parent. If possible, we'd like status
2831 # We're reverting to our parent. If possible, we'd like status
2830 # to report the file as clean. We have to use normallookup for
2832 # to report the file as clean. We have to use normallookup for
2831 # merges to avoid losing information about merged/dirty files.
2833 # merges to avoid losing information about merged/dirty files.
2832 if p2 != nullid:
2834 if p2 != nullid:
2833 normal = repo.dirstate.normallookup
2835 normal = repo.dirstate.normallookup
2834 else:
2836 else:
2835 normal = repo.dirstate.normal
2837 normal = repo.dirstate.normal
2836 for f in actions['revert'][0]:
2838 for f in actions['revert'][0]:
2837 checkout(f)
2839 checkout(f)
2838 if normal:
2840 if normal:
2839 normal(f)
2841 normal(f)
2840
2842
2841 for f in actions['add'][0]:
2843 for f in actions['add'][0]:
2842 checkout(f)
2844 checkout(f)
2843 repo.dirstate.add(f)
2845 repo.dirstate.add(f)
2844
2846
2845 normal = repo.dirstate.normallookup
2847 normal = repo.dirstate.normallookup
2846 if node == parent and p2 == nullid:
2848 if node == parent and p2 == nullid:
2847 normal = repo.dirstate.normal
2849 normal = repo.dirstate.normal
2848 for f in actions['undelete'][0]:
2850 for f in actions['undelete'][0]:
2849 checkout(f)
2851 checkout(f)
2850 normal(f)
2852 normal(f)
2851
2853
2852 copied = copies.pathcopies(repo[parent], ctx)
2854 copied = copies.pathcopies(repo[parent], ctx)
2853
2855
2854 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
2856 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
2855 if f in copied:
2857 if f in copied:
2856 repo.dirstate.copy(copied[f], f)
2858 repo.dirstate.copy(copied[f], f)
2857
2859
2858 def command(table):
2860 def command(table):
2859 """Returns a function object to be used as a decorator for making commands.
2861 """Returns a function object to be used as a decorator for making commands.
2860
2862
2861 This function receives a command table as its argument. The table should
2863 This function receives a command table as its argument. The table should
2862 be a dict.
2864 be a dict.
2863
2865
2864 The returned function can be used as a decorator for adding commands
2866 The returned function can be used as a decorator for adding commands
2865 to that command table. This function accepts multiple arguments to define
2867 to that command table. This function accepts multiple arguments to define
2866 a command.
2868 a command.
2867
2869
2868 The first argument is the command name.
2870 The first argument is the command name.
2869
2871
2870 The options argument is an iterable of tuples defining command arguments.
2872 The options argument is an iterable of tuples defining command arguments.
2871 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
2873 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
2872
2874
2873 The synopsis argument defines a short, one line summary of how to use the
2875 The synopsis argument defines a short, one line summary of how to use the
2874 command. This shows up in the help output.
2876 command. This shows up in the help output.
2875
2877
2876 The norepo argument defines whether the command does not require a
2878 The norepo argument defines whether the command does not require a
2877 local repository. Most commands operate against a repository, thus the
2879 local repository. Most commands operate against a repository, thus the
2878 default is False.
2880 default is False.
2879
2881
2880 The optionalrepo argument defines whether the command optionally requires
2882 The optionalrepo argument defines whether the command optionally requires
2881 a local repository.
2883 a local repository.
2882
2884
2883 The inferrepo argument defines whether to try to find a repository from the
2885 The inferrepo argument defines whether to try to find a repository from the
2884 command line arguments. If True, arguments will be examined for potential
2886 command line arguments. If True, arguments will be examined for potential
2885 repository locations. See ``findrepo()``. If a repository is found, it
2887 repository locations. See ``findrepo()``. If a repository is found, it
2886 will be used.
2888 will be used.
2887 """
2889 """
2888 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
2890 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
2889 inferrepo=False):
2891 inferrepo=False):
2890 def decorator(func):
2892 def decorator(func):
2891 if synopsis:
2893 if synopsis:
2892 table[name] = func, list(options), synopsis
2894 table[name] = func, list(options), synopsis
2893 else:
2895 else:
2894 table[name] = func, list(options)
2896 table[name] = func, list(options)
2895
2897
2896 if norepo:
2898 if norepo:
2897 # Avoid import cycle.
2899 # Avoid import cycle.
2898 import commands
2900 import commands
2899 commands.norepo += ' %s' % ' '.join(parsealiases(name))
2901 commands.norepo += ' %s' % ' '.join(parsealiases(name))
2900
2902
2901 if optionalrepo:
2903 if optionalrepo:
2902 import commands
2904 import commands
2903 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
2905 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
2904
2906
2905 if inferrepo:
2907 if inferrepo:
2906 import commands
2908 import commands
2907 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
2909 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
2908
2910
2909 return func
2911 return func
2910 return decorator
2912 return decorator
2911
2913
2912 return cmd
2914 return cmd
2913
2915
2914 # a list of (ui, repo, otherpeer, opts, missing) functions called by
2916 # a list of (ui, repo, otherpeer, opts, missing) functions called by
2915 # commands.outgoing. "missing" is "missing" of the result of
2917 # commands.outgoing. "missing" is "missing" of the result of
2916 # "findcommonoutgoing()"
2918 # "findcommonoutgoing()"
2917 outgoinghooks = util.hooks()
2919 outgoinghooks = util.hooks()
2918
2920
2919 # a list of (ui, repo) functions called by commands.summary
2921 # a list of (ui, repo) functions called by commands.summary
2920 summaryhooks = util.hooks()
2922 summaryhooks = util.hooks()
2921
2923
2922 # a list of (ui, repo, opts, changes) functions called by commands.summary.
2924 # a list of (ui, repo, opts, changes) functions called by commands.summary.
2923 #
2925 #
2924 # functions should return tuple of booleans below, if 'changes' is None:
2926 # functions should return tuple of booleans below, if 'changes' is None:
2925 # (whether-incomings-are-needed, whether-outgoings-are-needed)
2927 # (whether-incomings-are-needed, whether-outgoings-are-needed)
2926 #
2928 #
2927 # otherwise, 'changes' is a tuple of tuples below:
2929 # otherwise, 'changes' is a tuple of tuples below:
2928 # - (sourceurl, sourcebranch, sourcepeer, incoming)
2930 # - (sourceurl, sourcebranch, sourcepeer, incoming)
2929 # - (desturl, destbranch, destpeer, outgoing)
2931 # - (desturl, destbranch, destpeer, outgoing)
2930 summaryremotehooks = util.hooks()
2932 summaryremotehooks = util.hooks()
2931
2933
2932 # A list of state files kept by multistep operations like graft.
2934 # A list of state files kept by multistep operations like graft.
2933 # Since graft cannot be aborted, it is considered 'clearable' by update.
2935 # Since graft cannot be aborted, it is considered 'clearable' by update.
2934 # note: bisect is intentionally excluded
2936 # note: bisect is intentionally excluded
2935 # (state file, clearable, allowcommit, error, hint)
2937 # (state file, clearable, allowcommit, error, hint)
2936 unfinishedstates = [
2938 unfinishedstates = [
2937 ('graftstate', True, False, _('graft in progress'),
2939 ('graftstate', True, False, _('graft in progress'),
2938 _("use 'hg graft --continue' or 'hg update' to abort")),
2940 _("use 'hg graft --continue' or 'hg update' to abort")),
2939 ('updatestate', True, False, _('last update was interrupted'),
2941 ('updatestate', True, False, _('last update was interrupted'),
2940 _("use 'hg update' to get a consistent checkout"))
2942 _("use 'hg update' to get a consistent checkout"))
2941 ]
2943 ]
2942
2944
2943 def checkunfinished(repo, commit=False):
2945 def checkunfinished(repo, commit=False):
2944 '''Look for an unfinished multistep operation, like graft, and abort
2946 '''Look for an unfinished multistep operation, like graft, and abort
2945 if found. It's probably good to check this right before
2947 if found. It's probably good to check this right before
2946 bailifchanged().
2948 bailifchanged().
2947 '''
2949 '''
2948 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2950 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2949 if commit and allowcommit:
2951 if commit and allowcommit:
2950 continue
2952 continue
2951 if repo.vfs.exists(f):
2953 if repo.vfs.exists(f):
2952 raise util.Abort(msg, hint=hint)
2954 raise util.Abort(msg, hint=hint)
2953
2955
2954 def clearunfinished(repo):
2956 def clearunfinished(repo):
2955 '''Check for unfinished operations (as above), and clear the ones
2957 '''Check for unfinished operations (as above), and clear the ones
2956 that are clearable.
2958 that are clearable.
2957 '''
2959 '''
2958 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2960 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2959 if not clearable and repo.vfs.exists(f):
2961 if not clearable and repo.vfs.exists(f):
2960 raise util.Abort(msg, hint=hint)
2962 raise util.Abort(msg, hint=hint)
2961 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2963 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2962 if clearable and repo.vfs.exists(f):
2964 if clearable and repo.vfs.exists(f):
2963 util.unlink(repo.join(f))
2965 util.unlink(repo.join(f))
@@ -1,1868 +1,1865 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, nullrev, short, hex, bin
8 from node import nullid, nullrev, short, hex, bin
9 from i18n import _
9 from i18n import _
10 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
10 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
11 import match as matchmod
11 import match as matchmod
12 import os, errno, stat
12 import os, errno, stat
13 import obsolete as obsmod
13 import obsolete as obsmod
14 import repoview
14 import repoview
15 import fileset
15 import fileset
16 import revlog
16 import revlog
17
17
18 propertycache = util.propertycache
18 propertycache = util.propertycache
19
19
20 # Phony node value to stand-in for new files in some uses of
20 # Phony node value to stand-in for new files in some uses of
21 # manifests. Manifests support 21-byte hashes for nodes which are
21 # manifests. Manifests support 21-byte hashes for nodes which are
22 # dirty in the working copy.
22 # dirty in the working copy.
23 _newnode = '!' * 21
23 _newnode = '!' * 21
24
24
25 class basectx(object):
25 class basectx(object):
26 """A basectx object represents the common logic for its children:
26 """A basectx object represents the common logic for its children:
27 changectx: read-only context that is already present in the repo,
27 changectx: read-only context that is already present in the repo,
28 workingctx: a context that represents the working directory and can
28 workingctx: a context that represents the working directory and can
29 be committed,
29 be committed,
30 memctx: a context that represents changes in-memory and can also
30 memctx: a context that represents changes in-memory and can also
31 be committed."""
31 be committed."""
32 def __new__(cls, repo, changeid='', *args, **kwargs):
32 def __new__(cls, repo, changeid='', *args, **kwargs):
33 if isinstance(changeid, basectx):
33 if isinstance(changeid, basectx):
34 return changeid
34 return changeid
35
35
36 o = super(basectx, cls).__new__(cls)
36 o = super(basectx, cls).__new__(cls)
37
37
38 o._repo = repo
38 o._repo = repo
39 o._rev = nullrev
39 o._rev = nullrev
40 o._node = nullid
40 o._node = nullid
41
41
42 return o
42 return o
43
43
44 def __str__(self):
44 def __str__(self):
45 return short(self.node())
45 return short(self.node())
46
46
47 def __int__(self):
47 def __int__(self):
48 return self.rev()
48 return self.rev()
49
49
50 def __repr__(self):
50 def __repr__(self):
51 return "<%s %s>" % (type(self).__name__, str(self))
51 return "<%s %s>" % (type(self).__name__, str(self))
52
52
53 def __eq__(self, other):
53 def __eq__(self, other):
54 try:
54 try:
55 return type(self) == type(other) and self._rev == other._rev
55 return type(self) == type(other) and self._rev == other._rev
56 except AttributeError:
56 except AttributeError:
57 return False
57 return False
58
58
59 def __ne__(self, other):
59 def __ne__(self, other):
60 return not (self == other)
60 return not (self == other)
61
61
62 def __contains__(self, key):
62 def __contains__(self, key):
63 return key in self._manifest
63 return key in self._manifest
64
64
65 def __getitem__(self, key):
65 def __getitem__(self, key):
66 return self.filectx(key)
66 return self.filectx(key)
67
67
68 def __iter__(self):
68 def __iter__(self):
69 for f in sorted(self._manifest):
69 for f in sorted(self._manifest):
70 yield f
70 yield f
71
71
72 def _manifestmatches(self, match, s):
72 def _manifestmatches(self, match, s):
73 """generate a new manifest filtered by the match argument
73 """generate a new manifest filtered by the match argument
74
74
75 This method is for internal use only and mainly exists to provide an
75 This method is for internal use only and mainly exists to provide an
76 object oriented way for other contexts to customize the manifest
76 object oriented way for other contexts to customize the manifest
77 generation.
77 generation.
78 """
78 """
79 return self.manifest().matches(match)
79 return self.manifest().matches(match)
80
80
81 def _matchstatus(self, other, match):
81 def _matchstatus(self, other, match):
82 """return match.always if match is none
82 """return match.always if match is none
83
83
84 This internal method provides a way for child objects to override the
84 This internal method provides a way for child objects to override the
85 match operator.
85 match operator.
86 """
86 """
87 return match or matchmod.always(self._repo.root, self._repo.getcwd())
87 return match or matchmod.always(self._repo.root, self._repo.getcwd())
88
88
89 def _buildstatus(self, other, s, match, listignored, listclean,
89 def _buildstatus(self, other, s, match, listignored, listclean,
90 listunknown):
90 listunknown):
91 """build a status with respect to another context"""
91 """build a status with respect to another context"""
92 # Load earliest manifest first for caching reasons. More specifically,
92 # Load earliest manifest first for caching reasons. More specifically,
93 # if you have revisions 1000 and 1001, 1001 is probably stored as a
93 # if you have revisions 1000 and 1001, 1001 is probably stored as a
94 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
94 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
95 # 1000 and cache it so that when you read 1001, we just need to apply a
95 # 1000 and cache it so that when you read 1001, we just need to apply a
96 # delta to what's in the cache. So that's one full reconstruction + one
96 # delta to what's in the cache. So that's one full reconstruction + one
97 # delta application.
97 # delta application.
98 if self.rev() is not None and self.rev() < other.rev():
98 if self.rev() is not None and self.rev() < other.rev():
99 self.manifest()
99 self.manifest()
100 mf1 = other._manifestmatches(match, s)
100 mf1 = other._manifestmatches(match, s)
101 mf2 = self._manifestmatches(match, s)
101 mf2 = self._manifestmatches(match, s)
102
102
103 modified, added = [], []
103 modified, added = [], []
104 removed = []
104 removed = []
105 clean = []
105 clean = []
106 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
106 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
107 deletedset = set(deleted)
107 deletedset = set(deleted)
108 d = mf1.diff(mf2, clean=listclean)
108 d = mf1.diff(mf2, clean=listclean)
109 for fn, value in d.iteritems():
109 for fn, value in d.iteritems():
110 if fn in deletedset:
110 if fn in deletedset:
111 continue
111 continue
112 if value is None:
112 if value is None:
113 clean.append(fn)
113 clean.append(fn)
114 continue
114 continue
115 (node1, flag1), (node2, flag2) = value
115 (node1, flag1), (node2, flag2) = value
116 if node1 is None:
116 if node1 is None:
117 added.append(fn)
117 added.append(fn)
118 elif node2 is None:
118 elif node2 is None:
119 removed.append(fn)
119 removed.append(fn)
120 elif node2 != _newnode:
120 elif node2 != _newnode:
121 # The file was not a new file in mf2, so an entry
121 # The file was not a new file in mf2, so an entry
122 # from diff is really a difference.
122 # from diff is really a difference.
123 modified.append(fn)
123 modified.append(fn)
124 elif self[fn].cmp(other[fn]):
124 elif self[fn].cmp(other[fn]):
125 # node2 was newnode, but the working file doesn't
125 # node2 was newnode, but the working file doesn't
126 # match the one in mf1.
126 # match the one in mf1.
127 modified.append(fn)
127 modified.append(fn)
128 else:
128 else:
129 clean.append(fn)
129 clean.append(fn)
130
130
131 if removed:
131 if removed:
132 # need to filter files if they are already reported as removed
132 # need to filter files if they are already reported as removed
133 unknown = [fn for fn in unknown if fn not in mf1]
133 unknown = [fn for fn in unknown if fn not in mf1]
134 ignored = [fn for fn in ignored if fn not in mf1]
134 ignored = [fn for fn in ignored if fn not in mf1]
135 # if they're deleted, don't report them as removed
135 # if they're deleted, don't report them as removed
136 removed = [fn for fn in removed if fn not in deletedset]
136 removed = [fn for fn in removed if fn not in deletedset]
137
137
138 return scmutil.status(modified, added, removed, deleted, unknown,
138 return scmutil.status(modified, added, removed, deleted, unknown,
139 ignored, clean)
139 ignored, clean)
140
140
141 @propertycache
141 @propertycache
142 def substate(self):
142 def substate(self):
143 return subrepo.state(self, self._repo.ui)
143 return subrepo.state(self, self._repo.ui)
144
144
145 def subrev(self, subpath):
145 def subrev(self, subpath):
146 return self.substate[subpath][1]
146 return self.substate[subpath][1]
147
147
148 def rev(self):
148 def rev(self):
149 return self._rev
149 return self._rev
150 def node(self):
150 def node(self):
151 return self._node
151 return self._node
152 def hex(self):
152 def hex(self):
153 return hex(self.node())
153 return hex(self.node())
154 def manifest(self):
154 def manifest(self):
155 return self._manifest
155 return self._manifest
156 def phasestr(self):
156 def phasestr(self):
157 return phases.phasenames[self.phase()]
157 return phases.phasenames[self.phase()]
158 def mutable(self):
158 def mutable(self):
159 return self.phase() > phases.public
159 return self.phase() > phases.public
160
160
161 def getfileset(self, expr):
161 def getfileset(self, expr):
162 return fileset.getfileset(self, expr)
162 return fileset.getfileset(self, expr)
163
163
164 def obsolete(self):
164 def obsolete(self):
165 """True if the changeset is obsolete"""
165 """True if the changeset is obsolete"""
166 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
166 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
167
167
168 def extinct(self):
168 def extinct(self):
169 """True if the changeset is extinct"""
169 """True if the changeset is extinct"""
170 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
170 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
171
171
172 def unstable(self):
172 def unstable(self):
173 """True if the changeset is not obsolete but it's ancestor are"""
173 """True if the changeset is not obsolete but it's ancestor are"""
174 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
174 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
175
175
176 def bumped(self):
176 def bumped(self):
177 """True if the changeset try to be a successor of a public changeset
177 """True if the changeset try to be a successor of a public changeset
178
178
179 Only non-public and non-obsolete changesets may be bumped.
179 Only non-public and non-obsolete changesets may be bumped.
180 """
180 """
181 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
181 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
182
182
183 def divergent(self):
183 def divergent(self):
184 """Is a successors of a changeset with multiple possible successors set
184 """Is a successors of a changeset with multiple possible successors set
185
185
186 Only non-public and non-obsolete changesets may be divergent.
186 Only non-public and non-obsolete changesets may be divergent.
187 """
187 """
188 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
188 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
189
189
190 def troubled(self):
190 def troubled(self):
191 """True if the changeset is either unstable, bumped or divergent"""
191 """True if the changeset is either unstable, bumped or divergent"""
192 return self.unstable() or self.bumped() or self.divergent()
192 return self.unstable() or self.bumped() or self.divergent()
193
193
194 def troubles(self):
194 def troubles(self):
195 """return the list of troubles affecting this changesets.
195 """return the list of troubles affecting this changesets.
196
196
197 Troubles are returned as strings. possible values are:
197 Troubles are returned as strings. possible values are:
198 - unstable,
198 - unstable,
199 - bumped,
199 - bumped,
200 - divergent.
200 - divergent.
201 """
201 """
202 troubles = []
202 troubles = []
203 if self.unstable():
203 if self.unstable():
204 troubles.append('unstable')
204 troubles.append('unstable')
205 if self.bumped():
205 if self.bumped():
206 troubles.append('bumped')
206 troubles.append('bumped')
207 if self.divergent():
207 if self.divergent():
208 troubles.append('divergent')
208 troubles.append('divergent')
209 return troubles
209 return troubles
210
210
211 def parents(self):
211 def parents(self):
212 """return contexts for each parent changeset"""
212 """return contexts for each parent changeset"""
213 return self._parents
213 return self._parents
214
214
215 def p1(self):
215 def p1(self):
216 return self._parents[0]
216 return self._parents[0]
217
217
218 def p2(self):
218 def p2(self):
219 if len(self._parents) == 2:
219 if len(self._parents) == 2:
220 return self._parents[1]
220 return self._parents[1]
221 return changectx(self._repo, -1)
221 return changectx(self._repo, -1)
222
222
223 def _fileinfo(self, path):
223 def _fileinfo(self, path):
224 if '_manifest' in self.__dict__:
224 if '_manifest' in self.__dict__:
225 try:
225 try:
226 return self._manifest[path], self._manifest.flags(path)
226 return self._manifest[path], self._manifest.flags(path)
227 except KeyError:
227 except KeyError:
228 raise error.ManifestLookupError(self._node, path,
228 raise error.ManifestLookupError(self._node, path,
229 _('not found in manifest'))
229 _('not found in manifest'))
230 if '_manifestdelta' in self.__dict__ or path in self.files():
230 if '_manifestdelta' in self.__dict__ or path in self.files():
231 if path in self._manifestdelta:
231 if path in self._manifestdelta:
232 return (self._manifestdelta[path],
232 return (self._manifestdelta[path],
233 self._manifestdelta.flags(path))
233 self._manifestdelta.flags(path))
234 node, flag = self._repo.manifest.find(self._changeset[0], path)
234 node, flag = self._repo.manifest.find(self._changeset[0], path)
235 if not node:
235 if not node:
236 raise error.ManifestLookupError(self._node, path,
236 raise error.ManifestLookupError(self._node, path,
237 _('not found in manifest'))
237 _('not found in manifest'))
238
238
239 return node, flag
239 return node, flag
240
240
241 def filenode(self, path):
241 def filenode(self, path):
242 return self._fileinfo(path)[0]
242 return self._fileinfo(path)[0]
243
243
244 def flags(self, path):
244 def flags(self, path):
245 try:
245 try:
246 return self._fileinfo(path)[1]
246 return self._fileinfo(path)[1]
247 except error.LookupError:
247 except error.LookupError:
248 return ''
248 return ''
249
249
250 def sub(self, path):
250 def sub(self, path):
251 return subrepo.subrepo(self, path)
251 return subrepo.subrepo(self, path)
252
252
253 def match(self, pats=[], include=None, exclude=None, default='glob'):
253 def match(self, pats=[], include=None, exclude=None, default='glob'):
254 r = self._repo
254 r = self._repo
255 return matchmod.match(r.root, r.getcwd(), pats,
255 return matchmod.match(r.root, r.getcwd(), pats,
256 include, exclude, default,
256 include, exclude, default,
257 auditor=r.auditor, ctx=self)
257 auditor=r.auditor, ctx=self)
258
258
259 def diff(self, ctx2=None, match=None, **opts):
259 def diff(self, ctx2=None, match=None, **opts):
260 """Returns a diff generator for the given contexts and matcher"""
260 """Returns a diff generator for the given contexts and matcher"""
261 if ctx2 is None:
261 if ctx2 is None:
262 ctx2 = self.p1()
262 ctx2 = self.p1()
263 if ctx2 is not None:
263 if ctx2 is not None:
264 ctx2 = self._repo[ctx2]
264 ctx2 = self._repo[ctx2]
265 diffopts = patch.diffopts(self._repo.ui, opts)
265 diffopts = patch.diffopts(self._repo.ui, opts)
266 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
266 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
267
267
268 @propertycache
268 @propertycache
269 def _dirs(self):
269 def _dirs(self):
270 return scmutil.dirs(self._manifest)
270 return scmutil.dirs(self._manifest)
271
271
272 def dirs(self):
272 def dirs(self):
273 return self._dirs
273 return self._dirs
274
274
275 def dirty(self, missing=False, merge=True, branch=True):
275 def dirty(self, missing=False, merge=True, branch=True):
276 return False
276 return False
277
277
278 def status(self, other=None, match=None, listignored=False,
278 def status(self, other=None, match=None, listignored=False,
279 listclean=False, listunknown=False, listsubrepos=False):
279 listclean=False, listunknown=False, listsubrepos=False):
280 """return status of files between two nodes or node and working
280 """return status of files between two nodes or node and working
281 directory.
281 directory.
282
282
283 If other is None, compare this node with working directory.
283 If other is None, compare this node with working directory.
284
284
285 returns (modified, added, removed, deleted, unknown, ignored, clean)
285 returns (modified, added, removed, deleted, unknown, ignored, clean)
286 """
286 """
287
287
288 ctx1 = self
288 ctx1 = self
289 ctx2 = self._repo[other]
289 ctx2 = self._repo[other]
290
290
291 # This next code block is, admittedly, fragile logic that tests for
291 # This next code block is, admittedly, fragile logic that tests for
292 # reversing the contexts and wouldn't need to exist if it weren't for
292 # reversing the contexts and wouldn't need to exist if it weren't for
293 # the fast (and common) code path of comparing the working directory
293 # the fast (and common) code path of comparing the working directory
294 # with its first parent.
294 # with its first parent.
295 #
295 #
296 # What we're aiming for here is the ability to call:
296 # What we're aiming for here is the ability to call:
297 #
297 #
298 # workingctx.status(parentctx)
298 # workingctx.status(parentctx)
299 #
299 #
300 # If we always built the manifest for each context and compared those,
300 # If we always built the manifest for each context and compared those,
301 # then we'd be done. But the special case of the above call means we
301 # then we'd be done. But the special case of the above call means we
302 # just copy the manifest of the parent.
302 # just copy the manifest of the parent.
303 reversed = False
303 reversed = False
304 if (not isinstance(ctx1, changectx)
304 if (not isinstance(ctx1, changectx)
305 and isinstance(ctx2, changectx)):
305 and isinstance(ctx2, changectx)):
306 reversed = True
306 reversed = True
307 ctx1, ctx2 = ctx2, ctx1
307 ctx1, ctx2 = ctx2, ctx1
308
308
309 match = ctx2._matchstatus(ctx1, match)
309 match = ctx2._matchstatus(ctx1, match)
310 r = scmutil.status([], [], [], [], [], [], [])
310 r = scmutil.status([], [], [], [], [], [], [])
311 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
311 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
312 listunknown)
312 listunknown)
313
313
314 if reversed:
314 if reversed:
315 # Reverse added and removed. Clear deleted, unknown and ignored as
315 # Reverse added and removed. Clear deleted, unknown and ignored as
316 # these make no sense to reverse.
316 # these make no sense to reverse.
317 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
317 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
318 r.clean)
318 r.clean)
319
319
320 if listsubrepos:
320 if listsubrepos:
321 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
321 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
322 rev2 = ctx2.subrev(subpath)
322 rev2 = ctx2.subrev(subpath)
323 try:
323 try:
324 submatch = matchmod.narrowmatcher(subpath, match)
324 submatch = matchmod.narrowmatcher(subpath, match)
325 s = sub.status(rev2, match=submatch, ignored=listignored,
325 s = sub.status(rev2, match=submatch, ignored=listignored,
326 clean=listclean, unknown=listunknown,
326 clean=listclean, unknown=listunknown,
327 listsubrepos=True)
327 listsubrepos=True)
328 for rfiles, sfiles in zip(r, s):
328 for rfiles, sfiles in zip(r, s):
329 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
329 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
330 except error.LookupError:
330 except error.LookupError:
331 self._repo.ui.status(_("skipping missing "
331 self._repo.ui.status(_("skipping missing "
332 "subrepository: %s\n") % subpath)
332 "subrepository: %s\n") % subpath)
333
333
334 for l in r:
334 for l in r:
335 l.sort()
335 l.sort()
336
336
337 return r
337 return r
338
338
339
339
340 def makememctx(repo, parents, text, user, date, branch, files, store,
340 def makememctx(repo, parents, text, user, date, branch, files, store,
341 editor=None):
341 editor=None):
342 def getfilectx(repo, memctx, path):
342 def getfilectx(repo, memctx, path):
343 data, mode, copied = store.getfile(path)
343 data, mode, copied = store.getfile(path)
344 if data is None:
344 if data is None:
345 return None
345 return None
346 islink, isexec = mode
346 islink, isexec = mode
347 return memfilectx(repo, path, data, islink=islink, isexec=isexec,
347 return memfilectx(repo, path, data, islink=islink, isexec=isexec,
348 copied=copied, memctx=memctx)
348 copied=copied, memctx=memctx)
349 extra = {}
349 extra = {}
350 if branch:
350 if branch:
351 extra['branch'] = encoding.fromlocal(branch)
351 extra['branch'] = encoding.fromlocal(branch)
352 ctx = memctx(repo, parents, text, files, getfilectx, user,
352 ctx = memctx(repo, parents, text, files, getfilectx, user,
353 date, extra, editor)
353 date, extra, editor)
354 return ctx
354 return ctx
355
355
356 class changectx(basectx):
356 class changectx(basectx):
357 """A changecontext object makes access to data related to a particular
357 """A changecontext object makes access to data related to a particular
358 changeset convenient. It represents a read-only context already present in
358 changeset convenient. It represents a read-only context already present in
359 the repo."""
359 the repo."""
360 def __init__(self, repo, changeid=''):
360 def __init__(self, repo, changeid=''):
361 """changeid is a revision number, node, or tag"""
361 """changeid is a revision number, node, or tag"""
362
362
363 # since basectx.__new__ already took care of copying the object, we
363 # since basectx.__new__ already took care of copying the object, we
364 # don't need to do anything in __init__, so we just exit here
364 # don't need to do anything in __init__, so we just exit here
365 if isinstance(changeid, basectx):
365 if isinstance(changeid, basectx):
366 return
366 return
367
367
368 if changeid == '':
368 if changeid == '':
369 changeid = '.'
369 changeid = '.'
370 self._repo = repo
370 self._repo = repo
371
371
372 try:
372 try:
373 if isinstance(changeid, int):
373 if isinstance(changeid, int):
374 self._node = repo.changelog.node(changeid)
374 self._node = repo.changelog.node(changeid)
375 self._rev = changeid
375 self._rev = changeid
376 return
376 return
377 if isinstance(changeid, long):
377 if isinstance(changeid, long):
378 changeid = str(changeid)
378 changeid = str(changeid)
379 if changeid == 'null':
379 if changeid == 'null':
380 self._node = nullid
380 self._node = nullid
381 self._rev = nullrev
381 self._rev = nullrev
382 return
382 return
383 if changeid == 'tip':
383 if changeid == 'tip':
384 self._node = repo.changelog.tip()
384 self._node = repo.changelog.tip()
385 self._rev = repo.changelog.rev(self._node)
385 self._rev = repo.changelog.rev(self._node)
386 return
386 return
387 if changeid == '.' or changeid == repo.dirstate.p1():
387 if changeid == '.' or changeid == repo.dirstate.p1():
388 # this is a hack to delay/avoid loading obsmarkers
388 # this is a hack to delay/avoid loading obsmarkers
389 # when we know that '.' won't be hidden
389 # when we know that '.' won't be hidden
390 self._node = repo.dirstate.p1()
390 self._node = repo.dirstate.p1()
391 self._rev = repo.unfiltered().changelog.rev(self._node)
391 self._rev = repo.unfiltered().changelog.rev(self._node)
392 return
392 return
393 if len(changeid) == 20:
393 if len(changeid) == 20:
394 try:
394 try:
395 self._node = changeid
395 self._node = changeid
396 self._rev = repo.changelog.rev(changeid)
396 self._rev = repo.changelog.rev(changeid)
397 return
397 return
398 except error.FilteredRepoLookupError:
398 except error.FilteredRepoLookupError:
399 raise
399 raise
400 except LookupError:
400 except LookupError:
401 pass
401 pass
402
402
403 try:
403 try:
404 r = int(changeid)
404 r = int(changeid)
405 if str(r) != changeid:
405 if str(r) != changeid:
406 raise ValueError
406 raise ValueError
407 l = len(repo.changelog)
407 l = len(repo.changelog)
408 if r < 0:
408 if r < 0:
409 r += l
409 r += l
410 if r < 0 or r >= l:
410 if r < 0 or r >= l:
411 raise ValueError
411 raise ValueError
412 self._rev = r
412 self._rev = r
413 self._node = repo.changelog.node(r)
413 self._node = repo.changelog.node(r)
414 return
414 return
415 except error.FilteredIndexError:
415 except error.FilteredIndexError:
416 raise
416 raise
417 except (ValueError, OverflowError, IndexError):
417 except (ValueError, OverflowError, IndexError):
418 pass
418 pass
419
419
420 if len(changeid) == 40:
420 if len(changeid) == 40:
421 try:
421 try:
422 self._node = bin(changeid)
422 self._node = bin(changeid)
423 self._rev = repo.changelog.rev(self._node)
423 self._rev = repo.changelog.rev(self._node)
424 return
424 return
425 except error.FilteredLookupError:
425 except error.FilteredLookupError:
426 raise
426 raise
427 except (TypeError, LookupError):
427 except (TypeError, LookupError):
428 pass
428 pass
429
429
430 # lookup bookmarks through the name interface
430 # lookup bookmarks through the name interface
431 try:
431 try:
432 self._node = repo.names.singlenode(repo, changeid)
432 self._node = repo.names.singlenode(repo, changeid)
433 self._rev = repo.changelog.rev(self._node)
433 self._rev = repo.changelog.rev(self._node)
434 return
434 return
435 except KeyError:
435 except KeyError:
436 pass
436 pass
437 except error.FilteredRepoLookupError:
437 except error.FilteredRepoLookupError:
438 raise
438 raise
439 except error.RepoLookupError:
439 except error.RepoLookupError:
440 pass
440 pass
441
441
442 self._node = repo.unfiltered().changelog._partialmatch(changeid)
442 self._node = repo.unfiltered().changelog._partialmatch(changeid)
443 if self._node is not None:
443 if self._node is not None:
444 self._rev = repo.changelog.rev(self._node)
444 self._rev = repo.changelog.rev(self._node)
445 return
445 return
446
446
447 # lookup failed
447 # lookup failed
448 # check if it might have come from damaged dirstate
448 # check if it might have come from damaged dirstate
449 #
449 #
450 # XXX we could avoid the unfiltered if we had a recognizable
450 # XXX we could avoid the unfiltered if we had a recognizable
451 # exception for filtered changeset access
451 # exception for filtered changeset access
452 if changeid in repo.unfiltered().dirstate.parents():
452 if changeid in repo.unfiltered().dirstate.parents():
453 msg = _("working directory has unknown parent '%s'!")
453 msg = _("working directory has unknown parent '%s'!")
454 raise error.Abort(msg % short(changeid))
454 raise error.Abort(msg % short(changeid))
455 try:
455 try:
456 if len(changeid) == 20:
456 if len(changeid) == 20:
457 changeid = hex(changeid)
457 changeid = hex(changeid)
458 except TypeError:
458 except TypeError:
459 pass
459 pass
460 except (error.FilteredIndexError, error.FilteredLookupError,
460 except (error.FilteredIndexError, error.FilteredLookupError,
461 error.FilteredRepoLookupError):
461 error.FilteredRepoLookupError):
462 if repo.filtername == 'visible':
462 if repo.filtername == 'visible':
463 msg = _("hidden revision '%s'") % changeid
463 msg = _("hidden revision '%s'") % changeid
464 hint = _('use --hidden to access hidden revisions')
464 hint = _('use --hidden to access hidden revisions')
465 raise error.FilteredRepoLookupError(msg, hint=hint)
465 raise error.FilteredRepoLookupError(msg, hint=hint)
466 msg = _("filtered revision '%s' (not in '%s' subset)")
466 msg = _("filtered revision '%s' (not in '%s' subset)")
467 msg %= (changeid, repo.filtername)
467 msg %= (changeid, repo.filtername)
468 raise error.FilteredRepoLookupError(msg)
468 raise error.FilteredRepoLookupError(msg)
469 except IndexError:
469 except IndexError:
470 pass
470 pass
471 raise error.RepoLookupError(
471 raise error.RepoLookupError(
472 _("unknown revision '%s'") % changeid)
472 _("unknown revision '%s'") % changeid)
473
473
474 def __hash__(self):
474 def __hash__(self):
475 try:
475 try:
476 return hash(self._rev)
476 return hash(self._rev)
477 except AttributeError:
477 except AttributeError:
478 return id(self)
478 return id(self)
479
479
480 def __nonzero__(self):
480 def __nonzero__(self):
481 return self._rev != nullrev
481 return self._rev != nullrev
482
482
483 @propertycache
483 @propertycache
484 def _changeset(self):
484 def _changeset(self):
485 return self._repo.changelog.read(self.rev())
485 return self._repo.changelog.read(self.rev())
486
486
487 @propertycache
487 @propertycache
488 def _manifest(self):
488 def _manifest(self):
489 return self._repo.manifest.read(self._changeset[0])
489 return self._repo.manifest.read(self._changeset[0])
490
490
491 @propertycache
491 @propertycache
492 def _manifestdelta(self):
492 def _manifestdelta(self):
493 return self._repo.manifest.readdelta(self._changeset[0])
493 return self._repo.manifest.readdelta(self._changeset[0])
494
494
495 @propertycache
495 @propertycache
496 def _parents(self):
496 def _parents(self):
497 p = self._repo.changelog.parentrevs(self._rev)
497 p = self._repo.changelog.parentrevs(self._rev)
498 if p[1] == nullrev:
498 if p[1] == nullrev:
499 p = p[:-1]
499 p = p[:-1]
500 return [changectx(self._repo, x) for x in p]
500 return [changectx(self._repo, x) for x in p]
501
501
502 def changeset(self):
502 def changeset(self):
503 return self._changeset
503 return self._changeset
504 def manifestnode(self):
504 def manifestnode(self):
505 return self._changeset[0]
505 return self._changeset[0]
506
506
507 def user(self):
507 def user(self):
508 return self._changeset[1]
508 return self._changeset[1]
509 def date(self):
509 def date(self):
510 return self._changeset[2]
510 return self._changeset[2]
511 def files(self):
511 def files(self):
512 return self._changeset[3]
512 return self._changeset[3]
513 def description(self):
513 def description(self):
514 return self._changeset[4]
514 return self._changeset[4]
515 def branch(self):
515 def branch(self):
516 return encoding.tolocal(self._changeset[5].get("branch"))
516 return encoding.tolocal(self._changeset[5].get("branch"))
517 def closesbranch(self):
517 def closesbranch(self):
518 return 'close' in self._changeset[5]
518 return 'close' in self._changeset[5]
519 def extra(self):
519 def extra(self):
520 return self._changeset[5]
520 return self._changeset[5]
521 def tags(self):
521 def tags(self):
522 return self._repo.nodetags(self._node)
522 return self._repo.nodetags(self._node)
523 def bookmarks(self):
523 def bookmarks(self):
524 return self._repo.nodebookmarks(self._node)
524 return self._repo.nodebookmarks(self._node)
525 def phase(self):
525 def phase(self):
526 return self._repo._phasecache.phase(self._repo, self._rev)
526 return self._repo._phasecache.phase(self._repo, self._rev)
527 def hidden(self):
527 def hidden(self):
528 return self._rev in repoview.filterrevs(self._repo, 'visible')
528 return self._rev in repoview.filterrevs(self._repo, 'visible')
529
529
# changectx graph-traversal helpers.

def children(self):
    """Return a changectx for each child of this changeset."""
    childnodes = self._repo.changelog.children(self._node)
    return [changectx(self._repo, node) for node in childnodes]

def ancestors(self):
    """Lazily yield a changectx for every ancestor of this revision."""
    for rev in self._repo.changelog.ancestors([self._rev]):
        yield changectx(self._repo, rev)

def descendants(self):
    """Lazily yield a changectx for every descendant of this revision."""
    for rev in self._repo.changelog.descendants([self._rev]):
        yield changectx(self._repo, rev)

def filectx(self, path, fileid=None, filelog=None):
    """Get a file context for ``path`` within this changeset.

    When ``fileid`` is not given, it is resolved through this
    changeset's manifest.
    """
    if fileid is None:
        fileid = self.filenode(path)
    return filectx(self._repo, path, fileid=fileid,
                   changectx=self, filelog=filelog)
549
549
def ancestor(self, c2, warn=False):
    """return the "best" ancestor context of self and c2

    If there are multiple candidates, it will show a message and check
    merge.preferancestor configuration before falling back to the
    revlog ancestor.
    """
    # deal with workingctxs: a working context has no node of its own,
    # so use its first parent's node instead
    n2 = c2._node
    if n2 is None:
        n2 = c2._parents[0]._node
    cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
    if not cahs:
        anc = nullid
    elif len(cahs) == 1:
        anc = cahs[0]
    else:
        # several candidate heads: honour merge.preferancestor when one
        # of the configured revisions is among them
        for r in self._repo.ui.configlist('merge', 'preferancestor'):
            try:
                ctx = changectx(self._repo, r)
            except error.RepoLookupError:
                continue
            anc = ctx.node()
            if anc in cahs:
                break
        else:
            # no configured preference matched: fall back to the revlog
            anc = self._repo.changelog.ancestor(self._node, n2)
        if warn:
            self._repo.ui.status(
                (_("note: using %s as ancestor of %s and %s\n") %
                 (short(anc), short(self._node), short(n2))) +
                ''.join(_(" alternatively, use --config "
                          "merge.preferancestor=%s\n") %
                        short(n) for n in sorted(cahs) if n != anc))
    return changectx(self._repo, anc)

def descendant(self, other):
    """True if other is descendant of this changeset"""
    return self._repo.changelog.descendant(self._rev, other._rev)
588
588
def walk(self, match):
    """Yield the file names in this changeset matched by ``match``.

    Exact patterns naming a path absent from the changeset are reported
    through match.bad(), unless the pattern names a directory present
    in the changeset (self._dirs).
    """
    fset = set(match.files())
    # for dirstate.walk, files=['.'] means "walk the whole tree".
    # follow that here, too
    fset.discard('.')

    # avoid the entire walk if we're only looking for specific files
    if fset and not match.anypats():
        if all(fn in self for fn in fset):
            for fn in sorted(fset):
                if match(fn):
                    yield fn
            # end the generator with a plain return: raising
            # StopIteration inside a generator is an error under
            # PEP 479 (Python 3.7+)
            return

    for fn in self:
        if fn in fset:
            # specified pattern is the exact name
            fset.remove(fn)
        if match(fn):
            yield fn
    for fn in sorted(fset):
        if fn in self._dirs:
            # specified pattern is a directory
            continue
        match.bad(fn, _('no such file in rev %s') % self)

def matches(self, match):
    """Return an iterator over the files matched by ``match``."""
    return self.walk(match)
617
617
class basefilectx(object):
    """A filecontext object represents the common logic for its children:
    filectx: read-only access to a filerevision that is already present
             in the repo,
    workingfilectx: a filecontext that represents files from the working
                    directory,
    memfilectx: a filecontext that represents files in-memory."""
    def __new__(cls, repo, path, *args, **kwargs):
        # subclasses may override __new__ to return a cached or
        # specialized instance; the base implementation simply
        # allocates a fresh object
        return super(basefilectx, cls).__new__(cls)
627
627
# Lazily computed, cached attributes of basefilectx (@propertycache
# stores the computed value on the instance on first access).

@propertycache
def _filelog(self):
    # the filelog (per-file revision log) for this path
    return self._repo.file(self._path)

@propertycache
def _changeid(self):
    """The changelog revision this file context is attached to.

    Preference order: an explicitly set id, the attached changectx,
    a lazily adjusted linkrev when only a descendant revision is
    known, and finally the raw (possibly aliased) linkrev.
    """
    if '_changeid' in self.__dict__:
        return self._changeid
    elif '_changectx' in self.__dict__:
        return self._changectx.rev()
    elif '_descendantrev' in self.__dict__:
        # this file context was created from a revision with a known
        # descendant, we can (lazily) correct for linkrev aliases
        return self._adjustlinkrev(self._path, self._filelog,
                                   self._filenode, self._descendantrev)
    else:
        return self._filelog.linkrev(self._filerev)

@propertycache
def _filenode(self):
    # resolve the file node either from the explicit fileid or from
    # the attached change context's manifest
    if '_fileid' in self.__dict__:
        return self._filelog.lookup(self._fileid)
    else:
        return self._changectx.filenode(self._path)

@propertycache
def _filerev(self):
    # file revision number corresponding to _filenode
    return self._filelog.rev(self._filenode)

@propertycache
def _repopath(self):
    # repository-relative path of this file
    return self._path
660
660
# Object-protocol methods of basefilectx.

def __nonzero__(self):
    # truthiness means "this file revision exists": a LookupError
    # while resolving the file node means the file is missing
    try:
        self._filenode
        return True
    except error.LookupError:
        # file is missing
        return False

def __str__(self):
    return "%s@%s" % (self.path(), self._changectx)

def __repr__(self):
    return "<%s %s>" % (type(self).__name__, str(self))

def __hash__(self):
    try:
        return hash((self._path, self._filenode))
    except AttributeError:
        # not fully initialized yet: fall back to identity hashing
        return id(self)

def __eq__(self, other):
    try:
        # equal iff same concrete type, same path and same file node
        return (type(self) == type(other) and self._path == other._path
                and self._filenode == other._filenode)
    except AttributeError:
        return False

def __ne__(self, other):
    return not (self == other)
690
690
# Simple accessors of basefilectx. Most delegate either to cached
# filelog state or to the associated change context.

def filerev(self):
    return self._filerev
def filenode(self):
    return self._filenode
def flags(self):
    # flags ('x' executable, 'l' symlink) come from the changeset
    return self._changectx.flags(self._path)
def filelog(self):
    return self._filelog
def rev(self):
    return self._changeid
def linkrev(self):
    # raw linkrev from the filelog; may alias another changeset
    return self._filelog.linkrev(self._filerev)
def node(self):
    return self._changectx.node()
def hex(self):
    return self._changectx.hex()
def user(self):
    return self._changectx.user()
def date(self):
    return self._changectx.date()
def files(self):
    return self._changectx.files()
def description(self):
    return self._changectx.description()
def branch(self):
    return self._changectx.branch()
def extra(self):
    return self._changectx.extra()
def phase(self):
    return self._changectx.phase()
def phasestr(self):
    return self._changectx.phasestr()
def manifest(self):
    return self._changectx.manifest()
def changectx(self):
    return self._changectx

def path(self):
    return self._path

def isbinary(self):
    """True if the file content looks binary (best effort)."""
    try:
        return util.binary(self.data())
    except IOError:
        return False
def isexec(self):
    return 'x' in self.flags()
def islink(self):
    return 'l' in self.flags()
740
740
def cmp(self, fctx):
    """compare with other file context

    returns True if different than fctx.

    A real content comparison is only performed when the cheap size
    check cannot rule equality out.
    """
    if (fctx._filerev is None
        and (self._repo._encodefilterpats
             # if file data starts with '\1\n', empty metadata block is
             # prepended, which adds 4 bytes to filelog.size().
             or self.size() - 4 == fctx.size())
        or self.size() == fctx.size()):
        # sizes match (or cannot be trusted): compare actual content
        return self._filelog.cmp(self._filenode, fctx.data())

    # sizes differ, so the contents must differ
    return True
755
755
def _adjustlinkrev(self, path, filelog, fnode, srcrev, inclusive=False):
    """return the first ancestor of <srcrev> introducing <fnode>

    If the linkrev of the file revision does not point to an ancestor of
    srcrev, we'll walk down the ancestors until we find one introducing
    this file revision.

    :repo: a localrepository object (used to access changelog and manifest)
    :path: the file path
    :fnode: the nodeid of the file revision
    :filelog: the filelog of this path
    :srcrev: the changeset revision we search ancestors from
    :inclusive: if true, the src revision will also be checked
    """
    repo = self._repo
    cl = repo.unfiltered().changelog
    ma = repo.manifest
    # fetch the linkrev
    fr = filelog.rev(fnode)
    lkr = filelog.linkrev(fr)
    # hack to reuse ancestor computation when searching for renames
    memberanc = getattr(self, '_ancestrycontext', None)
    iteranc = None
    if memberanc is None:
        memberanc = iteranc = cl.ancestors([srcrev], lkr,
                                           inclusive=inclusive)
    # check if this linkrev is an ancestor of srcrev
    if lkr not in memberanc:
        if iteranc is None:
            iteranc = cl.ancestors([srcrev], lkr, inclusive=inclusive)
        for a in iteranc:
            ac = cl.read(a)  # get changeset data (we avoid object creation)
            if path in ac[3]:  # checking the 'files' field.
                # The file has been touched, check if the content is
                # similar to the one we search for.
                if fnode == ma.readfast(ac[0]).get(path):
                    return a
        # In theory, we should never get out of that loop without a result.
        # But if manifest uses a buggy file revision (not children of the
        # one it replaces) we could. Such a buggy situation will likely
        # result in a crash somewhere else at some point.
    return lkr
798
798
def introrev(self):
    """return the rev of the changeset which introduced this file revision

    This method is different from linkrev because it take into account the
    changeset the filectx was created from. It ensures the returned
    revision is one of its ancestors. This prevents bugs from
    'linkrev-shadowing' when a file revision is used by multiple
    changesets.
    """
    lkr = self.linkrev()
    attrs = vars(self)
    # without an attached changeset (or when the linkrev already matches
    # it), the raw linkrev is the answer; otherwise walk ancestors to
    # correct for linkrev aliasing
    noctx = not ('_changeid' in attrs or '_changectx' in attrs)
    if noctx or self.rev() == lkr:
        return self.linkrev()
    return self._adjustlinkrev(self._path, self._filelog, self._filenode,
                               self.rev(), inclusive=True)
815
815
def parents(self):
    """Return the parent file contexts, with rename info folded in."""
    _path = self._path
    fl = self._filelog
    parents = self._filelog.parents(self._filenode)
    pl = [(_path, node, fl) for node in parents if node != nullid]

    r = fl.renamed(self._filenode)
    if r:
        # - In the simple rename case, both parent are nullid, pl is empty.
        # - In case of merge, only one of the parent is null id and should
        # be replaced with the rename information. This parent is -always-
        # the first one.
        #
        # As null id have always been filtered out in the previous list
        # comprehension, inserting to 0 will always result in "replacing
        # first nullid parent with rename information.
        pl.insert(0, (r[0], r[1], self._repo.file(r[0])))

    ret = []
    for path, fnode, l in pl:
        if '_changeid' in vars(self) or '_changectx' in vars(self):
            # If self is associated with a changeset (probably explicitly
            # fed), ensure the created filectx is associated with a
            # changeset that is an ancestor of self.changectx.
            # This lets us later use _adjustlinkrev to get a correct link.
            fctx = filectx(self._repo, path, fileid=fnode, filelog=l)
            fctx._descendantrev = self.rev()
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        elif '_descendantrev' in vars(self):
            # Otherwise propagate _descendantrev if we have one associated.
            fctx = filectx(self._repo, path, fileid=fnode, filelog=l)
            fctx._descendantrev = self._descendantrev
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        else:
            fctx = filectx(self._repo, path, fileid=fnode, filelog=l)
        ret.append(fctx)
    return ret

def p1(self):
    """Return the first parent file context."""
    return self.parents()[0]

def p2(self):
    """Return the second parent file context (a null context if none)."""
    p = self.parents()
    if len(p) == 2:
        return p[1]
    return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
862
862
def annotate(self, follow=False, linenumber=None, diffopts=None):
    '''returns a list of tuples of (ctx, line) for each line
    in the file, where ctx is the filectx of the node where
    that line was last changed.
    This returns tuples of ((ctx, linenumber), line) for each line,
    if "linenumber" parameter is NOT "None".
    In such tuples, linenumber means one at the first appearance
    in the managed file.
    To reduce annotation cost,
    this returns fixed value(False is used) as linenumber,
    if "linenumber" parameter is "False".'''

    # decorate() pairs each line of `text` with its annotation payload
    if linenumber is None:
        def decorate(text, rev):
            return ([rev] * len(text.splitlines()), text)
    elif linenumber:
        def decorate(text, rev):
            size = len(text.splitlines())
            return ([(rev, i) for i in xrange(1, size + 1)], text)
    else:
        def decorate(text, rev):
            return ([(rev, False)] * len(text.splitlines()), text)

    def pair(parent, child):
        # copy annotations for unchanged ('=') blocks from the parent
        blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
                                 refine=True)
        for (a1, a2, b1, b2), t in blocks:
            # Changed blocks ('!') or blocks made only of blank lines ('~')
            # belong to the child.
            if t == '=':
                child[0][b1:b2] = parent[0][a1:a2]
        return child

    getlog = util.lrucachefunc(lambda x: self._repo.file(x))

    def parents(f):
        pl = f.parents()

        # Don't return renamed parents if we aren't following.
        if not follow:
            pl = [p for p in pl if p.path() == f.path()]

        # renamed filectx won't have a filelog yet, so set it
        # from the cache to save time
        for p in pl:
            if not '_filelog' in p.__dict__:
                p._filelog = getlog(p.path())

        return pl

    # use linkrev to find the first changeset where self appeared
    base = self
    introrev = self.introrev()
    if self.rev() != introrev:
        base = self.filectx(self.filenode(), changeid=introrev)

    # This algorithm would prefer to be recursive, but Python is a
    # bit recursion-hostile. Instead we do an iterative
    # depth-first search.

    visit = [base]
    hist = {}
    pcache = {}
    needed = {base: 1}
    while visit:
        f = visit[-1]
        pcached = f in pcache
        if not pcached:
            pcache[f] = parents(f)

        ready = True
        pl = pcache[f]
        for p in pl:
            if p not in hist:
                ready = False
                visit.append(p)
            if not pcached:
                # reference-count parents so their history can be freed
                # as soon as the last child consumed it
                needed[p] = needed.get(p, 0) + 1
        if ready:
            visit.pop()
            reusable = f in hist
            if reusable:
                curr = hist[f]
            else:
                curr = decorate(f.data(), f)
            for p in pl:
                if not reusable:
                    curr = pair(hist[p], curr)
                if needed[p] == 1:
                    del hist[p]
                    del needed[p]
                else:
                    needed[p] -= 1

            hist[f] = curr
            pcache[f] = []

    return zip(hist[base][0], hist[base][1].splitlines(True))
961
961
def ancestors(self, followfirst=False):
    """Yield ancestor file contexts, highest (linkrev, filenode) first.

    With followfirst=True only first parents are followed.
    """
    visit = {}
    c = self
    # slice bound: only the first parent when followfirst is requested
    cut = followfirst and 1 or None
    while True:
        for parent in c.parents()[:cut]:
            visit[(parent.linkrev(), parent.filenode())] = parent
        if not visit:
            break
        # pop the pending candidate with the largest key so contexts
        # are emitted newest-first
        c = visit.pop(max(visit))
        yield c
973
973
class filectx(basefilectx):
    """A filecontext object makes access to data related to a particular
    filerevision convenient."""
    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path

        # at least one way of locating the file revision must be given
        assert (changeid is not None
                or fileid is not None
                or changectx is not None), \
                ("bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))

        if filelog is not None:
            self._filelog = filelog

        # only seed the lazy attributes that were explicitly provided;
        # the rest are derived on demand by the propertycaches
        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid
999
999
@propertycache
def _changectx(self):
    # the changectx this file revision belongs to, built lazily
    try:
        return changectx(self._repo, self._changeid)
    except error.FilteredRepoLookupError:
        # Linkrev may point to any revision in the repository. When the
        # repository is filtered this may lead to `filectx` trying to build
        # `changectx` for filtered revision. In such case we fallback to
        # creating `changectx` on the unfiltered version of the repository.
        # This fallback should not be an issue because `changectx` from
        # `filectx` are not used in complex operations that care about
        # filtering.
        #
        # This fallback is a cheap and dirty fix that prevent several
        # crashes. It does not ensure the behavior is correct. However the
        # behavior was not correct before filtering either and "incorrect
        # behavior" is seen as better as "crash"
        #
        # Linkrevs have several serious troubles with filtering that are
        # complicated to solve. Proper handling of the issue here should be
        # considered when solving linkrev issue are on the table.
        return changectx(self._repo.unfiltered(), self._changeid)
1022
1022
def filectx(self, fileid, changeid=None):
    '''opens an arbitrary revision of the file without
    opening a new filelog'''
    # reuse self._filelog so the new context shares the parsed revlog
    return filectx(self._repo, self._path, fileid=fileid,
                   filelog=self._filelog, changeid=changeid)
1028
1028
1029 def data(self):
1029 def data(self):
1030 try:
1030 try:
1031 return self._filelog.read(self._filenode)
1031 return self._filelog.read(self._filenode)
1032 except error.CensoredNodeError:
1032 except error.CensoredNodeError:
1033 if self._repo.ui.config("censor", "policy", "abort") == "ignore":
1033 if self._repo.ui.config("censor", "policy", "abort") == "ignore":
1034 return ""
1034 return ""
1035 raise util.Abort(_("censored node: %s") % short(self._filenode),
1035 raise util.Abort(_("censored node: %s") % short(self._filenode),
1036 hint=_("set censor.policy to ignore errors"))
1036 hint=_("set censor.policy to ignore errors"))
1037
1037
1038 def size(self):
1038 def size(self):
1039 return self._filelog.size(self._filerev)
1039 return self._filelog.size(self._filerev)
1040
1040
1041 def renamed(self):
1041 def renamed(self):
1042 """check if file was actually renamed in this changeset revision
1042 """check if file was actually renamed in this changeset revision
1043
1043
1044 If rename logged in file revision, we report copy for changeset only
1044 If rename logged in file revision, we report copy for changeset only
1045 if file revisions linkrev points back to the changeset in question
1045 if file revisions linkrev points back to the changeset in question
1046 or both changeset parents contain different file revisions.
1046 or both changeset parents contain different file revisions.
1047 """
1047 """
1048
1048
1049 renamed = self._filelog.renamed(self._filenode)
1049 renamed = self._filelog.renamed(self._filenode)
1050 if not renamed:
1050 if not renamed:
1051 return renamed
1051 return renamed
1052
1052
1053 if self.rev() == self.linkrev():
1053 if self.rev() == self.linkrev():
1054 return renamed
1054 return renamed
1055
1055
1056 name = self.path()
1056 name = self.path()
1057 fnode = self._filenode
1057 fnode = self._filenode
1058 for p in self._changectx.parents():
1058 for p in self._changectx.parents():
1059 try:
1059 try:
1060 if fnode == p.filenode(name):
1060 if fnode == p.filenode(name):
1061 return None
1061 return None
1062 except error.LookupError:
1062 except error.LookupError:
1063 pass
1063 pass
1064 return renamed
1064 return renamed
1065
1065
1066 def children(self):
1066 def children(self):
1067 # hard for renames
1067 # hard for renames
1068 c = self._filelog.children(self._filenode)
1068 c = self._filelog.children(self._filenode)
1069 return [filectx(self._repo, self._path, fileid=x,
1069 return [filectx(self._repo, self._path, fileid=x,
1070 filelog=self._filelog) for x in c]
1070 filelog=self._filelog) for x in c]
1071
1071
1072 class committablectx(basectx):
1072 class committablectx(basectx):
1073 """A committablectx object provides common functionality for a context that
1073 """A committablectx object provides common functionality for a context that
1074 wants the ability to commit, e.g. workingctx or memctx."""
1074 wants the ability to commit, e.g. workingctx or memctx."""
1075 def __init__(self, repo, text="", user=None, date=None, extra=None,
1075 def __init__(self, repo, text="", user=None, date=None, extra=None,
1076 changes=None):
1076 changes=None):
1077 self._repo = repo
1077 self._repo = repo
1078 self._rev = None
1078 self._rev = None
1079 self._node = None
1079 self._node = None
1080 self._text = text
1080 self._text = text
1081 if date:
1081 if date:
1082 self._date = util.parsedate(date)
1082 self._date = util.parsedate(date)
1083 if user:
1083 if user:
1084 self._user = user
1084 self._user = user
1085 if changes:
1085 if changes:
1086 self._status = changes
1086 self._status = changes
1087
1087
1088 self._extra = {}
1088 self._extra = {}
1089 if extra:
1089 if extra:
1090 self._extra = extra.copy()
1090 self._extra = extra.copy()
1091 if 'branch' not in self._extra:
1091 if 'branch' not in self._extra:
1092 try:
1092 try:
1093 branch = encoding.fromlocal(self._repo.dirstate.branch())
1093 branch = encoding.fromlocal(self._repo.dirstate.branch())
1094 except UnicodeDecodeError:
1094 except UnicodeDecodeError:
1095 raise util.Abort(_('branch name not in UTF-8!'))
1095 raise util.Abort(_('branch name not in UTF-8!'))
1096 self._extra['branch'] = branch
1096 self._extra['branch'] = branch
1097 if self._extra['branch'] == '':
1097 if self._extra['branch'] == '':
1098 self._extra['branch'] = 'default'
1098 self._extra['branch'] = 'default'
1099
1099
1100 def __str__(self):
1100 def __str__(self):
1101 return str(self._parents[0]) + "+"
1101 return str(self._parents[0]) + "+"
1102
1102
1103 def __nonzero__(self):
1103 def __nonzero__(self):
1104 return True
1104 return True
1105
1105
1106 def _buildflagfunc(self):
1106 def _buildflagfunc(self):
1107 # Create a fallback function for getting file flags when the
1107 # Create a fallback function for getting file flags when the
1108 # filesystem doesn't support them
1108 # filesystem doesn't support them
1109
1109
1110 copiesget = self._repo.dirstate.copies().get
1110 copiesget = self._repo.dirstate.copies().get
1111
1111
1112 if len(self._parents) < 2:
1112 if len(self._parents) < 2:
1113 # when we have one parent, it's easy: copy from parent
1113 # when we have one parent, it's easy: copy from parent
1114 man = self._parents[0].manifest()
1114 man = self._parents[0].manifest()
1115 def func(f):
1115 def func(f):
1116 f = copiesget(f, f)
1116 f = copiesget(f, f)
1117 return man.flags(f)
1117 return man.flags(f)
1118 else:
1118 else:
1119 # merges are tricky: we try to reconstruct the unstored
1119 # merges are tricky: we try to reconstruct the unstored
1120 # result from the merge (issue1802)
1120 # result from the merge (issue1802)
1121 p1, p2 = self._parents
1121 p1, p2 = self._parents
1122 pa = p1.ancestor(p2)
1122 pa = p1.ancestor(p2)
1123 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1123 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1124
1124
1125 def func(f):
1125 def func(f):
1126 f = copiesget(f, f) # may be wrong for merges with copies
1126 f = copiesget(f, f) # may be wrong for merges with copies
1127 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1127 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1128 if fl1 == fl2:
1128 if fl1 == fl2:
1129 return fl1
1129 return fl1
1130 if fl1 == fla:
1130 if fl1 == fla:
1131 return fl2
1131 return fl2
1132 if fl2 == fla:
1132 if fl2 == fla:
1133 return fl1
1133 return fl1
1134 return '' # punt for conflicts
1134 return '' # punt for conflicts
1135
1135
1136 return func
1136 return func
1137
1137
1138 @propertycache
1138 @propertycache
1139 def _flagfunc(self):
1139 def _flagfunc(self):
1140 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1140 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1141
1141
1142 @propertycache
1142 @propertycache
1143 def _manifest(self):
1143 def _manifest(self):
1144 """generate a manifest corresponding to the values in self._status
1144 """generate a manifest corresponding to the values in self._status
1145
1145
1146 This reuse the file nodeid from parent, but we append an extra letter
1146 This reuse the file nodeid from parent, but we append an extra letter
1147 when modified. Modified files get an extra 'm' while added files get
1147 when modified. Modified files get an extra 'm' while added files get
1148 an extra 'a'. This is used by manifests merge to see that files
1148 an extra 'a'. This is used by manifests merge to see that files
1149 are different and by update logic to avoid deleting newly added files.
1149 are different and by update logic to avoid deleting newly added files.
1150 """
1150 """
1151
1151
1152 man1 = self._parents[0].manifest()
1152 man1 = self._parents[0].manifest()
1153 man = man1.copy()
1153 man = man1.copy()
1154 if len(self._parents) > 1:
1154 if len(self._parents) > 1:
1155 man2 = self.p2().manifest()
1155 man2 = self.p2().manifest()
1156 def getman(f):
1156 def getman(f):
1157 if f in man1:
1157 if f in man1:
1158 return man1
1158 return man1
1159 return man2
1159 return man2
1160 else:
1160 else:
1161 getman = lambda f: man1
1161 getman = lambda f: man1
1162
1162
1163 copied = self._repo.dirstate.copies()
1163 copied = self._repo.dirstate.copies()
1164 ff = self._flagfunc
1164 ff = self._flagfunc
1165 for i, l in (("a", self._status.added), ("m", self._status.modified)):
1165 for i, l in (("a", self._status.added), ("m", self._status.modified)):
1166 for f in l:
1166 for f in l:
1167 orig = copied.get(f, f)
1167 orig = copied.get(f, f)
1168 man[f] = getman(orig).get(orig, nullid) + i
1168 man[f] = getman(orig).get(orig, nullid) + i
1169 try:
1169 try:
1170 man.setflag(f, ff(f))
1170 man.setflag(f, ff(f))
1171 except OSError:
1171 except OSError:
1172 pass
1172 pass
1173
1173
1174 for f in self._status.deleted + self._status.removed:
1174 for f in self._status.deleted + self._status.removed:
1175 if f in man:
1175 if f in man:
1176 del man[f]
1176 del man[f]
1177
1177
1178 return man
1178 return man
1179
1179
1180 @propertycache
1180 @propertycache
1181 def _status(self):
1181 def _status(self):
1182 return self._repo.status()
1182 return self._repo.status()
1183
1183
1184 @propertycache
1184 @propertycache
1185 def _user(self):
1185 def _user(self):
1186 return self._repo.ui.username()
1186 return self._repo.ui.username()
1187
1187
1188 @propertycache
1188 @propertycache
1189 def _date(self):
1189 def _date(self):
1190 return util.makedate()
1190 return util.makedate()
1191
1191
1192 def subrev(self, subpath):
1192 def subrev(self, subpath):
1193 return None
1193 return None
1194
1194
1195 def user(self):
1195 def user(self):
1196 return self._user or self._repo.ui.username()
1196 return self._user or self._repo.ui.username()
1197 def date(self):
1197 def date(self):
1198 return self._date
1198 return self._date
1199 def description(self):
1199 def description(self):
1200 return self._text
1200 return self._text
1201 def files(self):
1201 def files(self):
1202 return sorted(self._status.modified + self._status.added +
1202 return sorted(self._status.modified + self._status.added +
1203 self._status.removed)
1203 self._status.removed)
1204
1204
1205 def modified(self):
1205 def modified(self):
1206 return self._status.modified
1206 return self._status.modified
1207 def added(self):
1207 def added(self):
1208 return self._status.added
1208 return self._status.added
1209 def removed(self):
1209 def removed(self):
1210 return self._status.removed
1210 return self._status.removed
1211 def deleted(self):
1211 def deleted(self):
1212 return self._status.deleted
1212 return self._status.deleted
1213 def branch(self):
1213 def branch(self):
1214 return encoding.tolocal(self._extra['branch'])
1214 return encoding.tolocal(self._extra['branch'])
1215 def closesbranch(self):
1215 def closesbranch(self):
1216 return 'close' in self._extra
1216 return 'close' in self._extra
1217 def extra(self):
1217 def extra(self):
1218 return self._extra
1218 return self._extra
1219
1219
1220 def tags(self):
1220 def tags(self):
1221 t = []
1221 t = []
1222 for p in self.parents():
1222 for p in self.parents():
1223 t.extend(p.tags())
1223 t.extend(p.tags())
1224 return t
1224 return t
1225
1225
1226 def bookmarks(self):
1226 def bookmarks(self):
1227 b = []
1227 b = []
1228 for p in self.parents():
1228 for p in self.parents():
1229 b.extend(p.bookmarks())
1229 b.extend(p.bookmarks())
1230 return b
1230 return b
1231
1231
1232 def phase(self):
1232 def phase(self):
1233 phase = phases.draft # default phase to draft
1233 phase = phases.draft # default phase to draft
1234 for p in self.parents():
1234 for p in self.parents():
1235 phase = max(phase, p.phase())
1235 phase = max(phase, p.phase())
1236 return phase
1236 return phase
1237
1237
1238 def hidden(self):
1238 def hidden(self):
1239 return False
1239 return False
1240
1240
1241 def children(self):
1241 def children(self):
1242 return []
1242 return []
1243
1243
1244 def flags(self, path):
1244 def flags(self, path):
1245 if '_manifest' in self.__dict__:
1245 if '_manifest' in self.__dict__:
1246 try:
1246 try:
1247 return self._manifest.flags(path)
1247 return self._manifest.flags(path)
1248 except KeyError:
1248 except KeyError:
1249 return ''
1249 return ''
1250
1250
1251 try:
1251 try:
1252 return self._flagfunc(path)
1252 return self._flagfunc(path)
1253 except OSError:
1253 except OSError:
1254 return ''
1254 return ''
1255
1255
1256 def ancestor(self, c2):
1256 def ancestor(self, c2):
1257 """return the "best" ancestor context of self and c2"""
1257 """return the "best" ancestor context of self and c2"""
1258 return self._parents[0].ancestor(c2) # punt on two parents for now
1258 return self._parents[0].ancestor(c2) # punt on two parents for now
1259
1259
1260 def walk(self, match):
1260 def walk(self, match):
1261 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1261 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1262 True, False))
1262 True, False))
1263
1263
1264 def matches(self, match):
1264 def matches(self, match):
1265 return sorted(self._repo.dirstate.matches(match))
1265 return sorted(self._repo.dirstate.matches(match))
1266
1266
1267 def ancestors(self):
1267 def ancestors(self):
1268 for p in self._parents:
1268 for p in self._parents:
1269 yield p
1269 yield p
1270 for a in self._repo.changelog.ancestors(
1270 for a in self._repo.changelog.ancestors(
1271 [p.rev() for p in self._parents]):
1271 [p.rev() for p in self._parents]):
1272 yield changectx(self._repo, a)
1272 yield changectx(self._repo, a)
1273
1273
1274 def markcommitted(self, node):
1274 def markcommitted(self, node):
1275 """Perform post-commit cleanup necessary after committing this ctx
1275 """Perform post-commit cleanup necessary after committing this ctx
1276
1276
1277 Specifically, this updates backing stores this working context
1277 Specifically, this updates backing stores this working context
1278 wraps to reflect the fact that the changes reflected by this
1278 wraps to reflect the fact that the changes reflected by this
1279 workingctx have been committed. For example, it marks
1279 workingctx have been committed. For example, it marks
1280 modified and added files as normal in the dirstate.
1280 modified and added files as normal in the dirstate.
1281
1281
1282 """
1282 """
1283
1283
1284 self._repo.dirstate.beginparentchange()
1284 self._repo.dirstate.beginparentchange()
1285 for f in self.modified() + self.added():
1285 for f in self.modified() + self.added():
1286 self._repo.dirstate.normal(f)
1286 self._repo.dirstate.normal(f)
1287 for f in self.removed():
1287 for f in self.removed():
1288 self._repo.dirstate.drop(f)
1288 self._repo.dirstate.drop(f)
1289 self._repo.dirstate.setparents(node)
1289 self._repo.dirstate.setparents(node)
1290 self._repo.dirstate.endparentchange()
1290 self._repo.dirstate.endparentchange()
1291
1291
1292 def dirs(self):
1293 return self._repo.dirstate.dirs()
1294
1295 class workingctx(committablectx):
1292 class workingctx(committablectx):
1296 """A workingctx object makes access to data related to
1293 """A workingctx object makes access to data related to
1297 the current working directory convenient.
1294 the current working directory convenient.
1298 date - any valid date string or (unixtime, offset), or None.
1295 date - any valid date string or (unixtime, offset), or None.
1299 user - username string, or None.
1296 user - username string, or None.
1300 extra - a dictionary of extra values, or None.
1297 extra - a dictionary of extra values, or None.
1301 changes - a list of file lists as returned by localrepo.status()
1298 changes - a list of file lists as returned by localrepo.status()
1302 or None to use the repository status.
1299 or None to use the repository status.
1303 """
1300 """
1304 def __init__(self, repo, text="", user=None, date=None, extra=None,
1301 def __init__(self, repo, text="", user=None, date=None, extra=None,
1305 changes=None):
1302 changes=None):
1306 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1303 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1307
1304
1308 def __iter__(self):
1305 def __iter__(self):
1309 d = self._repo.dirstate
1306 d = self._repo.dirstate
1310 for f in d:
1307 for f in d:
1311 if d[f] != 'r':
1308 if d[f] != 'r':
1312 yield f
1309 yield f
1313
1310
1314 def __contains__(self, key):
1311 def __contains__(self, key):
1315 return self._repo.dirstate[key] not in "?r"
1312 return self._repo.dirstate[key] not in "?r"
1316
1313
1317 @propertycache
1314 @propertycache
1318 def _parents(self):
1315 def _parents(self):
1319 p = self._repo.dirstate.parents()
1316 p = self._repo.dirstate.parents()
1320 if p[1] == nullid:
1317 if p[1] == nullid:
1321 p = p[:-1]
1318 p = p[:-1]
1322 return [changectx(self._repo, x) for x in p]
1319 return [changectx(self._repo, x) for x in p]
1323
1320
1324 def filectx(self, path, filelog=None):
1321 def filectx(self, path, filelog=None):
1325 """get a file context from the working directory"""
1322 """get a file context from the working directory"""
1326 return workingfilectx(self._repo, path, workingctx=self,
1323 return workingfilectx(self._repo, path, workingctx=self,
1327 filelog=filelog)
1324 filelog=filelog)
1328
1325
1329 def dirty(self, missing=False, merge=True, branch=True):
1326 def dirty(self, missing=False, merge=True, branch=True):
1330 "check whether a working directory is modified"
1327 "check whether a working directory is modified"
1331 # check subrepos first
1328 # check subrepos first
1332 for s in sorted(self.substate):
1329 for s in sorted(self.substate):
1333 if self.sub(s).dirty():
1330 if self.sub(s).dirty():
1334 return True
1331 return True
1335 # check current working dir
1332 # check current working dir
1336 return ((merge and self.p2()) or
1333 return ((merge and self.p2()) or
1337 (branch and self.branch() != self.p1().branch()) or
1334 (branch and self.branch() != self.p1().branch()) or
1338 self.modified() or self.added() or self.removed() or
1335 self.modified() or self.added() or self.removed() or
1339 (missing and self.deleted()))
1336 (missing and self.deleted()))
1340
1337
1341 def add(self, list, prefix=""):
1338 def add(self, list, prefix=""):
1342 join = lambda f: os.path.join(prefix, f)
1339 join = lambda f: os.path.join(prefix, f)
1343 wlock = self._repo.wlock()
1340 wlock = self._repo.wlock()
1344 ui, ds = self._repo.ui, self._repo.dirstate
1341 ui, ds = self._repo.ui, self._repo.dirstate
1345 try:
1342 try:
1346 rejected = []
1343 rejected = []
1347 lstat = self._repo.wvfs.lstat
1344 lstat = self._repo.wvfs.lstat
1348 for f in list:
1345 for f in list:
1349 scmutil.checkportable(ui, join(f))
1346 scmutil.checkportable(ui, join(f))
1350 try:
1347 try:
1351 st = lstat(f)
1348 st = lstat(f)
1352 except OSError:
1349 except OSError:
1353 ui.warn(_("%s does not exist!\n") % join(f))
1350 ui.warn(_("%s does not exist!\n") % join(f))
1354 rejected.append(f)
1351 rejected.append(f)
1355 continue
1352 continue
1356 if st.st_size > 10000000:
1353 if st.st_size > 10000000:
1357 ui.warn(_("%s: up to %d MB of RAM may be required "
1354 ui.warn(_("%s: up to %d MB of RAM may be required "
1358 "to manage this file\n"
1355 "to manage this file\n"
1359 "(use 'hg revert %s' to cancel the "
1356 "(use 'hg revert %s' to cancel the "
1360 "pending addition)\n")
1357 "pending addition)\n")
1361 % (f, 3 * st.st_size // 1000000, join(f)))
1358 % (f, 3 * st.st_size // 1000000, join(f)))
1362 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1359 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1363 ui.warn(_("%s not added: only files and symlinks "
1360 ui.warn(_("%s not added: only files and symlinks "
1364 "supported currently\n") % join(f))
1361 "supported currently\n") % join(f))
1365 rejected.append(f)
1362 rejected.append(f)
1366 elif ds[f] in 'amn':
1363 elif ds[f] in 'amn':
1367 ui.warn(_("%s already tracked!\n") % join(f))
1364 ui.warn(_("%s already tracked!\n") % join(f))
1368 elif ds[f] == 'r':
1365 elif ds[f] == 'r':
1369 ds.normallookup(f)
1366 ds.normallookup(f)
1370 else:
1367 else:
1371 ds.add(f)
1368 ds.add(f)
1372 return rejected
1369 return rejected
1373 finally:
1370 finally:
1374 wlock.release()
1371 wlock.release()
1375
1372
1376 def forget(self, files, prefix=""):
1373 def forget(self, files, prefix=""):
1377 join = lambda f: os.path.join(prefix, f)
1374 join = lambda f: os.path.join(prefix, f)
1378 wlock = self._repo.wlock()
1375 wlock = self._repo.wlock()
1379 try:
1376 try:
1380 rejected = []
1377 rejected = []
1381 for f in files:
1378 for f in files:
1382 if f not in self._repo.dirstate:
1379 if f not in self._repo.dirstate:
1383 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1380 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1384 rejected.append(f)
1381 rejected.append(f)
1385 elif self._repo.dirstate[f] != 'a':
1382 elif self._repo.dirstate[f] != 'a':
1386 self._repo.dirstate.remove(f)
1383 self._repo.dirstate.remove(f)
1387 else:
1384 else:
1388 self._repo.dirstate.drop(f)
1385 self._repo.dirstate.drop(f)
1389 return rejected
1386 return rejected
1390 finally:
1387 finally:
1391 wlock.release()
1388 wlock.release()
1392
1389
1393 def undelete(self, list):
1390 def undelete(self, list):
1394 pctxs = self.parents()
1391 pctxs = self.parents()
1395 wlock = self._repo.wlock()
1392 wlock = self._repo.wlock()
1396 try:
1393 try:
1397 for f in list:
1394 for f in list:
1398 if self._repo.dirstate[f] != 'r':
1395 if self._repo.dirstate[f] != 'r':
1399 self._repo.ui.warn(_("%s not removed!\n") % f)
1396 self._repo.ui.warn(_("%s not removed!\n") % f)
1400 else:
1397 else:
1401 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1398 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1402 t = fctx.data()
1399 t = fctx.data()
1403 self._repo.wwrite(f, t, fctx.flags())
1400 self._repo.wwrite(f, t, fctx.flags())
1404 self._repo.dirstate.normal(f)
1401 self._repo.dirstate.normal(f)
1405 finally:
1402 finally:
1406 wlock.release()
1403 wlock.release()
1407
1404
1408 def copy(self, source, dest):
1405 def copy(self, source, dest):
1409 try:
1406 try:
1410 st = self._repo.wvfs.lstat(dest)
1407 st = self._repo.wvfs.lstat(dest)
1411 except OSError, err:
1408 except OSError, err:
1412 if err.errno != errno.ENOENT:
1409 if err.errno != errno.ENOENT:
1413 raise
1410 raise
1414 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1411 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1415 return
1412 return
1416 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1413 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1417 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1414 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1418 "symbolic link\n") % dest)
1415 "symbolic link\n") % dest)
1419 else:
1416 else:
1420 wlock = self._repo.wlock()
1417 wlock = self._repo.wlock()
1421 try:
1418 try:
1422 if self._repo.dirstate[dest] in '?':
1419 if self._repo.dirstate[dest] in '?':
1423 self._repo.dirstate.add(dest)
1420 self._repo.dirstate.add(dest)
1424 elif self._repo.dirstate[dest] in 'r':
1421 elif self._repo.dirstate[dest] in 'r':
1425 self._repo.dirstate.normallookup(dest)
1422 self._repo.dirstate.normallookup(dest)
1426 self._repo.dirstate.copy(source, dest)
1423 self._repo.dirstate.copy(source, dest)
1427 finally:
1424 finally:
1428 wlock.release()
1425 wlock.release()
1429
1426
1430 def _filtersuspectsymlink(self, files):
1427 def _filtersuspectsymlink(self, files):
1431 if not files or self._repo.dirstate._checklink:
1428 if not files or self._repo.dirstate._checklink:
1432 return files
1429 return files
1433
1430
1434 # Symlink placeholders may get non-symlink-like contents
1431 # Symlink placeholders may get non-symlink-like contents
1435 # via user error or dereferencing by NFS or Samba servers,
1432 # via user error or dereferencing by NFS or Samba servers,
1436 # so we filter out any placeholders that don't look like a
1433 # so we filter out any placeholders that don't look like a
1437 # symlink
1434 # symlink
1438 sane = []
1435 sane = []
1439 for f in files:
1436 for f in files:
1440 if self.flags(f) == 'l':
1437 if self.flags(f) == 'l':
1441 d = self[f].data()
1438 d = self[f].data()
1442 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1439 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1443 self._repo.ui.debug('ignoring suspect symlink placeholder'
1440 self._repo.ui.debug('ignoring suspect symlink placeholder'
1444 ' "%s"\n' % f)
1441 ' "%s"\n' % f)
1445 continue
1442 continue
1446 sane.append(f)
1443 sane.append(f)
1447 return sane
1444 return sane
1448
1445
1449 def _checklookup(self, files):
1446 def _checklookup(self, files):
1450 # check for any possibly clean files
1447 # check for any possibly clean files
1451 if not files:
1448 if not files:
1452 return [], []
1449 return [], []
1453
1450
1454 modified = []
1451 modified = []
1455 fixup = []
1452 fixup = []
1456 pctx = self._parents[0]
1453 pctx = self._parents[0]
1457 # do a full compare of any files that might have changed
1454 # do a full compare of any files that might have changed
1458 for f in sorted(files):
1455 for f in sorted(files):
1459 if (f not in pctx or self.flags(f) != pctx.flags(f)
1456 if (f not in pctx or self.flags(f) != pctx.flags(f)
1460 or pctx[f].cmp(self[f])):
1457 or pctx[f].cmp(self[f])):
1461 modified.append(f)
1458 modified.append(f)
1462 else:
1459 else:
1463 fixup.append(f)
1460 fixup.append(f)
1464
1461
1465 # update dirstate for files that are actually clean
1462 # update dirstate for files that are actually clean
1466 if fixup:
1463 if fixup:
1467 try:
1464 try:
1468 # updating the dirstate is optional
1465 # updating the dirstate is optional
1469 # so we don't wait on the lock
1466 # so we don't wait on the lock
1470 # wlock can invalidate the dirstate, so cache normal _after_
1467 # wlock can invalidate the dirstate, so cache normal _after_
1471 # taking the lock
1468 # taking the lock
1472 wlock = self._repo.wlock(False)
1469 wlock = self._repo.wlock(False)
1473 normal = self._repo.dirstate.normal
1470 normal = self._repo.dirstate.normal
1474 try:
1471 try:
1475 for f in fixup:
1472 for f in fixup:
1476 normal(f)
1473 normal(f)
1477 finally:
1474 finally:
1478 wlock.release()
1475 wlock.release()
1479 except error.LockError:
1476 except error.LockError:
1480 pass
1477 pass
1481 return modified, fixup
1478 return modified, fixup
1482
1479
1483 def _manifestmatches(self, match, s):
1480 def _manifestmatches(self, match, s):
1484 """Slow path for workingctx
1481 """Slow path for workingctx
1485
1482
1486 The fast path is when we compare the working directory to its parent
1483 The fast path is when we compare the working directory to its parent
1487 which means this function is comparing with a non-parent; therefore we
1484 which means this function is comparing with a non-parent; therefore we
1488 need to build a manifest and return what matches.
1485 need to build a manifest and return what matches.
1489 """
1486 """
1490 mf = self._repo['.']._manifestmatches(match, s)
1487 mf = self._repo['.']._manifestmatches(match, s)
1491 for f in s.modified + s.added:
1488 for f in s.modified + s.added:
1492 mf[f] = _newnode
1489 mf[f] = _newnode
1493 mf.setflag(f, self.flags(f))
1490 mf.setflag(f, self.flags(f))
1494 for f in s.removed:
1491 for f in s.removed:
1495 if f in mf:
1492 if f in mf:
1496 del mf[f]
1493 del mf[f]
1497 return mf
1494 return mf
1498
1495
1499 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1496 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1500 unknown=False):
1497 unknown=False):
1501 '''Gets the status from the dirstate -- internal use only.'''
1498 '''Gets the status from the dirstate -- internal use only.'''
1502 listignored, listclean, listunknown = ignored, clean, unknown
1499 listignored, listclean, listunknown = ignored, clean, unknown
1503 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1500 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1504 subrepos = []
1501 subrepos = []
1505 if '.hgsub' in self:
1502 if '.hgsub' in self:
1506 subrepos = sorted(self.substate)
1503 subrepos = sorted(self.substate)
1507 cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
1504 cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
1508 listclean, listunknown)
1505 listclean, listunknown)
1509
1506
1510 # check for any possibly clean files
1507 # check for any possibly clean files
1511 if cmp:
1508 if cmp:
1512 modified2, fixup = self._checklookup(cmp)
1509 modified2, fixup = self._checklookup(cmp)
1513 s.modified.extend(modified2)
1510 s.modified.extend(modified2)
1514
1511
1515 # update dirstate for files that are actually clean
1512 # update dirstate for files that are actually clean
1516 if fixup and listclean:
1513 if fixup and listclean:
1517 s.clean.extend(fixup)
1514 s.clean.extend(fixup)
1518
1515
1519 if match.always():
1516 if match.always():
1520 # cache for performance
1517 # cache for performance
1521 if s.unknown or s.ignored or s.clean:
1518 if s.unknown or s.ignored or s.clean:
1522 # "_status" is cached with list*=False in the normal route
1519 # "_status" is cached with list*=False in the normal route
1523 self._status = scmutil.status(s.modified, s.added, s.removed,
1520 self._status = scmutil.status(s.modified, s.added, s.removed,
1524 s.deleted, [], [], [])
1521 s.deleted, [], [], [])
1525 else:
1522 else:
1526 self._status = s
1523 self._status = s
1527
1524
1528 return s
1525 return s
1529
1526
1530 def _buildstatus(self, other, s, match, listignored, listclean,
1527 def _buildstatus(self, other, s, match, listignored, listclean,
1531 listunknown):
1528 listunknown):
1532 """build a status with respect to another context
1529 """build a status with respect to another context
1533
1530
1534 This includes logic for maintaining the fast path of status when
1531 This includes logic for maintaining the fast path of status when
1535 comparing the working directory against its parent, which is to skip
1532 comparing the working directory against its parent, which is to skip
1536 building a new manifest if self (working directory) is not comparing
1533 building a new manifest if self (working directory) is not comparing
1537 against its parent (repo['.']).
1534 against its parent (repo['.']).
1538 """
1535 """
1539 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1536 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1540 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1537 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1541 # might have accidentally ended up with the entire contents of the file
1538 # might have accidentally ended up with the entire contents of the file
1542 # they are supposed to be linking to.
1539 # they are supposed to be linking to.
1543 s.modified[:] = self._filtersuspectsymlink(s.modified)
1540 s.modified[:] = self._filtersuspectsymlink(s.modified)
1544 if other != self._repo['.']:
1541 if other != self._repo['.']:
1545 s = super(workingctx, self)._buildstatus(other, s, match,
1542 s = super(workingctx, self)._buildstatus(other, s, match,
1546 listignored, listclean,
1543 listignored, listclean,
1547 listunknown)
1544 listunknown)
1548 return s
1545 return s
1549
1546
def _matchstatus(self, other, match):
    """override the match method with a filter for directory patterns

    We use inheritance to customize match.bad only for workingctx,
    since it belongs only to the working directory when comparing
    against the parent changeset.

    When we aren't comparing against the working directory's parent,
    the default match object sent to us is returned unchanged.
    """
    match = super(workingctx, self)._matchstatus(other, match)
    if other == self._repo['.']:
        return match

    repo = self._repo
    def bad(f, msg):
        # 'f' may be a directory pattern from 'match.files()', so
        # 'f not in other' alone is not a sufficient check
        if f not in other and f not in other.dirs():
            repo.ui.warn('%s: %s\n' % (repo.dirstate.pathto(f), msg))
    match.bad = bad
    return match
1571
1568
class committablefilectx(basefilectx):
    """Common functionality for a file context that wants the ability
    to commit, e.g. workingfilectx or memfilectx."""
    def __init__(self, repo, path, filelog=None, ctx=None):
        self._repo = repo
        self._path = path
        self._changeid = None
        self._filerev = self._filenode = None

        # only cache what the caller actually supplied; otherwise the
        # subclasses' propertycache machinery fills these in lazily
        if filelog is not None:
            self._filelog = filelog
        if ctx:
            self._changectx = ctx

    def __nonzero__(self):
        # an uncommitted file context always "exists"
        return True

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        def nodefor(changectx, fname):
            return changectx._manifest.get(fname, nullid)

        fname = self._path
        flog = self._filelog
        parentctxs = self._changectx._parents
        copysource = self.renamed()

        if copysource:
            # (source path, source filenode) plus a None filelog marker
            entries = [copysource + (None,)]
        else:
            entries = [(fname, nodefor(parentctxs[0], fname), flog)]
        entries.extend((fname, nodefor(pctx, fname), flog)
                       for pctx in parentctxs[1:])

        return [filectx(self._repo, p, fileid=n, filelog=l)
                for p, n, l in entries if n != nullid]

    def children(self):
        return []
1612
1609
class workingfilectx(committablefilectx):
    """Convenient access to data related to a particular file in the
    working directory."""
    def __init__(self, repo, path, filelog=None, workingctx=None):
        super(workingfilectx, self).__init__(repo, path, filelog, workingctx)

    @propertycache
    def _changectx(self):
        return workingctx(self._repo)

    def data(self):
        return self._repo.wread(self._path)

    def renamed(self):
        source = self._repo.dirstate.copied(self._path)
        if not source:
            return None
        p1manifest = self._changectx._parents[0]._manifest
        return source, p1manifest.get(source, nullid)

    def size(self):
        return self._repo.wvfs.lstat(self._path).st_size

    def date(self):
        t, tz = self._changectx.date()
        try:
            mtime = self._repo.wvfs.lstat(self._path).st_mtime
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            # file vanished from disk; fall back to the changectx date
            return (t, tz)
        return (int(mtime), tz)

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        # fctx should be a filectx (not a workingfilectx); invert the
        # comparison to reuse the same code path
        return fctx.cmp(self)

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)

    def write(self, data, flags):
        """wraps repo.wwrite"""
        self._repo.wwrite(self._path, data, flags)
1658
1655
class workingcommitctx(workingctx):
    """Convenient access to data related to the revision being committed.

    Working-directory changes that aren't committed in this context
    are hidden by it.
    """
    def __init__(self, repo, changes,
                 text="", user=None, date=None, extra=None):
        # deliberately skip workingctx.__init__ and go straight to the
        # grandparent initializer with an explicit 'changes' status
        super(workingctx, self).__init__(repo, text, user, date, extra,
                                         changes)

    def _dirstatestatus(self, match=None, ignored=False, clean=False,
                        unknown=False):
        """Return matched files only in ``self._status``

        Uncommitted files appear "clean" via this context, even if
        they aren't actually so in the working directory.
        """
        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        if clean:
            cleanfiles = [f for f in self._manifest
                          if f not in self._changedset]
        else:
            cleanfiles = []
        s = self._status
        return scmutil.status([f for f in s.modified if match(f)],
                              [f for f in s.added if match(f)],
                              [f for f in s.removed if match(f)],
                              [], [], [], cleanfiles)

    @propertycache
    def _changedset(self):
        """Return the set of files changed in this context
        """
        s = self._status
        return set(s.modified) | set(s.added) | set(s.removed)
1696
1693
class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time while
    related files data and is made available through a callback
    mechanism. 'repo' is the current localrepo, 'parents' is a
    sequence of two parent revisions identifiers (pass None for every
    missing parent), 'text' is the commit message and 'files' lists
    names of files touched by the revision (normalized and relative to
    repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but calls order is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn raises an
    IOError. Moved files are represented by marking the source file
    removed and the new file added with copy information (see
    memfilectx).

    user receives the committer name and defaults to current
    repository username, date is the commit date in any format
    supported by util.parsedate() and defaults to current date, extra
    is a dictionary of metadata or is left empty.
    """

    # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
    # Extensions that need to retain compatibility across Mercurial 3.1 can use
    # this field to determine what to do in filectxfn.
    _returnnoneformissingfiles = True

    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        # map missing (None) parents to the null revision
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
        # deduplicate and keep a stable order for the touched files
        files = sorted(set(files))
        self._files = files
        self.substate = {}

        # if store is not callable, wrap it in a function
        if not callable(filectxfn):
            def getfilectx(repo, memctx, path):
                fctx = filectxfn[path]
                # this is weird but apparently we only keep track of one parent
                # (why not only store that instead of a tuple?)
                copied = fctx.renamed()
                if copied:
                    copied = copied[0]
                return memfilectx(repo, path, fctx.data(),
                                  islink=fctx.islink(), isexec=fctx.isexec(),
                                  copied=copied, memctx=memctx)
            self._filectxfn = getfilectx
        else:
            # "util.cachefunc" reduces invocation of possibly expensive
            # "filectxfn" for performance (e.g. converting from another VCS)
            self._filectxfn = util.cachefunc(filectxfn)

        self._extra = extra and extra.copy() or {}
        if self._extra.get('branch', '') == '':
            self._extra['branch'] = 'default'

        if editor:
            # let the editor callback rewrite the commit message and
            # persist it so it can be recovered on failure
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].node()
            if len(p) > 1:
                p2node = p[1].node()
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            # added files have no filelog parents
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                # filectxfn returned a context: the file exists in this
                # revision, so it counts as modified
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
1830
1827
1831 class memfilectx(committablefilectx):
1828 class memfilectx(committablefilectx):
1832 """memfilectx represents an in-memory file to commit.
1829 """memfilectx represents an in-memory file to commit.
1833
1830
1834 See memctx and committablefilectx for more details.
1831 See memctx and committablefilectx for more details.
1835 """
1832 """
1836 def __init__(self, repo, path, data, islink=False,
1833 def __init__(self, repo, path, data, islink=False,
1837 isexec=False, copied=None, memctx=None):
1834 isexec=False, copied=None, memctx=None):
1838 """
1835 """
1839 path is the normalized file path relative to repository root.
1836 path is the normalized file path relative to repository root.
1840 data is the file content as a string.
1837 data is the file content as a string.
1841 islink is True if the file is a symbolic link.
1838 islink is True if the file is a symbolic link.
1842 isexec is True if the file is executable.
1839 isexec is True if the file is executable.
1843 copied is the source file path if current file was copied in the
1840 copied is the source file path if current file was copied in the
1844 revision being committed, or None."""
1841 revision being committed, or None."""
1845 super(memfilectx, self).__init__(repo, path, None, memctx)
1842 super(memfilectx, self).__init__(repo, path, None, memctx)
1846 self._data = data
1843 self._data = data
1847 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1844 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1848 self._copied = None
1845 self._copied = None
1849 if copied:
1846 if copied:
1850 self._copied = (copied, nullid)
1847 self._copied = (copied, nullid)
1851
1848
1852 def data(self):
1849 def data(self):
1853 return self._data
1850 return self._data
1854 def size(self):
1851 def size(self):
1855 return len(self.data())
1852 return len(self.data())
1856 def flags(self):
1853 def flags(self):
1857 return self._flags
1854 return self._flags
1858 def renamed(self):
1855 def renamed(self):
1859 return self._copied
1856 return self._copied
1860
1857
1861 def remove(self, ignoremissing=False):
1858 def remove(self, ignoremissing=False):
1862 """wraps unlink for a repo's working directory"""
1859 """wraps unlink for a repo's working directory"""
1863 # need to figure out what to do here
1860 # need to figure out what to do here
1864 del self._changectx[self._path]
1861 del self._changectx[self._path]
1865
1862
1866 def write(self, data, flags):
1863 def write(self, data, flags):
1867 """wraps repo.wwrite"""
1864 """wraps repo.wwrite"""
1868 self._data = data
1865 self._data = data
General Comments 0
You need to be logged in to leave comments. Login now