narrowmatcher: propagate the rel() method...
Matt Harbison
r23686:164915e8 default
@@ -1,2973 +1,2973 @@
# cmdutil.py - help for command processing in mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from node import hex, nullid, nullrev, short
from i18n import _
import os, sys, errno, re, tempfile
import util, scmutil, templater, patch, error, templatekw, revlog, copies
import match as matchmod
import context, repair, graphmod, revset, phases, obsolete, pathutil
import changelog
import bookmarks
import encoding
import lock as lockmod

def parsealiases(cmd):
    return cmd.lstrip("^").split("|")

def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    for e in keys:
        aliases = parsealiases(e)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice

def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        clist = choice.keys()
        clist.sort()
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        return choice.values()[0]

    raise error.UnknownCommand(cmd)

def findrepo(p):
    while not os.path.isdir(os.path.join(p, ".hg")):
        oldp, p = p, os.path.dirname(p)
        if p == oldp:
            return None

    return p

def bailifchanged(repo):
    if repo.dirstate.p2() != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    modified, added, removed, deleted = repo.status()[:4]
    if modified or added or removed or deleted:
        raise util.Abort(_('uncommitted changes'))
    ctx = repo[None]
    for s in sorted(ctx.substate):
        if ctx.sub(s).dirty():
            raise util.Abort(_("uncommitted changes in subrepo %s") % s)

def logmessage(ui, opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            if logfile == '-':
                message = ui.fin.read()
            else:
                message = '\n'.join(util.readfile(logfile).splitlines())
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    return message

def mergeeditform(ctxorbool, baseform):
    """build appropriate editform from ctxorbool and baseform

    'ctxorbool' is one of a ctx to be committed, or a bool whether
    merging is committed.

    This returns editform 'baseform' with '.merge' if merging is
    committed, or one with '.normal' suffix otherwise.
    """
    if isinstance(ctxorbool, bool):
        if ctxorbool:
            return baseform + ".merge"
    elif 1 < len(ctxorbool.parents()):
        return baseform + ".merge"

    return baseform + ".normal"

def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        return lambda r, c, s: commitforceeditor(r, c, s,
                                                 finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
    elif editform:
        return lambda r, c, s: commiteditor(r, c, s, editform=editform)
    else:
        return commiteditor

def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0:
            raise util.Abort(_('limit must be positive'))
    else:
        limit = None
    return limit

def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        'm': lambda: re.sub('[^\w]', '_', str(desc))
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        if node:
            expander.update(node_expander)
        if node:
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output filename") %
                         inst.args[0])

def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):

    writable = mode not in ('r', 'rb')

    if not pat or pat == '-':
        fp = writable and repo.ui.fout or repo.ui.fin
        if util.safehasattr(fp, 'fileno'):
            return os.fdopen(os.dup(fp.fileno()), mode)
        else:
            # if this fp can't be duped properly, return
            # a dummy object that can be closed
            class wrappedfileobj(object):
                noop = lambda x: None
                def __init__(self, f):
                    self.f = f
                def __getattr__(self, attr):
                    if attr == 'close':
                        return self.noop
                    else:
                        return getattr(self.f, attr)

            return wrappedfileobj(fp)
    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            modemap[fn] = 'ab'
    return open(fn, mode)

def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl or mf:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest '
                    'without a repository')
    if msg:
        raise util.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif mf:
            r = repo.manifest
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise util.Abort(_("revlog '%s' not found") % file_)
        r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r

def copy(ui, repo, pats, opts, rename=False):
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        srcs = []
        badstates = after and '?' or '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = scmutil.expandpats(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0

def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
            runargs=None, appendpid=False):
    '''Run a command as a service.'''

    def writepid(pid):
        if opts['pid_file']:
            mode = appendpid and 'a' or 'w'
            fp = open(opts['pid_file'], mode)
            fp.write(str(pid) + '\n')
            fp.close()

    if opts['daemon'] and not opts['daemon_pipefds']:
        # Signal child process startup with file removal
        lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
        os.close(lockfd)
        try:
            if not runargs:
                runargs = util.hgcmd() + sys.argv[1:]
            runargs.append('--daemon-pipefds=%s' % lockpath)
            # Don't pass --cwd to the child process, because we've already
            # changed directory.
            for i in xrange(1, len(runargs)):
                if runargs[i].startswith('--cwd='):
                    del runargs[i]
                    break
                elif runargs[i].startswith('--cwd'):
                    del runargs[i:i + 2]
                    break
            def condfn():
                return not os.path.exists(lockpath)
            pid = util.rundetached(runargs, condfn)
            if pid < 0:
                raise util.Abort(_('child process failed to start'))
            writepid(pid)
        finally:
            try:
                os.unlink(lockpath)
            except OSError, e:
                if e.errno != errno.ENOENT:
                    raise
        if parentfn:
            return parentfn(pid)
        else:
            return

    if initfn:
        initfn()

    if not opts['daemon']:
        writepid(os.getpid())

    if opts['daemon_pipefds']:
        lockpath = opts['daemon_pipefds']
        try:
            os.setsid()
        except AttributeError:
            pass
        os.unlink(lockpath)
        util.hidewindow()
        sys.stdout.flush()
        sys.stderr.flush()

        nullfd = os.open(os.devnull, os.O_RDWR)
        logfilefd = nullfd
        if logfile:
            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
        os.dup2(nullfd, 0)
        os.dup2(logfilefd, 1)
        os.dup2(logfilefd, 2)
        if nullfd not in (0, 1, 2):
            os.close(nullfd)
        if logfile and logfilefd not in (0, 1, 2):
            os.close(logfilefd)

    if runfn:
        return runfn()

def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it a simple code translation from
    the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)
    """
    tmpname, message, user, date, branch, nodeid, p1, p2 = \
        patch.extract(ui, hunk)

    update = not opts.get('bypass')
    strip = opts["strip"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        return (None, None, False)
    msg = _('applied to working directory')

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise util.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            repo.dirstate.beginparentchange()
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or opts.get('import_branch'):
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, files=files,
                            eolmode=None, similarity=sim / 100.0)
            except patch.PatchError, e:
                if not partial:
                    raise util.Abort(str(e))
                if partial:
                    rejects = True

            files = list(files)
            if opts.get('no_commit'):
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                n = repo.commit(message, opts.get('user') or user,
                                opts.get('date') or date, match=m,
                                editor=editor, force=partial)
            repo.dirstate.endparentchange()
        else:
            if opts.get('exact') or opts.get('import_branch'):
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip,
                                    files, eolmode=None)
                except patch.PatchError, e:
                    raise util.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.makememctx(repo, (p1.node(), p2.node()),
                                            message,
                                            opts.get('user') or user,
                                            opts.get('date') or date,
                                            branch, files, store,
                                            editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and opts.get('no_commit'):
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise util.Abort(_('patch is damaged or loses information'))
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        os.unlink(tmpname)

def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.'''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])
    filemode = {}

    def single(rev, seqno, fp):
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        shouldclose = False
        if not fp and len(template) > 0:
            desc_lines = ctx.description().rstrip().split('\n')
            desc = desc_lines[0] #Commit always has a first line.
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='wb',
                             modemap=filemode)
            if fp != template:
                shouldclose = True
        if fp and fp != sys.stdout and util.safehasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        if not fp:
            write = repo.ui.write
        else:
            def write(s, **kw):
                fp.write(s)


        write("# HG changeset patch\n")
        write("# User %s\n" % ctx.user())
        write("# Date %d %d\n" % ctx.date())
        write("# %s\n" % util.datestr(ctx.date()))
        if branch and branch != 'default':
            write("# Branch %s\n" % branch)
        write("# Node ID %s\n" % hex(node))
        write("# Parent  %s\n" % hex(prev))
        if len(parents) > 1:
            write("# Parent  %s\n" % hex(parents[1]))
        write(ctx.description().rstrip())
        write("\n\n")

        for chunk, label in patch.diffui(repo, prev, node, opts=opts):
            write(chunk, label=label)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)

def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   listsubrepos=False):
    '''show diff or diffstat.'''
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    if stat:
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width,
                                             git=diffopts.git):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.narrowmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)

class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        self.matchfn = matchfn
        self.diffopts = diffopts
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, rev):
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        if self.buffered:
            self.ui.pushbuffer()
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % (rev, short(changenode)),
                          label='log.node')
            return

        log = self.repo.changelog
        date = util.datestr(ctx.date())

        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset:   %d:%s\n") % (rev, hexfunc(changenode)),
                      label='log.changeset changeset.%s' % ctx.phasestr())

        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch:      %s\n") % branch,
                          label='log.branch')
        for bookmark in self.repo.nodebookmarks(changenode):
            # i18n: column positioning for "hg log"
            self.ui.write(_("bookmark:    %s\n") % bookmark,
                          label='log.bookmark')
        for tag in self.repo.nodetags(changenode):
            # i18n: column positioning for "hg log"
            self.ui.write(_("tag:         %s\n") % tag,
                          label='log.tag')
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase:       %s\n") % _(ctx.phasestr()),
                          label='log.phase')
        for parent in parents:
            label = 'log.parent changeset.%s' % self.repo[parent[0]].phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent:      %d:%s\n") % parent,
                          label=label)

        if self.ui.debugflag:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest:    %d:%s\n") %
                          (self.repo.manifest.rev(mnode), hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user:        %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date:        %s\n") % date,
                      label='log.date')

        if self.ui.debugflag:
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files:       %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies:      %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra:       %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary:     %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(changenode, matchfn)

    def showpatch(self, node, matchfn):
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffopts(self.ui, self.diffopts)
            prev = self.repo.changelog.parents(node)[0]
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.
1008
1008
1009 For merges (two non-nullrev revisions) both parents are meaningful.
1009 For merges (two non-nullrev revisions) both parents are meaningful.
1010 Otherwise the first parent revision is considered meaningful if it
1010 Otherwise the first parent revision is considered meaningful if it
1011 is not the preceding revision.
1011 is not the preceding revision.
1012 """
1012 """
1013 parents = log.parentrevs(rev)
1013 parents = log.parentrevs(rev)
1014 if not self.ui.debugflag and parents[1] == nullrev:
1014 if not self.ui.debugflag and parents[1] == nullrev:
1015 if parents[0] >= rev - 1:
1015 if parents[0] >= rev - 1:
1016 parents = []
1016 parents = []
1017 else:
1017 else:
1018 parents = [parents[0]]
1018 parents = [parents[0]]
1019 return parents
1019 return parents
1020
1020
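# Illustrative sketch (added for exposition; not part of the original module):
# with a strictly linear history ... -> 40 -> 41 -> 42, the method above
# returns [] for rev 42 because its only parent is the immediately preceding
# revision, whereas a changeset whose first parent is, say, 37 would yield
# [37], and a merge (or --debug mode) always yields both parents.
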
class jsonchangeset(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        self._first = True

    def close(self):
        if not self._first:
            self.ui.write("\n]\n")
        else:
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        hexnode = hex(ctx.node())
        rev = ctx.rev()
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            self.ui.write('\n "rev": %d' % rev)
            self.ui.write(',\n "node": "%s"' % hexnode)
            self.ui.write('\n }')
            return

        self.ui.write('\n "rev": %d' % rev)
        self.ui.write(',\n "node": "%s"' % hexnode)
        self.ui.write(',\n "branch": "%s"' % j(ctx.branch()))
        self.ui.write(',\n "phase": "%s"' % ctx.phasestr())
        self.ui.write(',\n "user": "%s"' % j(ctx.user()))
        self.ui.write(',\n "date": [%d, %d]' % ctx.date())
        self.ui.write(',\n "desc": "%s"' % j(ctx.description()))

        self.ui.write(',\n "bookmarks": [%s]' %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write(',\n "tags": [%s]' %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write(',\n "parents": [%s]' %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            self.ui.write(',\n "manifest": "%s"' % hex(ctx.manifestnode()))

            self.ui.write(',\n "extra": {%s}' %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            files = ctx.status(ctx.p1())
            self.ui.write(',\n "modified": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write(',\n "added": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write(',\n "removed": [%s]' %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write(',\n "files": [%s]' %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

1088 self.ui.write(',\n "copies": {%s}' %
1088 self.ui.write(',\n "copies": {%s}' %
1089 ", ".join('"%s": %s' % (j(k), j(copies[k]))
1089 ", ".join('"%s": %s' % (j(k), j(copies[k]))
1090 for k in copies))
1090 for k in copies))
1091
1091
        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer()))

        self.ui.write("\n }")

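# Output sketch (added for exposition; values are placeholders): for a single
# changeset, jsonchangeset emits roughly
#
#   [
#    {
#     "rev": 0,
#     "node": "<40-hex-digit hash>",
#     "branch": "default",
#     ...
#     "parents": ["<40-hex-digit hash>"]
#    }
#   ]
#
# close() terminates the list, or prints "[]" when nothing was shown.
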
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
        defaulttempl = {
            'parent': '{rev}:{node|formatnode} ',
            'manifest': '{rev}:{node|formatnode}',
            'file_copy': '{name} ({source})',
            'extra': '{key}={value|stringescape}'
            }
        # filecopy is preserved for compatibility reasons
        defaulttempl['filecopy'] = defaulttempl['file_copy']
        self.t = templater.templater(mapfile, {'formatnode': formatnode},
                                     cache=defaulttempl)
        if tmpl:
            self.t.cache['changeset'] = tmpl

        self.cache = {}

    def _meaningful_parentrevs(self, ctx):
        """Return list of meaningful (or all if debug) parentrevs for rev.
        """
        parents = ctx.parents()
        if len(parents) > 1:
            return parents
        if self.ui.debugflag:
            return [parents[0], self.repo['null']]
        if parents[0].rev() >= ctx.rev() - 1:
            return []
        return parents

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''

        showlist = templatekw.showlist

        # showparents() behaviour depends on ui trace level which
        # causes unexpected behaviours at templating level and makes
        # it harder to extract it in a standalone function. Its
        # behaviour cannot be changed so leave it here for now.
        def showparents(**args):
            ctx = args['ctx']
            parents = [[('rev', p.rev()),
                        ('node', p.hex()),
                        ('phase', p.phasestr())]
                       for p in self._meaningful_parentrevs(ctx)]
            return showlist('parent', parents, **args)

        props = props.copy()
        props.update(templatekw.keywords)
        props['parents'] = showparents
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache

        # find correct templates for current mode

        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        types = {'header': '', 'footer': '', 'changeset': 'changeset'}
        for mode, postfix in tmplmodes:
            for type in types:
                cur = postfix and ('%s_%s' % (type, postfix)) or type
                if mode and cur in self.t:
                    types[type] = cur

        try:

            # write header
            if types['header']:
                h = templater.stringify(self.t(types['header'], **props))
                if self.buffered:
                    self.header[ctx.rev()] = h
                else:
                    if self.lastheader != h:
                        self.lastheader = h
                        self.ui.write(h)

            # write changeset metadata, then patch if requested
            key = types['changeset']
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(ctx.node(), matchfn)

            if types['footer']:
                if not self.footer:
                    self.footer = templater.stringify(self.t(types['footer'],
                                                              **props))

        except KeyError, inst:
            msg = _("%s: no key named '%s'")
            raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))

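# Illustrative sketch (added for exposition): the mode lookup above lets a map
# file provide per-verbosity variants. With entries 'changeset' and
# 'changeset_verbose', plain "hg log" renders 'changeset' while "hg log -v"
# prefers 'changeset_verbose'; the same applies to the 'header' and 'footer'
# keys with the '_quiet' and '_debug' postfixes.
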
def gettemplate(ui, tmpl, style):
    """
    Find the template matching the given template spec or style.
    """

    # ui settings
    if not tmpl and not style: # templates are stronger than styles
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            try:
                tmpl = templater.parsestring(tmpl)
            except SyntaxError:
                tmpl = templater.parsestring(tmpl, quoted=False)
            return tmpl, None
        else:
            style = util.expandpath(ui.config('ui', 'style', ''))

    if not tmpl and style:
        mapfile = style
        if not os.path.split(mapfile)[0]:
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname
        return None, mapfile

    if not tmpl:
        return None, None

    # looks like a literal template?
    if '{' in tmpl:
        return tmpl, None

    # perhaps a stock style?
    if not os.path.split(tmpl)[0]:
        mapname = (templater.templatepath('map-cmdline.' + tmpl)
                   or templater.templatepath(tmpl))
        if mapname and os.path.isfile(mapname):
            return None, mapname

    # perhaps it's a reference to [templates]
    t = ui.config('templates', tmpl)
    if t:
        try:
            tmpl = templater.parsestring(t)
        except SyntaxError:
            tmpl = templater.parsestring(t, quoted=False)
        return tmpl, None

    if tmpl == 'list':
        ui.write(_("available styles: %s\n") % templater.stylelist())
        raise util.Abort(_("specify a template"))

    # perhaps it's a path to a map or a template
    if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl):
        # is it a mapfile for a style?
        if os.path.basename(tmpl).startswith("map-"):
            return None, os.path.realpath(tmpl)
        tmpl = open(tmpl).read()
        return tmpl, None

    # constant string?
    return tmpl, None

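# Resolution sketch (added for exposition; the returned path is a
# placeholder):
#
#   gettemplate(ui, '{rev}:{node|short}\n', None)
#       -> ('{rev}:{node|short}\n', None)          # literal template ('{')
#   gettemplate(ui, None, 'compact')
#       -> (None, '<templatedir>/map-cmdline.compact')   # stock style map
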
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    matchfn = None
    if opts.get('patch') or opts.get('stat'):
        matchfn = scmutil.matchall(repo)

    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))

    if not tmpl and not mapfile:
        return changeset_printer(ui, repo, matchfn, opts, buffered)

    try:
        t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
                                buffered)
    except SyntaxError, inst:
        raise util.Abort(inst.args[0])
    return t

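# Usage sketch (added for exposition; assumes the show()/close() methods that
# changeset_printer defines earlier in this file, and an already-opened repo):
#
#   displayer = show_changeset(ui, repo, {'template': '{rev}\n'})
#   displayer.show(repo['.'])
#   displayer.close()
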
def showmarker(ui, marker):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    ui.write(hex(marker.precnode()))
    for repl in marker.succnodes():
        ui.write(' ')
        ui.write(hex(repl))
    ui.write(' %X ' % marker.flags())
    parents = marker.parentnodes()
    if parents is not None:
        ui.write('{%s} ' % ', '.join(hex(p) for p in parents))
    ui.write('(%s) ' % util.datestr(marker.date()))
    ui.write('{%s}' % (', '.join('%r: %r' % t for t in
                                 sorted(marker.metadata().items())
                                 if t[0] != 'date')))
    ui.write('\n')

def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""

    df = util.matchdate(date)
    m = scmutil.matchall(repo)
    results = {}

    def prep(ctx, fns):
        d = ctx.date()
        if df(d[0]):
            results[ctx.rev()] = d

    for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in results:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(results[rev])))
            return str(rev)

    raise util.Abort(_("revision matching date not found"))

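# Usage sketch (added for exposition): the date specs are those accepted by
# util.matchdate, so something like
#
#   finddate(ui, repo, '<2015-01-01')
#
# prints "found revision ..." and returns the matching revision number as a
# string, or raises util.Abort when no changeset matches.
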
def increasingwindows(windowsize=8, sizelimit=512):
    while True:
        yield windowsize
        if windowsize < sizelimit:
            windowsize *= 2

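# Window growth sketch (added for exposition): the generator above doubles the
# window until it reaches sizelimit and then stays there, e.g.
#
#   >>> import itertools
#   >>> list(itertools.islice(increasingwindows(), 9))
#   [8, 16, 32, 64, 128, 256, 512, 512, 512]
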
class FileWalkError(Exception):
    pass

def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)
    def filerevgen(filelog, last):
        """
        Only files, no patterns. Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)
    def iterfiles():
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise util.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted

def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')

    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts.get('rev'))
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        revs = revset.spanset(repo)
        revs.reverse()
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or (match.files() and opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if not slowpath and not match.files():
        # No files, no patterns. Display all revs.
        wanted = revs

    if not slowpath and match.files():
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise util.Abort(_('can only follow copies/renames for explicit '
                               'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    class followfilter(object):
        def __init__(self, onlyfirst=False):
            self.startrev = nullrev
            self.roots = set()
            self.onlyfirst = onlyfirst

        def match(self, rev):
            def realparents(rev):
                if self.onlyfirst:
                    return repo.changelog.parentrevs(rev)[0:1]
                else:
                    return filter(lambda x: x != nullrev,
                                  repo.changelog.parentrevs(rev))

            if self.startrev == nullrev:
                self.startrev = rev
                return True

            if rev > self.startrev:
                # forward: all descendants
                if not self.roots:
                    self.roots.add(self.startrev)
                for parent in realparents(rev):
                    if parent in self.roots:
                        self.roots.add(rev)
                        return True
            else:
                # backwards: all parents
                if not self.roots:
                    self.roots.update(realparents(self.startrev))
                if rev in self.roots:
                    self.roots.remove(rev)
                    self.roots.update(realparents(rev))
                    return True

            return False

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = followfilter()
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and not match.files():
            ff = followfilter(onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                try:
                    rev = it.next()
                    if want(rev):
                        nrevs.append(rev)
                except (StopIteration):
                    stopiteration = True
                    break
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()

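# Usage sketch (added for exposition; mirrors the call made in finddate()
# above, with "repo" an already-opened repository):
#
#   m = scmutil.matchall(repo)
#   def prep(ctx, fns):
#       pass                      # gather per-window data here
#   for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
#       ui.write("%d\n" % ctx.rev())
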
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating linkrevs to file names (which is not "correct" but
    # good enough).
    fcache = {}
    fcacheready = [False]
    pctx = repo['.']

    def populate():
        for fn in files:
            for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
                for c in i:
                    fcache.setdefault(c.linkrev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher

def _makenofollowlogfilematcher(repo, pats, opts):
    '''hook for extensions to override the filematcher for non-follow cases'''
    return None

def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match object filtering
    the files to be detailed when displaying the revision.
    """
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
        }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    followfirst = opts.get('follow_first') and 1 or 0
    # --follow with FILE behaviour depends on revs...
    it = iter(revs)
    startrev = it.next()
    try:
        followdescendants = startrev < it.next()
    except (StopIteration):
        followdescendants = False

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    pctx = repo[None]
    match, pats = scmutil.matchandpats(pctx, pats, opts)
    slowpath = match.anypats() or (match.files() and opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in pctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise util.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise util.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                opts[fnopats[followdescendants][followfirst]] = str(startrev)
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher

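# Example sketch (added for exposition): for "hg log -k bug -u alice" the
# options above map through opt2revset to an expression roughly like
#
#   "((keyword('bug')) and (user('alice')))"
#
# which getgraphlogrevs()/getlogrevs() below then use to filter the revisions.
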
1819 def getgraphlogrevs(repo, pats, opts):
1819 def getgraphlogrevs(repo, pats, opts):
1820 """Return (revs, expr, filematcher) where revs is an iterable of
1820 """Return (revs, expr, filematcher) where revs is an iterable of
1821 revision numbers, expr is a revset string built from log options
1821 revision numbers, expr is a revset string built from log options
1822 and file patterns or None, and used to filter 'revs'. If --stat or
1822 and file patterns or None, and used to filter 'revs'. If --stat or
1823 --patch are not passed, filematcher is None. Otherwise it is a
1823 --patch are not passed, filematcher is None. Otherwise it is a
1824 callable taking a revision number and returning a match object
1824 callable taking a revision number and returning a match object
1825 filtering the files to be detailed when displaying the revision.
1825 filtering the files to be detailed when displaying the revision.
1826 """
1826 """
1827 if not len(repo):
1827 if not len(repo):
1828 return [], None, None
1828 return [], None, None
1829 limit = loglimit(opts)
1829 limit = loglimit(opts)
1830 # Default --rev value depends on --follow but --follow behaviour
1830 # Default --rev value depends on --follow but --follow behaviour
1831 # depends on revisions resolved from --rev...
1831 # depends on revisions resolved from --rev...
1832 follow = opts.get('follow') or opts.get('follow_first')
1832 follow = opts.get('follow') or opts.get('follow_first')
1833 possiblyunsorted = False # whether revs might need sorting
1833 possiblyunsorted = False # whether revs might need sorting
1834 if opts.get('rev'):
1834 if opts.get('rev'):
1835 revs = scmutil.revrange(repo, opts['rev'])
1835 revs = scmutil.revrange(repo, opts['rev'])
1836 # Don't sort here because _makelogrevset might depend on the
1836 # Don't sort here because _makelogrevset might depend on the
1837 # order of revs
1837 # order of revs
1838 possiblyunsorted = True
1838 possiblyunsorted = True
1839 else:
1839 else:
1840 if follow and len(repo) > 0:
1840 if follow and len(repo) > 0:
1841 revs = repo.revs('reverse(:.)')
1841 revs = repo.revs('reverse(:.)')
1842 else:
1842 else:
1843 revs = revset.spanset(repo)
1843 revs = revset.spanset(repo)
1844 revs.reverse()
1844 revs.reverse()
1845 if not revs:
1845 if not revs:
1846 return revset.baseset(), None, None
1846 return revset.baseset(), None, None
1847 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
1847 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
1848 if possiblyunsorted:
1848 if possiblyunsorted:
1849 revs.sort(reverse=True)
1849 revs.sort(reverse=True)
1850 if expr:
1850 if expr:
1851 # Revset matchers often operate faster on revisions in changelog
1851 # Revset matchers often operate faster on revisions in changelog
1852 # order, because most filters deal with the changelog.
1852 # order, because most filters deal with the changelog.
1853 revs.reverse()
1853 revs.reverse()
1854 matcher = revset.match(repo.ui, expr)
1854 matcher = revset.match(repo.ui, expr)
1855 # Revset matches can reorder revisions. "A or B" typically returns
1855 # Revset matches can reorder revisions. "A or B" typically returns
1856 # the revision matching A, then the revision matching B. Sort
1856 # the revision matching A, then the revision matching B. Sort
1857 # again to fix that.
1857 # again to fix that.
1858 revs = matcher(repo, revs)
1858 revs = matcher(repo, revs)
1859 revs.sort(reverse=True)
1859 revs.sort(reverse=True)
1860 if limit is not None:
1860 if limit is not None:
1861 limitedrevs = []
1861 limitedrevs = []
1862 for idx, rev in enumerate(revs):
1862 for idx, rev in enumerate(revs):
1863 if idx >= limit:
1863 if idx >= limit:
1864 break
1864 break
1865 limitedrevs.append(rev)
1865 limitedrevs.append(rev)
1866 revs = revset.baseset(limitedrevs)
1866 revs = revset.baseset(limitedrevs)
1867
1867
1868 return revs, expr, filematcher
1868 return revs, expr, filematcher
1869
1869
1870 def getlogrevs(repo, pats, opts):
1870 def getlogrevs(repo, pats, opts):
1871 """Return (revs, expr, filematcher) where revs is an iterable of
1871 """Return (revs, expr, filematcher) where revs is an iterable of
1872 revision numbers, expr is a revset string built from log options
1872 revision numbers, expr is a revset string built from log options
1873 and file patterns or None, and used to filter 'revs'. If --stat or
1873 and file patterns or None, and used to filter 'revs'. If --stat or
1874 --patch are not passed, filematcher is None. Otherwise it is a
1874 --patch are not passed, filematcher is None. Otherwise it is a
1875 callable taking a revision number and returning a match object
1875 callable taking a revision number and returning a match object
1876 filtering the files to be detailed when displaying the revision.
1876 filtering the files to be detailed when displaying the revision.
1877 """
1877 """
1878 limit = loglimit(opts)
1878 limit = loglimit(opts)
1879 # Default --rev value depends on --follow but --follow behaviour
1879 # Default --rev value depends on --follow but --follow behaviour
1880 # depends on revisions resolved from --rev...
1880 # depends on revisions resolved from --rev...
1881 follow = opts.get('follow') or opts.get('follow_first')
1881 follow = opts.get('follow') or opts.get('follow_first')
1882 if opts.get('rev'):
1882 if opts.get('rev'):
1883 revs = scmutil.revrange(repo, opts['rev'])
1883 revs = scmutil.revrange(repo, opts['rev'])
1884 elif follow:
1884 elif follow:
1885 revs = repo.revs('reverse(:.)')
1885 revs = repo.revs('reverse(:.)')
1886 else:
1886 else:
1887 revs = revset.spanset(repo)
1887 revs = revset.spanset(repo)
1888 revs.reverse()
1888 revs.reverse()
1889 if not revs:
1889 if not revs:
1890 return revset.baseset([]), None, None
1890 return revset.baseset([]), None, None
1891 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
1891 expr, filematcher = _makelogrevset(repo, pats, opts, revs)
1892 if expr:
1892 if expr:
1893 # Revset matchers often operate faster on revisions in changelog
1893 # Revset matchers often operate faster on revisions in changelog
1894 # order, because most filters deal with the changelog.
1894 # order, because most filters deal with the changelog.
1895 if not opts.get('rev'):
1895 if not opts.get('rev'):
1896 revs.reverse()
1896 revs.reverse()
1897 matcher = revset.match(repo.ui, expr)
1897 matcher = revset.match(repo.ui, expr)
1898 # Revset matches can reorder revisions. "A or B" typically returns
1898 # Revset matches can reorder revisions. "A or B" typically returns
1899 # the revision matching A, then the revision matching B. Sort
1899 # the revision matching A, then the revision matching B. Sort
1900 # again to fix that.
1900 # again to fix that.
1901 revs = matcher(repo, revs)
1901 revs = matcher(repo, revs)
1902 if not opts.get('rev'):
1902 if not opts.get('rev'):
1903 revs.sort(reverse=True)
1903 revs.sort(reverse=True)
1904 if limit is not None:
1904 if limit is not None:
1905 count = 0
1905 count = 0
1906 limitedrevs = []
1906 limitedrevs = []
1907 it = iter(revs)
1907 it = iter(revs)
1908 while count < limit:
1908 while count < limit:
1909 try:
1909 try:
1910 limitedrevs.append(it.next())
1910 limitedrevs.append(it.next())
1911 except StopIteration:
1911 except StopIteration:
1912 break
1912 break
1913 count += 1
1913 count += 1
1914 revs = revset.baseset(limitedrevs)
1914 revs = revset.baseset(limitedrevs)
1915
1915
1916 return revs, expr, filematcher
1916 return revs, expr, filematcher
1917
1917
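# Editor's sketch (not part of this file or changeset): roughly how a caller
# such as the log command is expected to consume the (revs, expr, filematcher)
# triple returned by getlogrevs()/getgraphlogrevs() above. 'ui', 'repo',
# 'pats' and 'opts' are assumed to be supplied by the command dispatcher.
#
#     revs, expr, filematcher = getlogrevs(repo, pats, opts)
#     displayer = show_changeset(ui, repo, opts, buffered=True)
#     for rev in revs:
#         ctx = repo[rev]
#         # filematcher is only set when --patch/--stat were requested
#         revmatchfn = filematcher(rev) if filematcher is not None else None
#         displayer.show(ctx, matchfn=revmatchfn)
#         displayer.flush(rev)
#     displayer.close()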
1918 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
1918 def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
1919 filematcher=None):
1919 filematcher=None):
1920 seen, state = [], graphmod.asciistate()
1920 seen, state = [], graphmod.asciistate()
1921 for rev, type, ctx, parents in dag:
1921 for rev, type, ctx, parents in dag:
1922 char = 'o'
1922 char = 'o'
1923 if ctx.node() in showparents:
1923 if ctx.node() in showparents:
1924 char = '@'
1924 char = '@'
1925 elif ctx.obsolete():
1925 elif ctx.obsolete():
1926 char = 'x'
1926 char = 'x'
1927 copies = None
1927 copies = None
1928 if getrenamed and ctx.rev():
1928 if getrenamed and ctx.rev():
1929 copies = []
1929 copies = []
1930 for fn in ctx.files():
1930 for fn in ctx.files():
1931 rename = getrenamed(fn, ctx.rev())
1931 rename = getrenamed(fn, ctx.rev())
1932 if rename:
1932 if rename:
1933 copies.append((fn, rename[0]))
1933 copies.append((fn, rename[0]))
1934 revmatchfn = None
1934 revmatchfn = None
1935 if filematcher is not None:
1935 if filematcher is not None:
1936 revmatchfn = filematcher(ctx.rev())
1936 revmatchfn = filematcher(ctx.rev())
1937 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
1937 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
1938 lines = displayer.hunk.pop(rev).split('\n')
1938 lines = displayer.hunk.pop(rev).split('\n')
1939 if not lines[-1]:
1939 if not lines[-1]:
1940 del lines[-1]
1940 del lines[-1]
1941 displayer.flush(rev)
1941 displayer.flush(rev)
1942 edges = edgefn(type, char, lines, seen, rev, parents)
1942 edges = edgefn(type, char, lines, seen, rev, parents)
1943 for type, char, lines, coldata in edges:
1943 for type, char, lines, coldata in edges:
1944 graphmod.ascii(ui, state, type, char, lines, coldata)
1944 graphmod.ascii(ui, state, type, char, lines, coldata)
1945 displayer.close()
1945 displayer.close()
1946
1946
1947 def graphlog(ui, repo, *pats, **opts):
1947 def graphlog(ui, repo, *pats, **opts):
1948 # Parameters are identical to log command ones
1948 # Parameters are identical to log command ones
1949 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
1949 revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
1950 revdag = graphmod.dagwalker(repo, revs)
1950 revdag = graphmod.dagwalker(repo, revs)
1951
1951
1952 getrenamed = None
1952 getrenamed = None
1953 if opts.get('copies'):
1953 if opts.get('copies'):
1954 endrev = None
1954 endrev = None
1955 if opts.get('rev'):
1955 if opts.get('rev'):
1956 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
1956 endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
1957 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
1957 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
1958 displayer = show_changeset(ui, repo, opts, buffered=True)
1958 displayer = show_changeset(ui, repo, opts, buffered=True)
1959 showparents = [ctx.node() for ctx in repo[None].parents()]
1959 showparents = [ctx.node() for ctx in repo[None].parents()]
1960 displaygraph(ui, revdag, displayer, showparents,
1960 displaygraph(ui, revdag, displayer, showparents,
1961 graphmod.asciiedges, getrenamed, filematcher)
1961 graphmod.asciiedges, getrenamed, filematcher)
1962
1962
1963 def checkunsupportedgraphflags(pats, opts):
1963 def checkunsupportedgraphflags(pats, opts):
1964 for op in ["newest_first"]:
1964 for op in ["newest_first"]:
1965 if op in opts and opts[op]:
1965 if op in opts and opts[op]:
1966 raise util.Abort(_("-G/--graph option is incompatible with --%s")
1966 raise util.Abort(_("-G/--graph option is incompatible with --%s")
1967 % op.replace("_", "-"))
1967 % op.replace("_", "-"))
1968
1968
1969 def graphrevs(repo, nodes, opts):
1969 def graphrevs(repo, nodes, opts):
1970 limit = loglimit(opts)
1970 limit = loglimit(opts)
1971 nodes.reverse()
1971 nodes.reverse()
1972 if limit is not None:
1972 if limit is not None:
1973 nodes = nodes[:limit]
1973 nodes = nodes[:limit]
1974 return graphmod.nodes(repo, nodes)
1974 return graphmod.nodes(repo, nodes)
1975
1975
1976 def add(ui, repo, match, dryrun, listsubrepos, prefix, explicitonly):
1976 def add(ui, repo, match, dryrun, listsubrepos, prefix, explicitonly):
1977 join = lambda f: os.path.join(prefix, f)
1977 join = lambda f: os.path.join(prefix, f)
1978 bad = []
1978 bad = []
1979 oldbad = match.bad
1979 oldbad = match.bad
1980 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
1980 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
1981 names = []
1981 names = []
1982 wctx = repo[None]
1982 wctx = repo[None]
1983 cca = None
1983 cca = None
1984 abort, warn = scmutil.checkportabilityalert(ui)
1984 abort, warn = scmutil.checkportabilityalert(ui)
1985 if abort or warn:
1985 if abort or warn:
1986 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
1986 cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
1987 for f in wctx.walk(match):
1987 for f in wctx.walk(match):
1988 exact = match.exact(f)
1988 exact = match.exact(f)
1989 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
1989 if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
1990 if cca:
1990 if cca:
1991 cca(f)
1991 cca(f)
1992 names.append(f)
1992 names.append(f)
1993 if ui.verbose or not exact:
1993 if ui.verbose or not exact:
1994 ui.status(_('adding %s\n') % match.rel(join(f)))
1994 ui.status(_('adding %s\n') % match.rel(f))
1995
1995
1996 for subpath in sorted(wctx.substate):
1996 for subpath in sorted(wctx.substate):
1997 sub = wctx.sub(subpath)
1997 sub = wctx.sub(subpath)
1998 try:
1998 try:
1999 submatch = matchmod.narrowmatcher(subpath, match)
1999 submatch = matchmod.narrowmatcher(subpath, match)
2000 if listsubrepos:
2000 if listsubrepos:
2001 bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
2001 bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
2002 False))
2002 False))
2003 else:
2003 else:
2004 bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
2004 bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
2005 True))
2005 True))
2006 except error.LookupError:
2006 except error.LookupError:
2007 ui.status(_("skipping missing subrepository: %s\n")
2007 ui.status(_("skipping missing subrepository: %s\n")
2008 % join(subpath))
2008 % join(subpath))
2009
2009
2010 if not dryrun:
2010 if not dryrun:
2011 rejected = wctx.add(names, prefix)
2011 rejected = wctx.add(names, prefix)
2012 bad.extend(f for f in rejected if f in match.files())
2012 bad.extend(f for f in rejected if f in match.files())
2013 return bad
2013 return bad
2014
2014
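# Editor's note (sketch, inferred from this changeset rather than stated in
# the file): add(), forget(), remove() and cat() all recurse into subrepos
# through the same narrowed-matcher pattern, e.g.
#
#     submatch = matchmod.narrowmatcher(subpath, match)
#     sub.add(ui, submatch, dryrun, listsubrepos, prefix, False)
#
# and the diff above replaces match.rel(join(f)) with match.rel(f), which
# suggests the narrowed matcher's rel() now yields the user-facing relative
# path itself, so callers no longer prepend the subrepo prefix by hand.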
2015 def forget(ui, repo, match, prefix, explicitonly):
2015 def forget(ui, repo, match, prefix, explicitonly):
2016 join = lambda f: os.path.join(prefix, f)
2016 join = lambda f: os.path.join(prefix, f)
2017 bad = []
2017 bad = []
2018 oldbad = match.bad
2018 oldbad = match.bad
2019 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
2019 match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
2020 wctx = repo[None]
2020 wctx = repo[None]
2021 forgot = []
2021 forgot = []
2022 s = repo.status(match=match, clean=True)
2022 s = repo.status(match=match, clean=True)
2023 forget = sorted(s[0] + s[1] + s[3] + s[6])
2023 forget = sorted(s[0] + s[1] + s[3] + s[6])
2024 if explicitonly:
2024 if explicitonly:
2025 forget = [f for f in forget if match.exact(f)]
2025 forget = [f for f in forget if match.exact(f)]
2026
2026
2027 for subpath in sorted(wctx.substate):
2027 for subpath in sorted(wctx.substate):
2028 sub = wctx.sub(subpath)
2028 sub = wctx.sub(subpath)
2029 try:
2029 try:
2030 submatch = matchmod.narrowmatcher(subpath, match)
2030 submatch = matchmod.narrowmatcher(subpath, match)
2031 subbad, subforgot = sub.forget(submatch, prefix)
2031 subbad, subforgot = sub.forget(submatch, prefix)
2032 bad.extend([subpath + '/' + f for f in subbad])
2032 bad.extend([subpath + '/' + f for f in subbad])
2033 forgot.extend([subpath + '/' + f for f in subforgot])
2033 forgot.extend([subpath + '/' + f for f in subforgot])
2034 except error.LookupError:
2034 except error.LookupError:
2035 ui.status(_("skipping missing subrepository: %s\n")
2035 ui.status(_("skipping missing subrepository: %s\n")
2036 % join(subpath))
2036 % join(subpath))
2037
2037
2038 if not explicitonly:
2038 if not explicitonly:
2039 for f in match.files():
2039 for f in match.files():
2040 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2040 if f not in repo.dirstate and not repo.wvfs.isdir(f):
2041 if f not in forgot:
2041 if f not in forgot:
2042 if repo.wvfs.exists(f):
2042 if repo.wvfs.exists(f):
2043 ui.warn(_('not removing %s: '
2043 ui.warn(_('not removing %s: '
2044 'file is already untracked\n')
2044 'file is already untracked\n')
2045 % match.rel(join(f)))
2045 % match.rel(f))
2046 bad.append(f)
2046 bad.append(f)
2047
2047
2048 for f in forget:
2048 for f in forget:
2049 if ui.verbose or not match.exact(f):
2049 if ui.verbose or not match.exact(f):
2050 ui.status(_('removing %s\n') % match.rel(join(f)))
2050 ui.status(_('removing %s\n') % match.rel(f))
2051
2051
2052 rejected = wctx.forget(forget, prefix)
2052 rejected = wctx.forget(forget, prefix)
2053 bad.extend(f for f in rejected if f in match.files())
2053 bad.extend(f for f in rejected if f in match.files())
2054 forgot.extend(forget)
2054 forgot.extend(forget)
2055 return bad, forgot
2055 return bad, forgot
2056
2056
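# Editor's note (sketch): both add() and forget() above collect names rejected
# by the matcher by temporarily wrapping match.bad:
#
#     oldbad = match.bad
#     match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
#
# list.append() returns None, so the 'or' always falls through and the
# original bad() callback still fires after the name is recorded.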
2057 def remove(ui, repo, m, prefix, after, force, subrepos):
2057 def remove(ui, repo, m, prefix, after, force, subrepos):
2058 join = lambda f: os.path.join(prefix, f)
2058 join = lambda f: os.path.join(prefix, f)
2059 ret = 0
2059 ret = 0
2060 s = repo.status(match=m, clean=True)
2060 s = repo.status(match=m, clean=True)
2061 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2061 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2062
2062
2063 wctx = repo[None]
2063 wctx = repo[None]
2064
2064
2065 for subpath in sorted(wctx.substate):
2065 for subpath in sorted(wctx.substate):
2066 def matchessubrepo(matcher, subpath):
2066 def matchessubrepo(matcher, subpath):
2067 if matcher.exact(subpath):
2067 if matcher.exact(subpath):
2068 return True
2068 return True
2069 for f in matcher.files():
2069 for f in matcher.files():
2070 if f.startswith(subpath):
2070 if f.startswith(subpath):
2071 return True
2071 return True
2072 return False
2072 return False
2073
2073
2074 if subrepos or matchessubrepo(m, subpath):
2074 if subrepos or matchessubrepo(m, subpath):
2075 sub = wctx.sub(subpath)
2075 sub = wctx.sub(subpath)
2076 try:
2076 try:
2077 submatch = matchmod.narrowmatcher(subpath, m)
2077 submatch = matchmod.narrowmatcher(subpath, m)
2078 if sub.removefiles(submatch, prefix, after, force, subrepos):
2078 if sub.removefiles(submatch, prefix, after, force, subrepos):
2079 ret = 1
2079 ret = 1
2080 except error.LookupError:
2080 except error.LookupError:
2081 ui.status(_("skipping missing subrepository: %s\n")
2081 ui.status(_("skipping missing subrepository: %s\n")
2082 % join(subpath))
2082 % join(subpath))
2083
2083
2084 # warn about failure to delete explicit files/dirs
2084 # warn about failure to delete explicit files/dirs
2085 for f in m.files():
2085 for f in m.files():
2086 def insubrepo():
2086 def insubrepo():
2087 for subpath in wctx.substate:
2087 for subpath in wctx.substate:
2088 if f.startswith(subpath):
2088 if f.startswith(subpath):
2089 return True
2089 return True
2090 return False
2090 return False
2091
2091
2092 if f in repo.dirstate or f in wctx.dirs() or f == '.' or insubrepo():
2092 if f in repo.dirstate or f in wctx.dirs() or f == '.' or insubrepo():
2093 continue
2093 continue
2094
2094
2095 if repo.wvfs.exists(f):
2095 if repo.wvfs.exists(f):
2096 if repo.wvfs.isdir(f):
2096 if repo.wvfs.isdir(f):
2097 ui.warn(_('not removing %s: no tracked files\n')
2097 ui.warn(_('not removing %s: no tracked files\n')
2098 % m.rel(join(f)))
2098 % m.rel(f))
2099 else:
2099 else:
2100 ui.warn(_('not removing %s: file is untracked\n')
2100 ui.warn(_('not removing %s: file is untracked\n')
2101 % m.rel(join(f)))
2101 % m.rel(f))
2102 # missing files will generate a warning elsewhere
2102 # missing files will generate a warning elsewhere
2103 ret = 1
2103 ret = 1
2104
2104
2105 if force:
2105 if force:
2106 list = modified + deleted + clean + added
2106 list = modified + deleted + clean + added
2107 elif after:
2107 elif after:
2108 list = deleted
2108 list = deleted
2109 for f in modified + added + clean:
2109 for f in modified + added + clean:
2110 ui.warn(_('not removing %s: file still exists\n') % m.rel(join(f)))
2110 ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
2111 ret = 1
2111 ret = 1
2112 else:
2112 else:
2113 list = deleted + clean
2113 list = deleted + clean
2114 for f in modified:
2114 for f in modified:
2115 ui.warn(_('not removing %s: file is modified (use -f'
2115 ui.warn(_('not removing %s: file is modified (use -f'
2116 ' to force removal)\n') % m.rel(join(f)))
2116 ' to force removal)\n') % m.rel(f))
2117 ret = 1
2117 ret = 1
2118 for f in added:
2118 for f in added:
2119 ui.warn(_('not removing %s: file has been marked for add'
2119 ui.warn(_('not removing %s: file has been marked for add'
2120 ' (use forget to undo)\n') % m.rel(join(f)))
2120 ' (use forget to undo)\n') % m.rel(f))
2121 ret = 1
2121 ret = 1
2122
2122
2123 for f in sorted(list):
2123 for f in sorted(list):
2124 if ui.verbose or not m.exact(f):
2124 if ui.verbose or not m.exact(f):
2125 ui.status(_('removing %s\n') % m.rel(join(f)))
2125 ui.status(_('removing %s\n') % m.rel(f))
2126
2126
2127 wlock = repo.wlock()
2127 wlock = repo.wlock()
2128 try:
2128 try:
2129 if not after:
2129 if not after:
2130 for f in list:
2130 for f in list:
2131 if f in added:
2131 if f in added:
2132 continue # we never unlink added files on remove
2132 continue # we never unlink added files on remove
2133 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2133 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
2134 repo[None].forget(list)
2134 repo[None].forget(list)
2135 finally:
2135 finally:
2136 wlock.release()
2136 wlock.release()
2137
2137
2138 return ret
2138 return ret
2139
2139
2140 def cat(ui, repo, ctx, matcher, prefix, **opts):
2140 def cat(ui, repo, ctx, matcher, prefix, **opts):
2141 err = 1
2141 err = 1
2142
2142
2143 def write(path):
2143 def write(path):
2144 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2144 fp = makefileobj(repo, opts.get('output'), ctx.node(),
2145 pathname=os.path.join(prefix, path))
2145 pathname=os.path.join(prefix, path))
2146 data = ctx[path].data()
2146 data = ctx[path].data()
2147 if opts.get('decode'):
2147 if opts.get('decode'):
2148 data = repo.wwritedata(path, data)
2148 data = repo.wwritedata(path, data)
2149 fp.write(data)
2149 fp.write(data)
2150 fp.close()
2150 fp.close()
2151
2151
2152 # Automation often uses hg cat on single files, so special case it
2152 # Automation often uses hg cat on single files, so special case it
2153 # for performance to avoid the cost of parsing the manifest.
2153 # for performance to avoid the cost of parsing the manifest.
2154 if len(matcher.files()) == 1 and not matcher.anypats():
2154 if len(matcher.files()) == 1 and not matcher.anypats():
2155 file = matcher.files()[0]
2155 file = matcher.files()[0]
2156 mf = repo.manifest
2156 mf = repo.manifest
2157 mfnode = ctx._changeset[0]
2157 mfnode = ctx._changeset[0]
2158 if mf.find(mfnode, file)[0]:
2158 if mf.find(mfnode, file)[0]:
2159 write(file)
2159 write(file)
2160 return 0
2160 return 0
2161
2161
2162 # Don't warn about "missing" files that are really in subrepos
2162 # Don't warn about "missing" files that are really in subrepos
2163 bad = matcher.bad
2163 bad = matcher.bad
2164
2164
2165 def badfn(path, msg):
2165 def badfn(path, msg):
2166 for subpath in ctx.substate:
2166 for subpath in ctx.substate:
2167 if path.startswith(subpath):
2167 if path.startswith(subpath):
2168 return
2168 return
2169 bad(path, msg)
2169 bad(path, msg)
2170
2170
2171 matcher.bad = badfn
2171 matcher.bad = badfn
2172
2172
2173 for abs in ctx.walk(matcher):
2173 for abs in ctx.walk(matcher):
2174 write(abs)
2174 write(abs)
2175 err = 0
2175 err = 0
2176
2176
2177 matcher.bad = bad
2177 matcher.bad = bad
2178
2178
2179 for subpath in sorted(ctx.substate):
2179 for subpath in sorted(ctx.substate):
2180 sub = ctx.sub(subpath)
2180 sub = ctx.sub(subpath)
2181 try:
2181 try:
2182 submatch = matchmod.narrowmatcher(subpath, matcher)
2182 submatch = matchmod.narrowmatcher(subpath, matcher)
2183
2183
2184 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2184 if not sub.cat(submatch, os.path.join(prefix, sub._path),
2185 **opts):
2185 **opts):
2186 err = 0
2186 err = 0
2187 except error.RepoLookupError:
2187 except error.RepoLookupError:
2188 ui.status(_("skipping missing subrepository: %s\n")
2188 ui.status(_("skipping missing subrepository: %s\n")
2189 % os.path.join(prefix, subpath))
2189 % os.path.join(prefix, subpath))
2190
2190
2191 return err
2191 return err
2192
2192
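# Editor's sketch (assumed caller, not shown in this file): the 'hg cat'
# command typically builds the context and matcher itself before delegating
# to cat() above, along the lines of
#
#     ctx = scmutil.revsingle(repo, opts.get('rev'))
#     m = scmutil.match(ctx, (file1,) + pats, opts)
#     return cat(ui, repo, ctx, m, '', **opts)
#
# so the single-file fast path triggers whenever exactly one plain filename
# and no patterns are given.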
2193 def commit(ui, repo, commitfunc, pats, opts):
2193 def commit(ui, repo, commitfunc, pats, opts):
2194 '''commit the specified files or all outstanding changes'''
2194 '''commit the specified files or all outstanding changes'''
2195 date = opts.get('date')
2195 date = opts.get('date')
2196 if date:
2196 if date:
2197 opts['date'] = util.parsedate(date)
2197 opts['date'] = util.parsedate(date)
2198 message = logmessage(ui, opts)
2198 message = logmessage(ui, opts)
2199 matcher = scmutil.match(repo[None], pats, opts)
2199 matcher = scmutil.match(repo[None], pats, opts)
2200
2200
2201 # extract addremove carefully -- this function can be called from a command
2201 # extract addremove carefully -- this function can be called from a command
2202 # that doesn't support addremove
2202 # that doesn't support addremove
2203 if opts.get('addremove'):
2203 if opts.get('addremove'):
2204 if scmutil.addremove(repo, matcher, "", opts) != 0:
2204 if scmutil.addremove(repo, matcher, "", opts) != 0:
2205 raise util.Abort(
2205 raise util.Abort(
2206 _("failed to mark all new/missing files as added/removed"))
2206 _("failed to mark all new/missing files as added/removed"))
2207
2207
2208 return commitfunc(ui, repo, message, matcher, opts)
2208 return commitfunc(ui, repo, message, matcher, opts)
2209
2209
2210 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2210 def amend(ui, repo, commitfunc, old, extra, pats, opts):
2211 # amend will reuse the existing user if not specified, but the obsolete
2211 # amend will reuse the existing user if not specified, but the obsolete
2212 # marker creation requires that the current user's name is specified.
2212 # marker creation requires that the current user's name is specified.
2213 if obsolete._enabled:
2213 if obsolete._enabled:
2214 ui.username() # raise exception if username not set
2214 ui.username() # raise exception if username not set
2215
2215
2216 ui.note(_('amending changeset %s\n') % old)
2216 ui.note(_('amending changeset %s\n') % old)
2217 base = old.p1()
2217 base = old.p1()
2218
2218
2219 wlock = lock = newid = None
2219 wlock = lock = newid = None
2220 try:
2220 try:
2221 wlock = repo.wlock()
2221 wlock = repo.wlock()
2222 lock = repo.lock()
2222 lock = repo.lock()
2223 tr = repo.transaction('amend')
2223 tr = repo.transaction('amend')
2224 try:
2224 try:
2225 # See if we got a message from -m or -l, if not, open the editor
2225 # See if we got a message from -m or -l, if not, open the editor
2226 # with the message of the changeset to amend
2226 # with the message of the changeset to amend
2227 message = logmessage(ui, opts)
2227 message = logmessage(ui, opts)
2228 # ensure logfile does not conflict with later enforcement of the
2228 # ensure logfile does not conflict with later enforcement of the
2229 # message. potential logfile content has been processed by
2229 # message. potential logfile content has been processed by
2230 # `logmessage` anyway.
2230 # `logmessage` anyway.
2231 opts.pop('logfile')
2231 opts.pop('logfile')
2232 # First, do a regular commit to record all changes in the working
2232 # First, do a regular commit to record all changes in the working
2233 # directory (if there are any)
2233 # directory (if there are any)
2234 ui.callhooks = False
2234 ui.callhooks = False
2235 currentbookmark = repo._bookmarkcurrent
2235 currentbookmark = repo._bookmarkcurrent
2236 try:
2236 try:
2237 repo._bookmarkcurrent = None
2237 repo._bookmarkcurrent = None
2238 opts['message'] = 'temporary amend commit for %s' % old
2238 opts['message'] = 'temporary amend commit for %s' % old
2239 node = commit(ui, repo, commitfunc, pats, opts)
2239 node = commit(ui, repo, commitfunc, pats, opts)
2240 finally:
2240 finally:
2241 repo._bookmarkcurrent = currentbookmark
2241 repo._bookmarkcurrent = currentbookmark
2242 ui.callhooks = True
2242 ui.callhooks = True
2243 ctx = repo[node]
2243 ctx = repo[node]
2244
2244
2245 # Participating changesets:
2245 # Participating changesets:
2246 #
2246 #
2247 # node/ctx o - new (intermediate) commit that contains changes
2247 # node/ctx o - new (intermediate) commit that contains changes
2248 # | from working dir to go into amending commit
2248 # | from working dir to go into amending commit
2249 # | (or a workingctx if there were no changes)
2249 # | (or a workingctx if there were no changes)
2250 # |
2250 # |
2251 # old o - changeset to amend
2251 # old o - changeset to amend
2252 # |
2252 # |
2253 # base o - parent of amending changeset
2253 # base o - parent of amending changeset
2254
2254
2255 # Update extra dict from amended commit (e.g. to preserve graft
2255 # Update extra dict from amended commit (e.g. to preserve graft
2256 # source)
2256 # source)
2257 extra.update(old.extra())
2257 extra.update(old.extra())
2258
2258
2259 # Also update it from the intermediate commit or from the wctx
2259 # Also update it from the intermediate commit or from the wctx
2260 extra.update(ctx.extra())
2260 extra.update(ctx.extra())
2261
2261
2262 if len(old.parents()) > 1:
2262 if len(old.parents()) > 1:
2263 # ctx.files() isn't reliable for merges, so fall back to the
2263 # ctx.files() isn't reliable for merges, so fall back to the
2264 # slower repo.status() method
2264 # slower repo.status() method
2265 files = set([fn for st in repo.status(base, old)[:3]
2265 files = set([fn for st in repo.status(base, old)[:3]
2266 for fn in st])
2266 for fn in st])
2267 else:
2267 else:
2268 files = set(old.files())
2268 files = set(old.files())
2269
2269
2270 # Second, we use either the commit we just did, or if there were no
2270 # Second, we use either the commit we just did, or if there were no
2271 # changes the parent of the working directory as the version of the
2271 # changes the parent of the working directory as the version of the
2272 # files in the final amend commit
2272 # files in the final amend commit
2273 if node:
2273 if node:
2274 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2274 ui.note(_('copying changeset %s to %s\n') % (ctx, base))
2275
2275
2276 user = ctx.user()
2276 user = ctx.user()
2277 date = ctx.date()
2277 date = ctx.date()
2278 # Recompute copies (avoid recording a -> b -> a)
2278 # Recompute copies (avoid recording a -> b -> a)
2279 copied = copies.pathcopies(base, ctx)
2279 copied = copies.pathcopies(base, ctx)
2280
2280
2281 # Prune files which were reverted by the updates: if old
2281 # Prune files which were reverted by the updates: if old
2282 # introduced file X and our intermediate commit, node,
2282 # introduced file X and our intermediate commit, node,
2283 # renamed that file, then those two files are the same and
2283 # renamed that file, then those two files are the same and
2284 # we can discard X from our list of files. Likewise if X
2284 # we can discard X from our list of files. Likewise if X
2285 # was deleted, it's no longer relevant
2285 # was deleted, it's no longer relevant
2286 files.update(ctx.files())
2286 files.update(ctx.files())
2287
2287
2288 def samefile(f):
2288 def samefile(f):
2289 if f in ctx.manifest():
2289 if f in ctx.manifest():
2290 a = ctx.filectx(f)
2290 a = ctx.filectx(f)
2291 if f in base.manifest():
2291 if f in base.manifest():
2292 b = base.filectx(f)
2292 b = base.filectx(f)
2293 return (not a.cmp(b)
2293 return (not a.cmp(b)
2294 and a.flags() == b.flags())
2294 and a.flags() == b.flags())
2295 else:
2295 else:
2296 return False
2296 return False
2297 else:
2297 else:
2298 return f not in base.manifest()
2298 return f not in base.manifest()
2299 files = [f for f in files if not samefile(f)]
2299 files = [f for f in files if not samefile(f)]
2300
2300
2301 def filectxfn(repo, ctx_, path):
2301 def filectxfn(repo, ctx_, path):
2302 try:
2302 try:
2303 fctx = ctx[path]
2303 fctx = ctx[path]
2304 flags = fctx.flags()
2304 flags = fctx.flags()
2305 mctx = context.memfilectx(repo,
2305 mctx = context.memfilectx(repo,
2306 fctx.path(), fctx.data(),
2306 fctx.path(), fctx.data(),
2307 islink='l' in flags,
2307 islink='l' in flags,
2308 isexec='x' in flags,
2308 isexec='x' in flags,
2309 copied=copied.get(path))
2309 copied=copied.get(path))
2310 return mctx
2310 return mctx
2311 except KeyError:
2311 except KeyError:
2312 return None
2312 return None
2313 else:
2313 else:
2314 ui.note(_('copying changeset %s to %s\n') % (old, base))
2314 ui.note(_('copying changeset %s to %s\n') % (old, base))
2315
2315
2316 # Use version of files as in the old cset
2316 # Use version of files as in the old cset
2317 def filectxfn(repo, ctx_, path):
2317 def filectxfn(repo, ctx_, path):
2318 try:
2318 try:
2319 return old.filectx(path)
2319 return old.filectx(path)
2320 except KeyError:
2320 except KeyError:
2321 return None
2321 return None
2322
2322
2323 user = opts.get('user') or old.user()
2323 user = opts.get('user') or old.user()
2324 date = opts.get('date') or old.date()
2324 date = opts.get('date') or old.date()
2325 editform = mergeeditform(old, 'commit.amend')
2325 editform = mergeeditform(old, 'commit.amend')
2326 editor = getcommiteditor(editform=editform, **opts)
2326 editor = getcommiteditor(editform=editform, **opts)
2327 if not message:
2327 if not message:
2328 editor = getcommiteditor(edit=True, editform=editform)
2328 editor = getcommiteditor(edit=True, editform=editform)
2329 message = old.description()
2329 message = old.description()
2330
2330
2331 pureextra = extra.copy()
2331 pureextra = extra.copy()
2332 extra['amend_source'] = old.hex()
2332 extra['amend_source'] = old.hex()
2333
2333
2334 new = context.memctx(repo,
2334 new = context.memctx(repo,
2335 parents=[base.node(), old.p2().node()],
2335 parents=[base.node(), old.p2().node()],
2336 text=message,
2336 text=message,
2337 files=files,
2337 files=files,
2338 filectxfn=filectxfn,
2338 filectxfn=filectxfn,
2339 user=user,
2339 user=user,
2340 date=date,
2340 date=date,
2341 extra=extra,
2341 extra=extra,
2342 editor=editor)
2342 editor=editor)
2343
2343
2344 newdesc = changelog.stripdesc(new.description())
2344 newdesc = changelog.stripdesc(new.description())
2345 if ((not node)
2345 if ((not node)
2346 and newdesc == old.description()
2346 and newdesc == old.description()
2347 and user == old.user()
2347 and user == old.user()
2348 and date == old.date()
2348 and date == old.date()
2349 and pureextra == old.extra()):
2349 and pureextra == old.extra()):
2350 # nothing changed. continuing here would create a new node
2350 # nothing changed. continuing here would create a new node
2351 # anyway because of the amend_source noise.
2351 # anyway because of the amend_source noise.
2352 #
2352 #
2353 # This is not what we expect from amend.
2353 # This is not what we expect from amend.
2354 return old.node()
2354 return old.node()
2355
2355
2356 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2356 ph = repo.ui.config('phases', 'new-commit', phases.draft)
2357 try:
2357 try:
2358 if opts.get('secret'):
2358 if opts.get('secret'):
2359 commitphase = 'secret'
2359 commitphase = 'secret'
2360 else:
2360 else:
2361 commitphase = old.phase()
2361 commitphase = old.phase()
2362 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2362 repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
2363 newid = repo.commitctx(new)
2363 newid = repo.commitctx(new)
2364 finally:
2364 finally:
2365 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2365 repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
2366 if newid != old.node():
2366 if newid != old.node():
2367 # Reroute the working copy parent to the new changeset
2367 # Reroute the working copy parent to the new changeset
2368 repo.setparents(newid, nullid)
2368 repo.setparents(newid, nullid)
2369
2369
2370 # Move bookmarks from old parent to amend commit
2370 # Move bookmarks from old parent to amend commit
2371 bms = repo.nodebookmarks(old.node())
2371 bms = repo.nodebookmarks(old.node())
2372 if bms:
2372 if bms:
2373 marks = repo._bookmarks
2373 marks = repo._bookmarks
2374 for bm in bms:
2374 for bm in bms:
2375 marks[bm] = newid
2375 marks[bm] = newid
2376 marks.write()
2376 marks.write()
2377 # commit the whole amend process
2377 # commit the whole amend process
2378 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2378 createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)
2379 if createmarkers and newid != old.node():
2379 if createmarkers and newid != old.node():
2380 # mark the new changeset as successor of the rewritten one
2380 # mark the new changeset as successor of the rewritten one
2381 new = repo[newid]
2381 new = repo[newid]
2382 obs = [(old, (new,))]
2382 obs = [(old, (new,))]
2383 if node:
2383 if node:
2384 obs.append((ctx, ()))
2384 obs.append((ctx, ()))
2385
2385
2386 obsolete.createmarkers(repo, obs)
2386 obsolete.createmarkers(repo, obs)
2387 tr.close()
2387 tr.close()
2388 finally:
2388 finally:
2389 tr.release()
2389 tr.release()
2390 if not createmarkers and newid != old.node():
2390 if not createmarkers and newid != old.node():
2391 # Strip the intermediate commit (if there was one) and the amended
2391 # Strip the intermediate commit (if there was one) and the amended
2392 # commit
2392 # commit
2393 if node:
2393 if node:
2394 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2394 ui.note(_('stripping intermediate changeset %s\n') % ctx)
2395 ui.note(_('stripping amended changeset %s\n') % old)
2395 ui.note(_('stripping amended changeset %s\n') % old)
2396 repair.strip(ui, repo, old.node(), topic='amend-backup')
2396 repair.strip(ui, repo, old.node(), topic='amend-backup')
2397 finally:
2397 finally:
2398 if newid is None:
2398 if newid is None:
2399 repo.dirstate.invalidate()
2399 repo.dirstate.invalidate()
2400 lockmod.release(lock, wlock)
2400 lockmod.release(lock, wlock)
2401 return newid
2401 return newid
2402
2402
2403 def commiteditor(repo, ctx, subs, editform=''):
2403 def commiteditor(repo, ctx, subs, editform=''):
2404 if ctx.description():
2404 if ctx.description():
2405 return ctx.description()
2405 return ctx.description()
2406 return commitforceeditor(repo, ctx, subs, editform=editform)
2406 return commitforceeditor(repo, ctx, subs, editform=editform)
2407
2407
2408 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2408 def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
2409 editform=''):
2409 editform=''):
2410 if not extramsg:
2410 if not extramsg:
2411 extramsg = _("Leave message empty to abort commit.")
2411 extramsg = _("Leave message empty to abort commit.")
2412
2412
2413 forms = [e for e in editform.split('.') if e]
2413 forms = [e for e in editform.split('.') if e]
2414 forms.insert(0, 'changeset')
2414 forms.insert(0, 'changeset')
2415 while forms:
2415 while forms:
2416 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2416 tmpl = repo.ui.config('committemplate', '.'.join(forms))
2417 if tmpl:
2417 if tmpl:
2418 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2418 committext = buildcommittemplate(repo, ctx, subs, extramsg, tmpl)
2419 break
2419 break
2420 forms.pop()
2420 forms.pop()
2421 else:
2421 else:
2422 committext = buildcommittext(repo, ctx, subs, extramsg)
2422 committext = buildcommittext(repo, ctx, subs, extramsg)
2423
2423
2424 # run editor in the repository root
2424 # run editor in the repository root
2425 olddir = os.getcwd()
2425 olddir = os.getcwd()
2426 os.chdir(repo.root)
2426 os.chdir(repo.root)
2427 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2427 text = repo.ui.edit(committext, ctx.user(), ctx.extra(), editform=editform)
2428 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2428 text = re.sub("(?m)^HG:.*(\n|$)", "", text)
2429 os.chdir(olddir)
2429 os.chdir(olddir)
2430
2430
2431 if finishdesc:
2431 if finishdesc:
2432 text = finishdesc(text)
2432 text = finishdesc(text)
2433 if not text.strip():
2433 if not text.strip():
2434 raise util.Abort(_("empty commit message"))
2434 raise util.Abort(_("empty commit message"))
2435
2435
2436 return text
2436 return text
2437
2437
2438 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2438 def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
2439 ui = repo.ui
2439 ui = repo.ui
2440 tmpl, mapfile = gettemplate(ui, tmpl, None)
2440 tmpl, mapfile = gettemplate(ui, tmpl, None)
2441
2441
2442 try:
2442 try:
2443 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2443 t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
2444 except SyntaxError, inst:
2444 except SyntaxError, inst:
2445 raise util.Abort(inst.args[0])
2445 raise util.Abort(inst.args[0])
2446
2446
2447 for k, v in repo.ui.configitems('committemplate'):
2447 for k, v in repo.ui.configitems('committemplate'):
2448 if k != 'changeset':
2448 if k != 'changeset':
2449 t.t.cache[k] = v
2449 t.t.cache[k] = v
2450
2450
2451 if not extramsg:
2451 if not extramsg:
2452 extramsg = '' # ensure that extramsg is string
2452 extramsg = '' # ensure that extramsg is string
2453
2453
2454 ui.pushbuffer()
2454 ui.pushbuffer()
2455 t.show(ctx, extramsg=extramsg)
2455 t.show(ctx, extramsg=extramsg)
2456 return ui.popbuffer()
2456 return ui.popbuffer()
2457
2457
2458 def buildcommittext(repo, ctx, subs, extramsg):
2458 def buildcommittext(repo, ctx, subs, extramsg):
2459 edittext = []
2459 edittext = []
2460 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2460 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
2461 if ctx.description():
2461 if ctx.description():
2462 edittext.append(ctx.description())
2462 edittext.append(ctx.description())
2463 edittext.append("")
2463 edittext.append("")
2464 edittext.append("") # Empty line between message and comments.
2464 edittext.append("") # Empty line between message and comments.
2465 edittext.append(_("HG: Enter commit message."
2465 edittext.append(_("HG: Enter commit message."
2466 " Lines beginning with 'HG:' are removed."))
2466 " Lines beginning with 'HG:' are removed."))
2467 edittext.append("HG: %s" % extramsg)
2467 edittext.append("HG: %s" % extramsg)
2468 edittext.append("HG: --")
2468 edittext.append("HG: --")
2469 edittext.append(_("HG: user: %s") % ctx.user())
2469 edittext.append(_("HG: user: %s") % ctx.user())
2470 if ctx.p2():
2470 if ctx.p2():
2471 edittext.append(_("HG: branch merge"))
2471 edittext.append(_("HG: branch merge"))
2472 if ctx.branch():
2472 if ctx.branch():
2473 edittext.append(_("HG: branch '%s'") % ctx.branch())
2473 edittext.append(_("HG: branch '%s'") % ctx.branch())
2474 if bookmarks.iscurrent(repo):
2474 if bookmarks.iscurrent(repo):
2475 edittext.append(_("HG: bookmark '%s'") % repo._bookmarkcurrent)
2475 edittext.append(_("HG: bookmark '%s'") % repo._bookmarkcurrent)
2476 edittext.extend([_("HG: subrepo %s") % s for s in subs])
2476 edittext.extend([_("HG: subrepo %s") % s for s in subs])
2477 edittext.extend([_("HG: added %s") % f for f in added])
2477 edittext.extend([_("HG: added %s") % f for f in added])
2478 edittext.extend([_("HG: changed %s") % f for f in modified])
2478 edittext.extend([_("HG: changed %s") % f for f in modified])
2479 edittext.extend([_("HG: removed %s") % f for f in removed])
2479 edittext.extend([_("HG: removed %s") % f for f in removed])
2480 if not added and not modified and not removed:
2480 if not added and not modified and not removed:
2481 edittext.append(_("HG: no files changed"))
2481 edittext.append(_("HG: no files changed"))
2482 edittext.append("")
2482 edittext.append("")
2483
2483
2484 return "\n".join(edittext)
2484 return "\n".join(edittext)
2485
2485
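# Editor's sketch (hypothetical user and file names, leading blank lines for
# the message body omitted): the editor text buildcommittext() above assembles
# for a changeset with one added and one modified file, using the default
# extramsg from commitforceeditor():
#
#     HG: Enter commit message. Lines beginning with 'HG:' are removed.
#     HG: Leave message empty to abort commit.
#     HG: --
#     HG: user: Jane Doe <jane@example.com>
#     HG: branch 'default'
#     HG: added foo.txt
#     HG: changed bar.py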
2486 def commitstatus(repo, node, branch, bheads=None, opts={}):
2486 def commitstatus(repo, node, branch, bheads=None, opts={}):
2487 ctx = repo[node]
2487 ctx = repo[node]
2488 parents = ctx.parents()
2488 parents = ctx.parents()
2489
2489
2490 if (not opts.get('amend') and bheads and node not in bheads and not
2490 if (not opts.get('amend') and bheads and node not in bheads and not
2491 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2491 [x for x in parents if x.node() in bheads and x.branch() == branch]):
2492 repo.ui.status(_('created new head\n'))
2492 repo.ui.status(_('created new head\n'))
2493 # The message is not printed for initial roots. For the other
2493 # The message is not printed for initial roots. For the other
2494 # changesets, it is printed in the following situations:
2494 # changesets, it is printed in the following situations:
2495 #
2495 #
2496 # Par column: for the 2 parents with ...
2496 # Par column: for the 2 parents with ...
2497 # N: null or no parent
2497 # N: null or no parent
2498 # B: parent is on another named branch
2498 # B: parent is on another named branch
2499 # C: parent is a regular non head changeset
2499 # C: parent is a regular non head changeset
2500 # H: parent was a branch head of the current branch
2500 # H: parent was a branch head of the current branch
2501 # Msg column: whether we print "created new head" message
2501 # Msg column: whether we print "created new head" message
2502 # In the following, it is assumed that there already exists some
2502 # In the following, it is assumed that there already exists some
2503 # initial branch heads of the current branch, otherwise nothing is
2503 # initial branch heads of the current branch, otherwise nothing is
2504 # printed anyway.
2504 # printed anyway.
2505 #
2505 #
2506 # Par Msg Comment
2506 # Par Msg Comment
2507 # N N y additional topo root
2507 # N N y additional topo root
2508 #
2508 #
2509 # B N y additional branch root
2509 # B N y additional branch root
2510 # C N y additional topo head
2510 # C N y additional topo head
2511 # H N n usual case
2511 # H N n usual case
2512 #
2512 #
2513 # B B y weird additional branch root
2513 # B B y weird additional branch root
2514 # C B y branch merge
2514 # C B y branch merge
2515 # H B n merge with named branch
2515 # H B n merge with named branch
2516 #
2516 #
2517 # C C y additional head from merge
2517 # C C y additional head from merge
2518 # C H n merge with a head
2518 # C H n merge with a head
2519 #
2519 #
2520 # H H n head merge: head count decreases
2520 # H H n head merge: head count decreases
2521
2521
2522 if not opts.get('close_branch'):
2522 if not opts.get('close_branch'):
2523 for r in parents:
2523 for r in parents:
2524 if r.closesbranch() and r.branch() == branch:
2524 if r.closesbranch() and r.branch() == branch:
2525 repo.ui.status(_('reopening closed branch head %d\n') % r)
2525 repo.ui.status(_('reopening closed branch head %d\n') % r)
2526
2526
2527 if repo.ui.debugflag:
2527 if repo.ui.debugflag:
2528 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2528 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
2529 elif repo.ui.verbose:
2529 elif repo.ui.verbose:
2530 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2530 repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2531
2531
2532 def revert(ui, repo, ctx, parents, *pats, **opts):
2532 def revert(ui, repo, ctx, parents, *pats, **opts):
2533 parent, p2 = parents
2533 parent, p2 = parents
2534 node = ctx.node()
2534 node = ctx.node()
2535
2535
2536 mf = ctx.manifest()
2536 mf = ctx.manifest()
2537 if node == p2:
2537 if node == p2:
2538 parent = p2
2538 parent = p2
2539 if node == parent:
2539 if node == parent:
2540 pmf = mf
2540 pmf = mf
2541 else:
2541 else:
2542 pmf = None
2542 pmf = None
2543
2543
2544 # need all matching names in dirstate and manifest of target rev,
2544 # need all matching names in dirstate and manifest of target rev,
2545 # so we have to walk both. Do not print errors if files exist in one
2545 # so we have to walk both. Do not print errors if files exist in one
2546 # but not the other.
2546 # but not the other.
2547
2547
2548 # `names` is a mapping for all elements in working copy and target revision.
2548 # `names` is a mapping for all elements in working copy and target revision.
2549 # The mapping is in the form:
2549 # The mapping is in the form:
2550 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2550 # <abs path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2551 names = {}
2551 names = {}
2552
2552
2553 wlock = repo.wlock()
2553 wlock = repo.wlock()
2554 try:
2554 try:
2555 ## filling of the `names` mapping
2555 ## filling of the `names` mapping
2556 # walk dirstate to fill `names`
2556 # walk dirstate to fill `names`
2557
2557
2558 m = scmutil.match(repo[None], pats, opts)
2558 m = scmutil.match(repo[None], pats, opts)
2559 if not m.always() or node != parent:
2559 if not m.always() or node != parent:
2560 m.bad = lambda x, y: False
2560 m.bad = lambda x, y: False
2561 for abs in repo.walk(m):
2561 for abs in repo.walk(m):
2562 names[abs] = m.rel(abs), m.exact(abs)
2562 names[abs] = m.rel(abs), m.exact(abs)
2563
2563
2564 # walk target manifest to fill `names`
2564 # walk target manifest to fill `names`
2565
2565
2566 def badfn(path, msg):
2566 def badfn(path, msg):
2567 if path in names:
2567 if path in names:
2568 return
2568 return
2569 if path in ctx.substate:
2569 if path in ctx.substate:
2570 return
2570 return
2571 path_ = path + '/'
2571 path_ = path + '/'
2572 for f in names:
2572 for f in names:
2573 if f.startswith(path_):
2573 if f.startswith(path_):
2574 return
2574 return
2575 ui.warn("%s: %s\n" % (m.rel(path), msg))
2575 ui.warn("%s: %s\n" % (m.rel(path), msg))
2576
2576
2577 m = scmutil.match(ctx, pats, opts)
2577 m = scmutil.match(ctx, pats, opts)
2578 m.bad = badfn
2578 m.bad = badfn
2579 for abs in ctx.walk(m):
2579 for abs in ctx.walk(m):
2580 if abs not in names:
2580 if abs not in names:
2581 names[abs] = m.rel(abs), m.exact(abs)
2581 names[abs] = m.rel(abs), m.exact(abs)
2582
2582
2583 # Find status of all files in `names`.
2583 # Find status of all files in `names`.
2584 m = scmutil.matchfiles(repo, names)
2584 m = scmutil.matchfiles(repo, names)
2585
2585
2586 changes = repo.status(node1=node, match=m,
2586 changes = repo.status(node1=node, match=m,
2587 unknown=True, ignored=True, clean=True)
2587 unknown=True, ignored=True, clean=True)
2588 else:
2588 else:
2589 changes = repo.status(match=m)
2589 changes = repo.status(match=m)
2590 for kind in changes:
2590 for kind in changes:
2591 for abs in kind:
2591 for abs in kind:
2592 names[abs] = m.rel(abs), m.exact(abs)
2592 names[abs] = m.rel(abs), m.exact(abs)
2593
2593
2594 m = scmutil.matchfiles(repo, names)
2594 m = scmutil.matchfiles(repo, names)
2595
2595
2596 modified = set(changes.modified)
2596 modified = set(changes.modified)
2597 added = set(changes.added)
2597 added = set(changes.added)
2598 removed = set(changes.removed)
2598 removed = set(changes.removed)
2599 _deleted = set(changes.deleted)
2599 _deleted = set(changes.deleted)
2600 unknown = set(changes.unknown)
2600 unknown = set(changes.unknown)
2601 unknown.update(changes.ignored)
2601 unknown.update(changes.ignored)
2602 clean = set(changes.clean)
2602 clean = set(changes.clean)
2603 modadded = set()
2603 modadded = set()
2604
2604
2605 # split between files known in target manifest and the others
2605 # split between files known in target manifest and the others
2606 smf = set(mf)
2606 smf = set(mf)
2607
2607
2608 # determine the exact nature of the deleted files
2608 # determine the exact nature of the deleted files
2609 deladded = _deleted - smf
2609 deladded = _deleted - smf
2610 deleted = _deleted - deladded
2610 deleted = _deleted - deladded
2611
2611
2612 # We need to account for the state of the files in the dirstate.
2612 # We need to account for the state of the files in the dirstate.
2613 #
2613 #
2614 # Even when we revert against something other than the parent, this will
2614 # Even when we revert against something other than the parent, this will
2615 # slightly alter the behavior of revert (doing a backup or not, deleting
2615 # slightly alter the behavior of revert (doing a backup or not, deleting
2616 # or just forgetting, etc).
2616 # or just forgetting, etc).
2617 if parent == node:
2617 if parent == node:
2618 dsmodified = modified
2618 dsmodified = modified
2619 dsadded = added
2619 dsadded = added
2620 dsremoved = removed
2620 dsremoved = removed
2621 # store all local modifications, useful later for rename detection
2621 # store all local modifications, useful later for rename detection
2622 localchanges = dsmodified | dsadded
2622 localchanges = dsmodified | dsadded
2623 modified, added, removed = set(), set(), set()
2623 modified, added, removed = set(), set(), set()
2624 else:
2624 else:
2625 changes = repo.status(node1=parent, match=m)
2625 changes = repo.status(node1=parent, match=m)
2626 dsmodified = set(changes.modified)
2626 dsmodified = set(changes.modified)
2627 dsadded = set(changes.added)
2627 dsadded = set(changes.added)
2628 dsremoved = set(changes.removed)
2628 dsremoved = set(changes.removed)
2629 # store all local modifications, useful later for rename detection
2629 # store all local modifications, useful later for rename detection
2630 localchanges = dsmodified | dsadded
2630 localchanges = dsmodified | dsadded
2631
2631
2632 # only take removes between wc and target into account
2632 # only take removes between wc and target into account
2633 clean |= dsremoved - removed
2633 clean |= dsremoved - removed
2634 dsremoved &= removed
2634 dsremoved &= removed
2635 # distinguish between dirstate removes and the others
2635 # distinguish between dirstate removes and the others
2636 removed -= dsremoved
2636 removed -= dsremoved
2637
2637
2638 modadded = added & dsmodified
2638 modadded = added & dsmodified
2639 added -= modadded
2639 added -= modadded
2640
2640
2641 # tell newly modified files apart.
2641 # tell newly modified files apart.
2642 dsmodified &= modified
2642 dsmodified &= modified
2643 dsmodified |= modified & dsadded # dirstate added may need backup
2643 dsmodified |= modified & dsadded # dirstate added may need backup
2644 modified -= dsmodified
2644 modified -= dsmodified
2645
2645
2646 # We need to wait for some post-processing to update this set
2646 # We need to wait for some post-processing to update this set
2647 # before making the distinction. The dirstate will be used for
2647 # before making the distinction. The dirstate will be used for
2648 # that purpose.
2648 # that purpose.
2649 dsadded = added
2649 dsadded = added
2650
2650
2651 # in case of merge, files that are actually added can be reported as
2651 # in case of merge, files that are actually added can be reported as
2652 # modified; we need to post-process the result
2652 # modified; we need to post-process the result
2653 if p2 != nullid:
2653 if p2 != nullid:
2654 if pmf is None:
2654 if pmf is None:
2655 # only need parent manifest in the merge case,
2655 # only need parent manifest in the merge case,
2656 # so do not read by default
2656 # so do not read by default
2657 pmf = repo[parent].manifest()
2657 pmf = repo[parent].manifest()
2658 mergeadd = dsmodified - set(pmf)
2658 mergeadd = dsmodified - set(pmf)
2659 dsadded |= mergeadd
2659 dsadded |= mergeadd
2660 dsmodified -= mergeadd
2660 dsmodified -= mergeadd
2661
2661
2662 # if f is a rename, update `names` to also revert the source
2662 # if f is a rename, update `names` to also revert the source
2663 cwd = repo.getcwd()
2663 cwd = repo.getcwd()
2664 for f in localchanges:
2664 for f in localchanges:
2665 src = repo.dirstate.copied(f)
2665 src = repo.dirstate.copied(f)
2666 # XXX should we check for rename down to target node?
2666 # XXX should we check for rename down to target node?
2667 if src and src not in names and repo.dirstate[src] == 'r':
2667 if src and src not in names and repo.dirstate[src] == 'r':
2668 dsremoved.add(src)
2668 dsremoved.add(src)
2669 names[src] = (repo.pathto(src, cwd), True)
2669 names[src] = (repo.pathto(src, cwd), True)
2670
2670
2671 # distinguish between file to forget and the other
2671 # distinguish between file to forget and the other
2672 added = set()
2672 added = set()
2673 for abs in dsadded:
2673 for abs in dsadded:
2674 if repo.dirstate[abs] != 'a':
2674 if repo.dirstate[abs] != 'a':
2675 added.add(abs)
2675 added.add(abs)
2676 dsadded -= added
2676 dsadded -= added
2677
2677
2678 for abs in deladded:
2678 for abs in deladded:
2679 if repo.dirstate[abs] == 'a':
2679 if repo.dirstate[abs] == 'a':
2680 dsadded.add(abs)
2680 dsadded.add(abs)
2681 deladded -= dsadded
2681 deladded -= dsadded
2682
2682
2683 # For files marked as removed, we check if an unknown file is present at
2683 # For files marked as removed, we check if an unknown file is present at
2684 # the same path. If such a file exists, it may need to be backed up.
2684 # the same path. If such a file exists, it may need to be backed up.
2685 # Making the distinction at this stage helps keep the backup
2685 # Making the distinction at this stage helps keep the backup
2686 # logic simpler.
2686 # logic simpler.
2687 removunk = set()
2687 removunk = set()
2688 for abs in removed:
2688 for abs in removed:
2689 target = repo.wjoin(abs)
2689 target = repo.wjoin(abs)
2690 if os.path.lexists(target):
2690 if os.path.lexists(target):
2691 removunk.add(abs)
2691 removunk.add(abs)
2692 removed -= removunk
2692 removed -= removunk
2693
2693
2694 dsremovunk = set()
2694 dsremovunk = set()
2695 for abs in dsremoved:
2695 for abs in dsremoved:
2696 target = repo.wjoin(abs)
2696 target = repo.wjoin(abs)
2697 if os.path.lexists(target):
2697 if os.path.lexists(target):
2698 dsremovunk.add(abs)
2698 dsremovunk.add(abs)
2699 dsremoved -= dsremovunk
2699 dsremoved -= dsremovunk
2700
2700
2701 # actions to be actually performed by revert
2701 # actions to be actually performed by revert
2702 # (<list of files>, <message>) tuple
2702 # (<list of files>, <message>) tuple
2703 actions = {'revert': ([], _('reverting %s\n')),
2703 actions = {'revert': ([], _('reverting %s\n')),
2704 'add': ([], _('adding %s\n')),
2704 'add': ([], _('adding %s\n')),
2705 'remove': ([], _('removing %s\n')),
2705 'remove': ([], _('removing %s\n')),
2706 'drop': ([], _('removing %s\n')),
2706 'drop': ([], _('removing %s\n')),
2707 'forget': ([], _('forgetting %s\n')),
2707 'forget': ([], _('forgetting %s\n')),
2708 'undelete': ([], _('undeleting %s\n')),
2708 'undelete': ([], _('undeleting %s\n')),
2709 'noop': (None, _('no changes needed to %s\n')),
2709 'noop': (None, _('no changes needed to %s\n')),
2710 'unknown': (None, _('file not managed: %s\n')),
2710 'unknown': (None, _('file not managed: %s\n')),
2711 }
2711 }
2712
2712
2713 # "constant" that convey the backup strategy.
2713 # "constant" that convey the backup strategy.
2714 # All set to `discard` if `no-backup` is set do avoid checking
2714 # All set to `discard` if `no-backup` is set do avoid checking
2715 # no_backup lower in the code.
2715 # no_backup lower in the code.
2716 # These values are ordered for comparison purposes
2716 # These values are ordered for comparison purposes
2717 backup = 2 # unconditionally do backup
2717 backup = 2 # unconditionally do backup
2718 check = 1 # check if the existing file differs from target
2718 check = 1 # check if the existing file differs from target
2719 discard = 0 # never do backup
2719 discard = 0 # never do backup
2720 if opts.get('no_backup'):
2720 if opts.get('no_backup'):
2721 backup = check = discard
2721 backup = check = discard
2722
2722
2723 backupanddel = actions['remove']
2723 backupanddel = actions['remove']
2724 if not opts.get('no_backup'):
2724 if not opts.get('no_backup'):
2725 backupanddel = actions['drop']
2725 backupanddel = actions['drop']
2726
2726
2727 disptable = (
2727 disptable = (
2728 # dispatch table:
2728 # dispatch table:
2729 # file state
2729 # file state
2730 # action
2730 # action
2731 # make backup
2731 # make backup
2732
2732
2733 ## Sets that result in changes to files on disk
2733 ## Sets that result in changes to files on disk
2734 # Modified compared to target, no local change
2734 # Modified compared to target, no local change
2735 (modified, actions['revert'], discard),
2735 (modified, actions['revert'], discard),
2736 # Modified compared to target, but local file is deleted
2736 # Modified compared to target, but local file is deleted
2737 (deleted, actions['revert'], discard),
2737 (deleted, actions['revert'], discard),
2738 # Modified compared to target, local change
2738 # Modified compared to target, local change
2739 (dsmodified, actions['revert'], backup),
2739 (dsmodified, actions['revert'], backup),
2740 # Added since target
2740 # Added since target
2741 (added, actions['remove'], discard),
2741 (added, actions['remove'], discard),
2742 # Added in working directory
2742 # Added in working directory
2743 (dsadded, actions['forget'], discard),
2743 (dsadded, actions['forget'], discard),
2744 # Added since target, with local modifications
2744 # Added since target, with local modifications
2745 (modadded, backupanddel, backup),
2745 (modadded, backupanddel, backup),
2746 # Added since target but file is missing in working directory
2746 # Added since target but file is missing in working directory
2747 (deladded, actions['drop'], discard),
2747 (deladded, actions['drop'], discard),
2748 # Removed since target, before working copy parent
2748 # Removed since target, before working copy parent
2749 (removed, actions['add'], discard),
2749 (removed, actions['add'], discard),
2750 # Same as `removed` but an unknown file exists at the same path
2750 # Same as `removed` but an unknown file exists at the same path
2751 (removunk, actions['add'], check),
2751 (removunk, actions['add'], check),
2752 # Removed since target, marked as such in working copy parent
2752 # Removed since target, marked as such in working copy parent
2753 (dsremoved, actions['undelete'], discard),
2753 (dsremoved, actions['undelete'], discard),
2754 # Same as `dsremoved` but an unknown file exists at the same path
2754 # Same as `dsremoved` but an unknown file exists at the same path
2755 (dsremovunk, actions['undelete'], check),
2755 (dsremovunk, actions['undelete'], check),
2756 ## the following sets do not result in any file changes
2756 ## the following sets do not result in any file changes
2757 # File with no modification
2757 # File with no modification
2758 (clean, actions['noop'], discard),
2758 (clean, actions['noop'], discard),
2759 # Existing file, not tracked anywhere
2759 # Existing file, not tracked anywhere
2760 (unknown, actions['unknown'], discard),
2760 (unknown, actions['unknown'], discard),
2761 )
2761 )
2762
2762
2763 needdata = ('revert', 'add', 'undelete')
2763 needdata = ('revert', 'add', 'undelete')
2764 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
2764 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
2765
2765
2766 wctx = repo[None]
2766 wctx = repo[None]
2767 for abs, (rel, exact) in sorted(names.items()):
2767 for abs, (rel, exact) in sorted(names.items()):
2768 # target file to be touched on disk (relative to cwd)
2768 # target file to be touched on disk (relative to cwd)
2769 target = repo.wjoin(abs)
2769 target = repo.wjoin(abs)
2770 # search for the entry in the dispatch table.
2770 # search for the entry in the dispatch table.
2771 # if the file is in any of these sets, it was touched in the working
2771 # if the file is in any of these sets, it was touched in the working
2772 # directory parent and we are sure it needs to be reverted.
2772 # directory parent and we are sure it needs to be reverted.
2773 for table, (xlist, msg), dobackup in disptable:
2773 for table, (xlist, msg), dobackup in disptable:
2774 if abs not in table:
2774 if abs not in table:
2775 continue
2775 continue
2776 if xlist is not None:
2776 if xlist is not None:
2777 xlist.append(abs)
2777 xlist.append(abs)
2778 if dobackup and (backup <= dobackup
2778 if dobackup and (backup <= dobackup
2779 or wctx[abs].cmp(ctx[abs])):
2779 or wctx[abs].cmp(ctx[abs])):
2780 bakname = "%s.orig" % rel
2780 bakname = "%s.orig" % rel
2781 ui.note(_('saving current version of %s as %s\n') %
2781 ui.note(_('saving current version of %s as %s\n') %
2782 (rel, bakname))
2782 (rel, bakname))
2783 if not opts.get('dry_run'):
2783 if not opts.get('dry_run'):
2784 util.rename(target, bakname)
2784 util.rename(target, bakname)
2785 if ui.verbose or not exact:
2785 if ui.verbose or not exact:
2786 if not isinstance(msg, basestring):
2786 if not isinstance(msg, basestring):
2787 msg = msg(abs)
2787 msg = msg(abs)
2788 ui.status(msg % rel)
2788 ui.status(msg % rel)
2789 elif exact:
2789 elif exact:
2790 ui.warn(msg % rel)
2790 ui.warn(msg % rel)
2791 break
2791 break
2792
2792
2793
2793
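To make the backup decision in the loop above easier to follow, here is a minimal standalone sketch of the same condition (the names `needsbackup` and `differs` are illustrative, not part of cmdutil, and the global backup level is assumed to be at its default of 2): a file is backed up only when its dispatch entry requests at least `check`, and either requests an unconditional `backup` or the working copy actually differs from the target.

    BACKUP, CHECK, DISCARD = 2, 1, 0

    def needsbackup(dobackup, differs):
        # mirrors: dobackup and (backup <= dobackup or wctx[abs].cmp(ctx[abs]))
        return bool(dobackup) and (BACKUP <= dobackup or differs)

    assert needsbackup(BACKUP, differs=False)       # always backed up
    assert not needsbackup(CHECK, differs=False)    # backed up only when it differs
    assert needsbackup(CHECK, differs=True)
    assert not needsbackup(DISCARD, differs=True)   # never backed up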
2794 if not opts.get('dry_run'):
2794 if not opts.get('dry_run'):
2795 _performrevert(repo, parents, ctx, actions)
2795 _performrevert(repo, parents, ctx, actions)
2796
2796
2797 # get the list of subrepos that must be reverted
2797 # get the list of subrepos that must be reverted
2798 subrepomatch = scmutil.match(ctx, pats, opts)
2798 subrepomatch = scmutil.match(ctx, pats, opts)
2799 targetsubs = sorted(s for s in ctx.substate if subrepomatch(s))
2799 targetsubs = sorted(s for s in ctx.substate if subrepomatch(s))
2800
2800
2801 if targetsubs:
2801 if targetsubs:
2802 # Revert the subrepos on the revert list
2802 # Revert the subrepos on the revert list
2803 for sub in targetsubs:
2803 for sub in targetsubs:
2804 ctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
2804 ctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
2805 finally:
2805 finally:
2806 wlock.release()
2806 wlock.release()
2807
2807
2808 def _revertprefetch(repo, ctx, *files):
2808 def _revertprefetch(repo, ctx, *files):
2809 """Let extension changing the storage layer prefetch content"""
2809 """Let extension changing the storage layer prefetch content"""
2810 pass
2810 pass
2811
2811
2812 def _performrevert(repo, parents, ctx, actions):
2812 def _performrevert(repo, parents, ctx, actions):
2813 """function that actually perform all the actions computed for revert
2813 """function that actually perform all the actions computed for revert
2814
2814
2815 This is an independent function to let extension to plug in and react to
2815 This is an independent function to let extension to plug in and react to
2816 the imminent revert.
2816 the imminent revert.
2817
2817
2818 Make sure you have the working directory locked when calling this function.
2818 Make sure you have the working directory locked when calling this function.
2819 """
2819 """
2820 parent, p2 = parents
2820 parent, p2 = parents
2821 node = ctx.node()
2821 node = ctx.node()
2822 def checkout(f):
2822 def checkout(f):
2823 fc = ctx[f]
2823 fc = ctx[f]
2824 repo.wwrite(f, fc.data(), fc.flags())
2824 repo.wwrite(f, fc.data(), fc.flags())
2825
2825
2826 audit_path = pathutil.pathauditor(repo.root)
2826 audit_path = pathutil.pathauditor(repo.root)
2827 for f in actions['forget'][0]:
2827 for f in actions['forget'][0]:
2828 repo.dirstate.drop(f)
2828 repo.dirstate.drop(f)
2829 for f in actions['remove'][0]:
2829 for f in actions['remove'][0]:
2830 audit_path(f)
2830 audit_path(f)
2831 util.unlinkpath(repo.wjoin(f))
2831 util.unlinkpath(repo.wjoin(f))
2832 repo.dirstate.remove(f)
2832 repo.dirstate.remove(f)
2833 for f in actions['drop'][0]:
2833 for f in actions['drop'][0]:
2834 audit_path(f)
2834 audit_path(f)
2835 repo.dirstate.remove(f)
2835 repo.dirstate.remove(f)
2836
2836
2837 normal = None
2837 normal = None
2838 if node == parent:
2838 if node == parent:
2839 # We're reverting to our parent. If possible, we'd like status
2839 # We're reverting to our parent. If possible, we'd like status
2840 # to report the file as clean. We have to use normallookup for
2840 # to report the file as clean. We have to use normallookup for
2841 # merges to avoid losing information about merged/dirty files.
2841 # merges to avoid losing information about merged/dirty files.
2842 if p2 != nullid:
2842 if p2 != nullid:
2843 normal = repo.dirstate.normallookup
2843 normal = repo.dirstate.normallookup
2844 else:
2844 else:
2845 normal = repo.dirstate.normal
2845 normal = repo.dirstate.normal
2846 for f in actions['revert'][0]:
2846 for f in actions['revert'][0]:
2847 checkout(f)
2847 checkout(f)
2848 if normal:
2848 if normal:
2849 normal(f)
2849 normal(f)
2850
2850
2851 for f in actions['add'][0]:
2851 for f in actions['add'][0]:
2852 checkout(f)
2852 checkout(f)
2853 repo.dirstate.add(f)
2853 repo.dirstate.add(f)
2854
2854
2855 normal = repo.dirstate.normallookup
2855 normal = repo.dirstate.normallookup
2856 if node == parent and p2 == nullid:
2856 if node == parent and p2 == nullid:
2857 normal = repo.dirstate.normal
2857 normal = repo.dirstate.normal
2858 for f in actions['undelete'][0]:
2858 for f in actions['undelete'][0]:
2859 checkout(f)
2859 checkout(f)
2860 normal(f)
2860 normal(f)
2861
2861
2862 copied = copies.pathcopies(repo[parent], ctx)
2862 copied = copies.pathcopies(repo[parent], ctx)
2863
2863
2864 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
2864 for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
2865 if f in copied:
2865 if f in copied:
2866 repo.dirstate.copy(copied[f], f)
2866 repo.dirstate.copy(copied[f], f)
2867
2867
2868 def command(table):
2868 def command(table):
2869 """Returns a function object to be used as a decorator for making commands.
2869 """Returns a function object to be used as a decorator for making commands.
2870
2870
2871 This function receives a command table as its argument. The table should
2871 This function receives a command table as its argument. The table should
2872 be a dict.
2872 be a dict.
2873
2873
2874 The returned function can be used as a decorator for adding commands
2874 The returned function can be used as a decorator for adding commands
2875 to that command table. This function accepts multiple arguments to define
2875 to that command table. This function accepts multiple arguments to define
2876 a command.
2876 a command.
2877
2877
2878 The first argument is the command name.
2878 The first argument is the command name.
2879
2879
2880 The options argument is an iterable of tuples defining command arguments.
2880 The options argument is an iterable of tuples defining command arguments.
2881 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
2881 See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple.
2882
2882
2883 The synopsis argument defines a short, one line summary of how to use the
2883 The synopsis argument defines a short, one line summary of how to use the
2884 command. This shows up in the help output.
2884 command. This shows up in the help output.
2885
2885
2886 The norepo argument defines whether the command does not require a
2886 The norepo argument defines whether the command does not require a
2887 local repository. Most commands operate against a repository, thus the
2887 local repository. Most commands operate against a repository, thus the
2888 default is False.
2888 default is False.
2889
2889
2890 The optionalrepo argument defines whether the command optionally requires
2890 The optionalrepo argument defines whether the command optionally requires
2891 a local repository.
2891 a local repository.
2892
2892
2893 The inferrepo argument defines whether to try to find a repository from the
2893 The inferrepo argument defines whether to try to find a repository from the
2894 command line arguments. If True, arguments will be examined for potential
2894 command line arguments. If True, arguments will be examined for potential
2895 repository locations. See ``findrepo()``. If a repository is found, it
2895 repository locations. See ``findrepo()``. If a repository is found, it
2896 will be used.
2896 will be used.
2897 """
2897 """
2898 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
2898 def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
2899 inferrepo=False):
2899 inferrepo=False):
2900 def decorator(func):
2900 def decorator(func):
2901 if synopsis:
2901 if synopsis:
2902 table[name] = func, list(options), synopsis
2902 table[name] = func, list(options), synopsis
2903 else:
2903 else:
2904 table[name] = func, list(options)
2904 table[name] = func, list(options)
2905
2905
2906 if norepo:
2906 if norepo:
2907 # Avoid import cycle.
2907 # Avoid import cycle.
2908 import commands
2908 import commands
2909 commands.norepo += ' %s' % ' '.join(parsealiases(name))
2909 commands.norepo += ' %s' % ' '.join(parsealiases(name))
2910
2910
2911 if optionalrepo:
2911 if optionalrepo:
2912 import commands
2912 import commands
2913 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
2913 commands.optionalrepo += ' %s' % ' '.join(parsealiases(name))
2914
2914
2915 if inferrepo:
2915 if inferrepo:
2916 import commands
2916 import commands
2917 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
2917 commands.inferrepo += ' %s' % ' '.join(parsealiases(name))
2918
2918
2919 return func
2919 return func
2920 return decorator
2920 return decorator
2921
2921
2922 return cmd
2922 return cmd
2923
2923
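As a usage illustration only (the 'hello' command, its option, and the docstring are invented and not part of Mercurial): an extension typically builds its own table and decorates command functions with the object returned by command(). A minimal sketch under those assumptions:

    from mercurial import cmdutil

    cmdtable = {}
    command = cmdutil.command(cmdtable)   # the factory defined above

    @command('hello', [('g', 'greeting', 'Hello', 'greeting to print')],
             'hg hello [-g TEXT]', norepo=True)
    def hello(ui, **opts):
        """print a friendly greeting (illustrative example)"""
        ui.write("%s, world\n" % opts['greeting'])

    # cmdtable now maps 'hello' to (hello, [the option tuple], 'hg hello [-g TEXT]')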
2924 # a list of (ui, repo, otherpeer, opts, missing) functions called by
2924 # a list of (ui, repo, otherpeer, opts, missing) functions called by
2925 # commands.outgoing. "missing" is the "missing" attribute of the result of
2925 # commands.outgoing. "missing" is the "missing" attribute of the result of
2926 # "findcommonoutgoing()"
2926 # "findcommonoutgoing()"
2927 outgoinghooks = util.hooks()
2927 outgoinghooks = util.hooks()
2928
2928
2929 # a list of (ui, repo) functions called by commands.summary
2929 # a list of (ui, repo) functions called by commands.summary
2930 summaryhooks = util.hooks()
2930 summaryhooks = util.hooks()
2931
2931
2932 # a list of (ui, repo, opts, changes) functions called by commands.summary.
2932 # a list of (ui, repo, opts, changes) functions called by commands.summary.
2933 #
2933 #
2934 # functions should return tuple of booleans below, if 'changes' is None:
2934 # functions should return tuple of booleans below, if 'changes' is None:
2935 # (whether-incomings-are-needed, whether-outgoings-are-needed)
2935 # (whether-incomings-are-needed, whether-outgoings-are-needed)
2936 #
2936 #
2937 # otherwise, 'changes' is a tuple of tuples below:
2937 # otherwise, 'changes' is a tuple of tuples below:
2938 # - (sourceurl, sourcebranch, sourcepeer, incoming)
2938 # - (sourceurl, sourcebranch, sourcepeer, incoming)
2939 # - (desturl, destbranch, destpeer, outgoing)
2939 # - (desturl, destbranch, destpeer, outgoing)
2940 summaryremotehooks = util.hooks()
2940 summaryremotehooks = util.hooks()
2941
2941
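A hedged sketch of a hook that honours the contract described in the comment above (the hook body and the 'myext' registration name are illustrative, not taken from any real extension):

    def _summaryremotehook(ui, repo, opts, changes):
        if changes is None:
            # first pass: request only outgoing information
            return (False, True)
        source, destination = changes
        desturl, destbranch, destpeer, outgoing = destination
        if outgoing and outgoing.missing:
            ui.status('myext: %d changesets would be pushed to %s\n'
                      % (len(outgoing.missing), desturl))

    # registered from an extension, e.g.:
    # cmdutil.summaryremotehooks.add('myext', _summaryremotehook)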
2942 # A list of state files kept by multistep operations like graft.
2942 # A list of state files kept by multistep operations like graft.
2943 # Since graft cannot be aborted, it is considered 'clearable' by update.
2943 # Since graft cannot be aborted, it is considered 'clearable' by update.
2944 # note: bisect is intentionally excluded
2944 # note: bisect is intentionally excluded
2945 # (state file, clearable, allowcommit, error, hint)
2945 # (state file, clearable, allowcommit, error, hint)
2946 unfinishedstates = [
2946 unfinishedstates = [
2947 ('graftstate', True, False, _('graft in progress'),
2947 ('graftstate', True, False, _('graft in progress'),
2948 _("use 'hg graft --continue' or 'hg update' to abort")),
2948 _("use 'hg graft --continue' or 'hg update' to abort")),
2949 ('updatestate', True, False, _('last update was interrupted'),
2949 ('updatestate', True, False, _('last update was interrupted'),
2950 _("use 'hg update' to get a consistent checkout"))
2950 _("use 'hg update' to get a consistent checkout"))
2951 ]
2951 ]
2952
2952
2953 def checkunfinished(repo, commit=False):
2953 def checkunfinished(repo, commit=False):
2954 '''Look for an unfinished multistep operation, like graft, and abort
2954 '''Look for an unfinished multistep operation, like graft, and abort
2955 if found. It's probably good to check this right before
2955 if found. It's probably good to check this right before
2956 bailifchanged().
2956 bailifchanged().
2957 '''
2957 '''
2958 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2958 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2959 if commit and allowcommit:
2959 if commit and allowcommit:
2960 continue
2960 continue
2961 if repo.vfs.exists(f):
2961 if repo.vfs.exists(f):
2962 raise util.Abort(msg, hint=hint)
2962 raise util.Abort(msg, hint=hint)
2963
2963
2964 def clearunfinished(repo):
2964 def clearunfinished(repo):
2965 '''Check for unfinished operations (as above), and clear the ones
2965 '''Check for unfinished operations (as above), and clear the ones
2966 that are clearable.
2966 that are clearable.
2967 '''
2967 '''
2968 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2968 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2969 if not clearable and repo.vfs.exists(f):
2969 if not clearable and repo.vfs.exists(f):
2970 raise util.Abort(msg, hint=hint)
2970 raise util.Abort(msg, hint=hint)
2971 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2971 for f, clearable, allowcommit, msg, hint in unfinishedstates:
2972 if clearable and repo.vfs.exists(f):
2972 if clearable and repo.vfs.exists(f):
2973 util.unlink(repo.join(f))
2973 util.unlink(repo.join(f))
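For illustration (the 'myextstate' file and the messages are invented): an extension that wants checkunfinished() and clearunfinished() to know about its own multistep operation can append an entry of the same shape at load time, roughly like this:

    from mercurial import cmdutil
    from mercurial.i18n import _

    cmdutil.unfinishedstates.append(
        # (state file, clearable, allowcommit, error, hint)
        ('myextstate', False, False, _('myext operation in progress'),
         _("use 'hg myext --continue' or 'hg myext --abort'")))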
@@ -1,423 +1,426 b''
1 # match.py - filename matching
1 # match.py - filename matching
2 #
2 #
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import re
8 import re
9 import util, pathutil
9 import util, pathutil
10 from i18n import _
10 from i18n import _
11
11
12 def _rematcher(regex):
12 def _rematcher(regex):
13 '''compile the regexp with the best available regexp engine and return a
13 '''compile the regexp with the best available regexp engine and return a
14 matcher function'''
14 matcher function'''
15 m = util.re.compile(regex)
15 m = util.re.compile(regex)
16 try:
16 try:
17 # slightly faster, provided by facebook's re2 bindings
17 # slightly faster, provided by facebook's re2 bindings
18 return m.test_match
18 return m.test_match
19 except AttributeError:
19 except AttributeError:
20 return m.match
20 return m.match
21
21
22 def _expandsets(kindpats, ctx):
22 def _expandsets(kindpats, ctx):
23 '''Returns the kindpats list with the 'set' patterns expanded.'''
23 '''Returns the kindpats list with the 'set' patterns expanded.'''
24 fset = set()
24 fset = set()
25 other = []
25 other = []
26
26
27 for kind, pat in kindpats:
27 for kind, pat in kindpats:
28 if kind == 'set':
28 if kind == 'set':
29 if not ctx:
29 if not ctx:
30 raise util.Abort("fileset expression with no context")
30 raise util.Abort("fileset expression with no context")
31 s = ctx.getfileset(pat)
31 s = ctx.getfileset(pat)
32 fset.update(s)
32 fset.update(s)
33 continue
33 continue
34 other.append((kind, pat))
34 other.append((kind, pat))
35 return fset, other
35 return fset, other
36
36
37 class match(object):
37 class match(object):
38 def __init__(self, root, cwd, patterns, include=[], exclude=[],
38 def __init__(self, root, cwd, patterns, include=[], exclude=[],
39 default='glob', exact=False, auditor=None, ctx=None):
39 default='glob', exact=False, auditor=None, ctx=None):
40 """build an object to match a set of file patterns
40 """build an object to match a set of file patterns
41
41
42 arguments:
42 arguments:
43 root - the canonical root of the tree you're matching against
43 root - the canonical root of the tree you're matching against
44 cwd - the current working directory, if relevant
44 cwd - the current working directory, if relevant
45 patterns - patterns to find
45 patterns - patterns to find
46 include - patterns to include (unless they are excluded)
46 include - patterns to include (unless they are excluded)
47 exclude - patterns to exclude (even if they are included)
47 exclude - patterns to exclude (even if they are included)
48 default - if a pattern in patterns has no explicit type, assume this one
48 default - if a pattern in patterns has no explicit type, assume this one
49 exact - patterns are actually filenames (include/exclude still apply)
49 exact - patterns are actually filenames (include/exclude still apply)
50
50
51 a pattern is one of:
51 a pattern is one of:
52 'glob:<glob>' - a glob relative to cwd
52 'glob:<glob>' - a glob relative to cwd
53 're:<regexp>' - a regular expression
53 're:<regexp>' - a regular expression
54 'path:<path>' - a path relative to repository root
54 'path:<path>' - a path relative to repository root
55 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
55 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
56 'relpath:<path>' - a path relative to cwd
56 'relpath:<path>' - a path relative to cwd
57 'relre:<regexp>' - a regexp that needn't match the start of a name
57 'relre:<regexp>' - a regexp that needn't match the start of a name
58 'set:<fileset>' - a fileset expression
58 'set:<fileset>' - a fileset expression
59 '<something>' - a pattern of the specified default type
59 '<something>' - a pattern of the specified default type
60 """
60 """
61
61
62 self._root = root
62 self._root = root
63 self._cwd = cwd
63 self._cwd = cwd
64 self._files = [] # exact files and roots of patterns
64 self._files = [] # exact files and roots of patterns
65 self._anypats = bool(include or exclude)
65 self._anypats = bool(include or exclude)
66 self._ctx = ctx
66 self._ctx = ctx
67 self._always = False
67 self._always = False
68 self._pathrestricted = bool(include or exclude or patterns)
68 self._pathrestricted = bool(include or exclude or patterns)
69
69
70 matchfns = []
70 matchfns = []
71 if include:
71 if include:
72 kindpats = _normalize(include, 'glob', root, cwd, auditor)
72 kindpats = _normalize(include, 'glob', root, cwd, auditor)
73 self.includepat, im = _buildmatch(ctx, kindpats, '(?:/|$)')
73 self.includepat, im = _buildmatch(ctx, kindpats, '(?:/|$)')
74 matchfns.append(im)
74 matchfns.append(im)
75 if exclude:
75 if exclude:
76 kindpats = _normalize(exclude, 'glob', root, cwd, auditor)
76 kindpats = _normalize(exclude, 'glob', root, cwd, auditor)
77 self.excludepat, em = _buildmatch(ctx, kindpats, '(?:/|$)')
77 self.excludepat, em = _buildmatch(ctx, kindpats, '(?:/|$)')
78 matchfns.append(lambda f: not em(f))
78 matchfns.append(lambda f: not em(f))
79 if exact:
79 if exact:
80 if isinstance(patterns, list):
80 if isinstance(patterns, list):
81 self._files = patterns
81 self._files = patterns
82 else:
82 else:
83 self._files = list(patterns)
83 self._files = list(patterns)
84 matchfns.append(self.exact)
84 matchfns.append(self.exact)
85 elif patterns:
85 elif patterns:
86 kindpats = _normalize(patterns, default, root, cwd, auditor)
86 kindpats = _normalize(patterns, default, root, cwd, auditor)
87 self._files = _roots(kindpats)
87 self._files = _roots(kindpats)
88 self._anypats = self._anypats or _anypats(kindpats)
88 self._anypats = self._anypats or _anypats(kindpats)
89 self.patternspat, pm = _buildmatch(ctx, kindpats, '$')
89 self.patternspat, pm = _buildmatch(ctx, kindpats, '$')
90 matchfns.append(pm)
90 matchfns.append(pm)
91
91
92 if not matchfns:
92 if not matchfns:
93 m = util.always
93 m = util.always
94 self._always = True
94 self._always = True
95 elif len(matchfns) == 1:
95 elif len(matchfns) == 1:
96 m = matchfns[0]
96 m = matchfns[0]
97 else:
97 else:
98 def m(f):
98 def m(f):
99 for matchfn in matchfns:
99 for matchfn in matchfns:
100 if not matchfn(f):
100 if not matchfn(f):
101 return False
101 return False
102 return True
102 return True
103
103
104 self.matchfn = m
104 self.matchfn = m
105 self._fmap = set(self._files)
105 self._fmap = set(self._files)
106
106
107 def __call__(self, fn):
107 def __call__(self, fn):
108 return self.matchfn(fn)
108 return self.matchfn(fn)
109 def __iter__(self):
109 def __iter__(self):
110 for f in self._files:
110 for f in self._files:
111 yield f
111 yield f
112
112
113 # Callbacks related to how the matcher is used by dirstate.walk.
113 # Callbacks related to how the matcher is used by dirstate.walk.
114 # Subscribers to these events must monkeypatch the matcher object.
114 # Subscribers to these events must monkeypatch the matcher object.
115 def bad(self, f, msg):
115 def bad(self, f, msg):
116 '''Callback from dirstate.walk for each explicit file that can't be
116 '''Callback from dirstate.walk for each explicit file that can't be
117 found/accessed, with an error message.'''
117 found/accessed, with an error message.'''
118 pass
118 pass
119
119
120 # If an explicitdir is set, it will be called when an explicitly listed
120 # If an explicitdir is set, it will be called when an explicitly listed
121 # directory is visited.
121 # directory is visited.
122 explicitdir = None
122 explicitdir = None
123
123
124 # If a traversedir is set, it will be called when a directory discovered
124 # If a traversedir is set, it will be called when a directory discovered
125 # by recursive traversal is visited.
125 # by recursive traversal is visited.
126 traversedir = None
126 traversedir = None
127
127
128 def abs(self, f):
128 def abs(self, f):
129 '''Convert a repo path back to path that is relative to the root of the
129 '''Convert a repo path back to path that is relative to the root of the
130 matcher.'''
130 matcher.'''
131 return f
131 return f
132
132
133 def rel(self, f):
133 def rel(self, f):
134 '''Convert repo path back to path that is relative to cwd of matcher.'''
134 '''Convert repo path back to path that is relative to cwd of matcher.'''
135 return util.pathto(self._root, self._cwd, f)
135 return util.pathto(self._root, self._cwd, f)
136
136
137 def uipath(self, f):
137 def uipath(self, f):
138 '''Convert repo path to a display path. If patterns or -I/-X were used
138 '''Convert repo path to a display path. If patterns or -I/-X were used
139 to create this matcher, the display path will be relative to cwd.
139 to create this matcher, the display path will be relative to cwd.
140 Otherwise it is relative to the root of the repo.'''
140 Otherwise it is relative to the root of the repo.'''
141 return (self._pathrestricted and self.rel(f)) or f
141 return (self._pathrestricted and self.rel(f)) or self.abs(f)
142
142
143 def files(self):
143 def files(self):
144 '''Explicitly listed files or patterns or roots:
144 '''Explicitly listed files or patterns or roots:
145 if no patterns or .always(): empty list,
145 if no patterns or .always(): empty list,
146 if exact: list exact files,
146 if exact: list exact files,
147 if not .anypats(): list all files and dirs,
147 if not .anypats(): list all files and dirs,
148 else: optimal roots'''
148 else: optimal roots'''
149 return self._files
149 return self._files
150
150
151 def exact(self, f):
151 def exact(self, f):
152 '''Returns True if f is in .files().'''
152 '''Returns True if f is in .files().'''
153 return f in self._fmap
153 return f in self._fmap
154
154
155 def anypats(self):
155 def anypats(self):
156 '''Matcher uses patterns or include/exclude.'''
156 '''Matcher uses patterns or include/exclude.'''
157 return self._anypats
157 return self._anypats
158
158
159 def always(self):
159 def always(self):
160 '''Matcher will match everything and .files() will be empty
160 '''Matcher will match everything and .files() will be empty
161 - optimization might be possible and necessary.'''
161 - optimization might be possible and necessary.'''
162 return self._always
162 return self._always
163
163
164 def exact(root, cwd, files):
164 def exact(root, cwd, files):
165 return match(root, cwd, files, exact=True)
165 return match(root, cwd, files, exact=True)
166
166
167 def always(root, cwd):
167 def always(root, cwd):
168 return match(root, cwd, [])
168 return match(root, cwd, [])
169
169
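Before the subdirectory adapter below, a small illustrative sketch of how the pattern kinds documented in match.__init__ above combine (the repository root, patterns, and expected results are invented, and POSIX path separators are assumed):

    m = match('/repo', '', ['glob:src/*.c', 're:.*\.h$'],
              exclude=['path:src/vendor'])
    m('src/main.c')          # True: matches the rooted glob
    m('src/vendor/zlib.c')   # False: the excluded subtree wins
    m('include/api.h')       # True: matches the regexp pattern
    m.files()                # pattern roots, here ['src', '.']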
170 class narrowmatcher(match):
170 class narrowmatcher(match):
171 """Adapt a matcher to work on a subdirectory only.
171 """Adapt a matcher to work on a subdirectory only.
172
172
173 The paths are remapped to remove/insert the path as needed:
173 The paths are remapped to remove/insert the path as needed:
174
174
175 >>> m1 = match('root', '', ['a.txt', 'sub/b.txt'])
175 >>> m1 = match('root', '', ['a.txt', 'sub/b.txt'])
176 >>> m2 = narrowmatcher('sub', m1)
176 >>> m2 = narrowmatcher('sub', m1)
177 >>> bool(m2('a.txt'))
177 >>> bool(m2('a.txt'))
178 False
178 False
179 >>> bool(m2('b.txt'))
179 >>> bool(m2('b.txt'))
180 True
180 True
181 >>> bool(m2.matchfn('a.txt'))
181 >>> bool(m2.matchfn('a.txt'))
182 False
182 False
183 >>> bool(m2.matchfn('b.txt'))
183 >>> bool(m2.matchfn('b.txt'))
184 True
184 True
185 >>> m2.files()
185 >>> m2.files()
186 ['b.txt']
186 ['b.txt']
187 >>> m2.exact('b.txt')
187 >>> m2.exact('b.txt')
188 True
188 True
189 >>> m2.rel('b.txt')
189 >>> util.pconvert(m2.rel('b.txt'))
190 'b.txt'
190 'sub/b.txt'
191 >>> def bad(f, msg):
191 >>> def bad(f, msg):
192 ... print "%s: %s" % (f, msg)
192 ... print "%s: %s" % (f, msg)
193 >>> m1.bad = bad
193 >>> m1.bad = bad
194 >>> m2.bad('x.txt', 'No such file')
194 >>> m2.bad('x.txt', 'No such file')
195 sub/x.txt: No such file
195 sub/x.txt: No such file
196 >>> m2.abs('c.txt')
196 >>> m2.abs('c.txt')
197 'sub/c.txt'
197 'sub/c.txt'
198 """
198 """
199
199
200 def __init__(self, path, matcher):
200 def __init__(self, path, matcher):
201 self._root = matcher._root
201 self._root = matcher._root
202 self._cwd = matcher._cwd
202 self._cwd = matcher._cwd
203 self._path = path
203 self._path = path
204 self._matcher = matcher
204 self._matcher = matcher
205 self._always = matcher._always
205 self._always = matcher._always
206 self._pathrestricted = matcher._pathrestricted
206 self._pathrestricted = matcher._pathrestricted
207
207
208 self._files = [f[len(path) + 1:] for f in matcher._files
208 self._files = [f[len(path) + 1:] for f in matcher._files
209 if f.startswith(path + "/")]
209 if f.startswith(path + "/")]
210 self._anypats = matcher._anypats
210 self._anypats = matcher._anypats
211 self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn)
211 self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn)
212 self._fmap = set(self._files)
212 self._fmap = set(self._files)
213
213
214 def abs(self, f):
214 def abs(self, f):
215 return self._matcher.abs(self._path + "/" + f)
215 return self._matcher.abs(self._path + "/" + f)
216
216
217 def bad(self, f, msg):
217 def bad(self, f, msg):
218 self._matcher.bad(self._path + "/" + f, msg)
218 self._matcher.bad(self._path + "/" + f, msg)
219
219
220 def rel(self, f):
221 return self._matcher.rel(self._path + "/" + f)
222
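The rel() override added here routes the subdirectory-relative name back through the wrapped matcher, so paths are reported relative to the user's cwd rather than to the subrepo root (which is what the updated doctest above exercises). A rough before/after sketch, with invented paths and POSIX separators assumed:

    m1 = match('/repo', 'docs', [])      # user invoked hg from /repo/docs
    m2 = narrowmatcher('sub', m1)
    m2.rel('b.txt')
    # with this method:  '../sub/b.txt'  - m1.rel('sub/b.txt'), relative to the cwd
    # previously:        '../b.txt'      - the inherited match.rel() dropped 'sub/'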
220 def patkind(pattern, default=None):
223 def patkind(pattern, default=None):
221 '''If pattern is 'kind:pat' with a known kind, return kind.'''
224 '''If pattern is 'kind:pat' with a known kind, return kind.'''
222 return _patsplit(pattern, default)[0]
225 return _patsplit(pattern, default)[0]
223
226
224 def _patsplit(pattern, default):
227 def _patsplit(pattern, default):
225 """Split a string into the optional pattern kind prefix and the actual
228 """Split a string into the optional pattern kind prefix and the actual
226 pattern."""
229 pattern."""
227 if ':' in pattern:
230 if ':' in pattern:
228 kind, pat = pattern.split(':', 1)
231 kind, pat = pattern.split(':', 1)
229 if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
232 if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
230 'listfile', 'listfile0', 'set'):
233 'listfile', 'listfile0', 'set'):
231 return kind, pat
234 return kind, pat
232 return default, pattern
235 return default, pattern
233
236
234 def _globre(pat):
237 def _globre(pat):
235 r'''Convert an extended glob string to a regexp string.
238 r'''Convert an extended glob string to a regexp string.
236
239
237 >>> print _globre(r'?')
240 >>> print _globre(r'?')
238 .
241 .
239 >>> print _globre(r'*')
242 >>> print _globre(r'*')
240 [^/]*
243 [^/]*
241 >>> print _globre(r'**')
244 >>> print _globre(r'**')
242 .*
245 .*
243 >>> print _globre(r'**/a')
246 >>> print _globre(r'**/a')
244 (?:.*/)?a
247 (?:.*/)?a
245 >>> print _globre(r'a/**/b')
248 >>> print _globre(r'a/**/b')
246 a\/(?:.*/)?b
249 a\/(?:.*/)?b
247 >>> print _globre(r'[a*?!^][^b][!c]')
250 >>> print _globre(r'[a*?!^][^b][!c]')
248 [a*?!^][\^b][^c]
251 [a*?!^][\^b][^c]
249 >>> print _globre(r'{a,b}')
252 >>> print _globre(r'{a,b}')
250 (?:a|b)
253 (?:a|b)
251 >>> print _globre(r'.\*\?')
254 >>> print _globre(r'.\*\?')
252 \.\*\?
255 \.\*\?
253 '''
256 '''
254 i, n = 0, len(pat)
257 i, n = 0, len(pat)
255 res = ''
258 res = ''
256 group = 0
259 group = 0
257 escape = util.re.escape
260 escape = util.re.escape
258 def peek():
261 def peek():
259 return i < n and pat[i]
262 return i < n and pat[i]
260 while i < n:
263 while i < n:
261 c = pat[i]
264 c = pat[i]
262 i += 1
265 i += 1
263 if c not in '*?[{},\\':
266 if c not in '*?[{},\\':
264 res += escape(c)
267 res += escape(c)
265 elif c == '*':
268 elif c == '*':
266 if peek() == '*':
269 if peek() == '*':
267 i += 1
270 i += 1
268 if peek() == '/':
271 if peek() == '/':
269 i += 1
272 i += 1
270 res += '(?:.*/)?'
273 res += '(?:.*/)?'
271 else:
274 else:
272 res += '.*'
275 res += '.*'
273 else:
276 else:
274 res += '[^/]*'
277 res += '[^/]*'
275 elif c == '?':
278 elif c == '?':
276 res += '.'
279 res += '.'
277 elif c == '[':
280 elif c == '[':
278 j = i
281 j = i
279 if j < n and pat[j] in '!]':
282 if j < n and pat[j] in '!]':
280 j += 1
283 j += 1
281 while j < n and pat[j] != ']':
284 while j < n and pat[j] != ']':
282 j += 1
285 j += 1
283 if j >= n:
286 if j >= n:
284 res += '\\['
287 res += '\\['
285 else:
288 else:
286 stuff = pat[i:j].replace('\\','\\\\')
289 stuff = pat[i:j].replace('\\','\\\\')
287 i = j + 1
290 i = j + 1
288 if stuff[0] == '!':
291 if stuff[0] == '!':
289 stuff = '^' + stuff[1:]
292 stuff = '^' + stuff[1:]
290 elif stuff[0] == '^':
293 elif stuff[0] == '^':
291 stuff = '\\' + stuff
294 stuff = '\\' + stuff
292 res = '%s[%s]' % (res, stuff)
295 res = '%s[%s]' % (res, stuff)
293 elif c == '{':
296 elif c == '{':
294 group += 1
297 group += 1
295 res += '(?:'
298 res += '(?:'
296 elif c == '}' and group:
299 elif c == '}' and group:
297 res += ')'
300 res += ')'
298 group -= 1
301 group -= 1
299 elif c == ',' and group:
302 elif c == ',' and group:
300 res += '|'
303 res += '|'
301 elif c == '\\':
304 elif c == '\\':
302 p = peek()
305 p = peek()
303 if p:
306 if p:
304 i += 1
307 i += 1
305 res += escape(p)
308 res += escape(p)
306 else:
309 else:
307 res += escape(c)
310 res += escape(c)
308 else:
311 else:
309 res += escape(c)
312 res += escape(c)
310 return res
313 return res
311
314
312 def _regex(kind, pat, globsuffix):
315 def _regex(kind, pat, globsuffix):
313 '''Convert a (normalized) pattern of any kind into a regular expression.
316 '''Convert a (normalized) pattern of any kind into a regular expression.
314 globsuffix is appended to the regexp of globs.'''
317 globsuffix is appended to the regexp of globs.'''
315 if not pat:
318 if not pat:
316 return ''
319 return ''
317 if kind == 're':
320 if kind == 're':
318 return pat
321 return pat
319 if kind == 'path':
322 if kind == 'path':
320 return '^' + util.re.escape(pat) + '(?:/|$)'
323 return '^' + util.re.escape(pat) + '(?:/|$)'
321 if kind == 'relglob':
324 if kind == 'relglob':
322 return '(?:|.*/)' + _globre(pat) + globsuffix
325 return '(?:|.*/)' + _globre(pat) + globsuffix
323 if kind == 'relpath':
326 if kind == 'relpath':
324 return util.re.escape(pat) + '(?:/|$)'
327 return util.re.escape(pat) + '(?:/|$)'
325 if kind == 'relre':
328 if kind == 'relre':
326 if pat.startswith('^'):
329 if pat.startswith('^'):
327 return pat
330 return pat
328 return '.*' + pat
331 return '.*' + pat
329 return _globre(pat) + globsuffix
332 return _globre(pat) + globsuffix
330
333
331 def _buildmatch(ctx, kindpats, globsuffix):
334 def _buildmatch(ctx, kindpats, globsuffix):
332 '''Return regexp string and a matcher function for kindpats.
335 '''Return regexp string and a matcher function for kindpats.
333 globsuffix is appended to the regexp of globs.'''
336 globsuffix is appended to the regexp of globs.'''
334 fset, kindpats = _expandsets(kindpats, ctx)
337 fset, kindpats = _expandsets(kindpats, ctx)
335 if not kindpats:
338 if not kindpats:
336 return "", fset.__contains__
339 return "", fset.__contains__
337
340
338 regex, mf = _buildregexmatch(kindpats, globsuffix)
341 regex, mf = _buildregexmatch(kindpats, globsuffix)
339 if fset:
342 if fset:
340 return regex, lambda f: f in fset or mf(f)
343 return regex, lambda f: f in fset or mf(f)
341 return regex, mf
344 return regex, mf
342
345
343 def _buildregexmatch(kindpats, globsuffix):
346 def _buildregexmatch(kindpats, globsuffix):
344 """Build a match function from a list of kinds and kindpats,
347 """Build a match function from a list of kinds and kindpats,
345 return regexp string and a matcher function."""
348 return regexp string and a matcher function."""
346 try:
349 try:
347 regex = '(?:%s)' % '|'.join([_regex(k, p, globsuffix)
350 regex = '(?:%s)' % '|'.join([_regex(k, p, globsuffix)
348 for (k, p) in kindpats])
351 for (k, p) in kindpats])
349 if len(regex) > 20000:
352 if len(regex) > 20000:
350 raise OverflowError
353 raise OverflowError
351 return regex, _rematcher(regex)
354 return regex, _rematcher(regex)
352 except OverflowError:
355 except OverflowError:
353 # We're using a Python with a tiny regex engine and we
356 # We're using a Python with a tiny regex engine and we
354 # made it explode, so we'll divide the pattern list in two
357 # made it explode, so we'll divide the pattern list in two
355 # until it works
358 # until it works
356 l = len(kindpats)
359 l = len(kindpats)
357 if l < 2:
360 if l < 2:
358 raise
361 raise
359 regexa, a = _buildregexmatch(kindpats[:l//2], globsuffix)
362 regexa, a = _buildregexmatch(kindpats[:l//2], globsuffix)
360 regexb, b = _buildregexmatch(kindpats[l//2:], globsuffix)
363 regexb, b = _buildregexmatch(kindpats[l//2:], globsuffix)
361 return regex, lambda s: a(s) or b(s)
364 return regex, lambda s: a(s) or b(s)
362 except re.error:
365 except re.error:
363 for k, p in kindpats:
366 for k, p in kindpats:
364 try:
367 try:
365 _rematcher('(?:%s)' % _regex(k, p, globsuffix))
368 _rematcher('(?:%s)' % _regex(k, p, globsuffix))
366 except re.error:
369 except re.error:
367 raise util.Abort(_("invalid pattern (%s): %s") % (k, p))
370 raise util.Abort(_("invalid pattern (%s): %s") % (k, p))
368 raise util.Abort(_("invalid pattern"))
371 raise util.Abort(_("invalid pattern"))
369
372
370 def _normalize(patterns, default, root, cwd, auditor):
373 def _normalize(patterns, default, root, cwd, auditor):
371 '''Convert 'kind:pat' from the patterns list to tuples with kind and
374 '''Convert 'kind:pat' from the patterns list to tuples with kind and
372 normalized and rooted patterns and with listfiles expanded.'''
375 normalized and rooted patterns and with listfiles expanded.'''
373 kindpats = []
376 kindpats = []
374 for kind, pat in [_patsplit(p, default) for p in patterns]:
377 for kind, pat in [_patsplit(p, default) for p in patterns]:
375 if kind in ('glob', 'relpath'):
378 if kind in ('glob', 'relpath'):
376 pat = pathutil.canonpath(root, cwd, pat, auditor)
379 pat = pathutil.canonpath(root, cwd, pat, auditor)
377 elif kind in ('relglob', 'path'):
380 elif kind in ('relglob', 'path'):
378 pat = util.normpath(pat)
381 pat = util.normpath(pat)
379 elif kind in ('listfile', 'listfile0'):
382 elif kind in ('listfile', 'listfile0'):
380 try:
383 try:
381 files = util.readfile(pat)
384 files = util.readfile(pat)
382 if kind == 'listfile0':
385 if kind == 'listfile0':
383 files = files.split('\0')
386 files = files.split('\0')
384 else:
387 else:
385 files = files.splitlines()
388 files = files.splitlines()
386 files = [f for f in files if f]
389 files = [f for f in files if f]
387 except EnvironmentError:
390 except EnvironmentError:
388 raise util.Abort(_("unable to read file list (%s)") % pat)
391 raise util.Abort(_("unable to read file list (%s)") % pat)
389 kindpats += _normalize(files, default, root, cwd, auditor)
392 kindpats += _normalize(files, default, root, cwd, auditor)
390 continue
393 continue
391 # else: re or relre - which cannot be normalized
394 # else: re or relre - which cannot be normalized
392 kindpats.append((kind, pat))
395 kindpats.append((kind, pat))
393 return kindpats
396 return kindpats
394
397
395 def _roots(kindpats):
398 def _roots(kindpats):
396 '''return roots and exact explicitly listed files from patterns
399 '''return roots and exact explicitly listed files from patterns
397
400
398 >>> _roots([('glob', 'g/*'), ('glob', 'g'), ('glob', 'g*')])
401 >>> _roots([('glob', 'g/*'), ('glob', 'g'), ('glob', 'g*')])
399 ['g', 'g', '.']
402 ['g', 'g', '.']
400 >>> _roots([('relpath', 'r'), ('path', 'p/p'), ('path', '')])
403 >>> _roots([('relpath', 'r'), ('path', 'p/p'), ('path', '')])
401 ['r', 'p/p', '.']
404 ['r', 'p/p', '.']
402 >>> _roots([('relglob', 'rg*'), ('re', 're/'), ('relre', 'rr')])
405 >>> _roots([('relglob', 'rg*'), ('re', 're/'), ('relre', 'rr')])
403 ['.', '.', '.']
406 ['.', '.', '.']
404 '''
407 '''
405 r = []
408 r = []
406 for kind, pat in kindpats:
409 for kind, pat in kindpats:
407 if kind == 'glob': # find the non-glob prefix
410 if kind == 'glob': # find the non-glob prefix
408 root = []
411 root = []
409 for p in pat.split('/'):
412 for p in pat.split('/'):
410 if '[' in p or '{' in p or '*' in p or '?' in p:
413 if '[' in p or '{' in p or '*' in p or '?' in p:
411 break
414 break
412 root.append(p)
415 root.append(p)
413 r.append('/'.join(root) or '.')
416 r.append('/'.join(root) or '.')
414 elif kind in ('relpath', 'path'):
417 elif kind in ('relpath', 'path'):
415 r.append(pat or '.')
418 r.append(pat or '.')
416 else: # relglob, re, relre
419 else: # relglob, re, relre
417 r.append('.')
420 r.append('.')
418 return r
421 return r
419
422
420 def _anypats(kindpats):
423 def _anypats(kindpats):
421 for kind, pat in kindpats:
424 for kind, pat in kindpats:
422 if kind in ('glob', 're', 'relglob', 'relre', 'set'):
425 if kind in ('glob', 're', 'relglob', 'relre', 'set'):
423 return True
426 return True
@@ -1,1108 +1,1108 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile
13 import os, errno, re, glob, tempfile
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
23 class status(tuple):
23 class status(tuple):
24 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
24 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
25 and 'ignored' properties are only relevant to the working copy.
25 and 'ignored' properties are only relevant to the working copy.
26 '''
26 '''
27
27
28 __slots__ = ()
28 __slots__ = ()
29
29
30 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
30 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
31 clean):
31 clean):
32 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
32 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
33 ignored, clean))
33 ignored, clean))
34
34
35 @property
35 @property
36 def modified(self):
36 def modified(self):
37 '''files that have been modified'''
37 '''files that have been modified'''
38 return self[0]
38 return self[0]
39
39
40 @property
40 @property
41 def added(self):
41 def added(self):
42 '''files that have been added'''
42 '''files that have been added'''
43 return self[1]
43 return self[1]
44
44
45 @property
45 @property
46 def removed(self):
46 def removed(self):
47 '''files that have been removed'''
47 '''files that have been removed'''
48 return self[2]
48 return self[2]
49
49
50 @property
50 @property
51 def deleted(self):
51 def deleted(self):
52 '''files that are in the dirstate, but have been deleted from the
52 '''files that are in the dirstate, but have been deleted from the
53 working copy (aka "missing")
53 working copy (aka "missing")
54 '''
54 '''
55 return self[3]
55 return self[3]
56
56
57 @property
57 @property
58 def unknown(self):
58 def unknown(self):
59 '''files not in the dirstate that are not ignored'''
59 '''files not in the dirstate that are not ignored'''
60 return self[4]
60 return self[4]
61
61
62 @property
62 @property
63 def ignored(self):
63 def ignored(self):
64 '''files not in the dirstate that are ignored (by _dirignore())'''
64 '''files not in the dirstate that are ignored (by _dirignore())'''
65 return self[5]
65 return self[5]
66
66
67 @property
67 @property
68 def clean(self):
68 def clean(self):
69 '''files that have not been modified'''
69 '''files that have not been modified'''
70 return self[6]
70 return self[6]
71
71
72 def __repr__(self, *args, **kwargs):
72 def __repr__(self, *args, **kwargs):
73 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
73 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
74 'unknown=%r, ignored=%r, clean=%r>') % self)
74 'unknown=%r, ignored=%r, clean=%r>') % self)
75
75
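A tiny illustration of the named-tuple behaviour (file names invented): attribute access and plain tuple unpacking both work on the same object.

    st = status(['a.c'], ['b.c'], [], [], [], [], ['README'])
    st.modified                      # ['a.c']
    st.added                         # ['b.c']
    modified, added, removed, deleted, unknown, ignored, clean = st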
76 def itersubrepos(ctx1, ctx2):
76 def itersubrepos(ctx1, ctx2):
77 """find subrepos in ctx1 or ctx2"""
77 """find subrepos in ctx1 or ctx2"""
78 # Create a (subpath, ctx) mapping where we prefer subpaths from
78 # Create a (subpath, ctx) mapping where we prefer subpaths from
79 # ctx1. The subpaths from ctx2 are important when the .hgsub file
79 # ctx1. The subpaths from ctx2 are important when the .hgsub file
80 # has been modified (in ctx2) but not yet committed (in ctx1).
80 # has been modified (in ctx2) but not yet committed (in ctx1).
81 subpaths = dict.fromkeys(ctx2.substate, ctx2)
81 subpaths = dict.fromkeys(ctx2.substate, ctx2)
82 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
82 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
83 for subpath, ctx in sorted(subpaths.iteritems()):
83 for subpath, ctx in sorted(subpaths.iteritems()):
84 yield subpath, ctx.sub(subpath)
84 yield subpath, ctx.sub(subpath)
85
85
86 def nochangesfound(ui, repo, excluded=None):
86 def nochangesfound(ui, repo, excluded=None):
87 '''Report no changes for push/pull, excluded is None or a list of
87 '''Report no changes for push/pull, excluded is None or a list of
88 nodes excluded from the push/pull.
88 nodes excluded from the push/pull.
89 '''
89 '''
90 secretlist = []
90 secretlist = []
91 if excluded:
91 if excluded:
92 for n in excluded:
92 for n in excluded:
93 if n not in repo:
93 if n not in repo:
94 # discovery should not have included the filtered revision,
94 # discovery should not have included the filtered revision,
95 # we have to explicitly exclude it until discovery is cleaned up.
95 # we have to explicitly exclude it until discovery is cleaned up.
96 continue
96 continue
97 ctx = repo[n]
97 ctx = repo[n]
98 if ctx.phase() >= phases.secret and not ctx.extinct():
98 if ctx.phase() >= phases.secret and not ctx.extinct():
99 secretlist.append(n)
99 secretlist.append(n)
100
100
101 if secretlist:
101 if secretlist:
102 ui.status(_("no changes found (ignored %d secret changesets)\n")
102 ui.status(_("no changes found (ignored %d secret changesets)\n")
103 % len(secretlist))
103 % len(secretlist))
104 else:
104 else:
105 ui.status(_("no changes found\n"))
105 ui.status(_("no changes found\n"))
106
106
107 def checknewlabel(repo, lbl, kind):
107 def checknewlabel(repo, lbl, kind):
108 # Do not use the "kind" parameter in ui output.
108 # Do not use the "kind" parameter in ui output.
109 # It makes strings difficult to translate.
109 # It makes strings difficult to translate.
110 if lbl in ['tip', '.', 'null']:
110 if lbl in ['tip', '.', 'null']:
111 raise util.Abort(_("the name '%s' is reserved") % lbl)
111 raise util.Abort(_("the name '%s' is reserved") % lbl)
112 for c in (':', '\0', '\n', '\r'):
112 for c in (':', '\0', '\n', '\r'):
113 if c in lbl:
113 if c in lbl:
114 raise util.Abort(_("%r cannot be used in a name") % c)
114 raise util.Abort(_("%r cannot be used in a name") % c)
115 try:
115 try:
116 int(lbl)
116 int(lbl)
117 raise util.Abort(_("cannot use an integer as a name"))
117 raise util.Abort(_("cannot use an integer as a name"))
118 except ValueError:
118 except ValueError:
119 pass
119 pass
120
120
121 def checkfilename(f):
121 def checkfilename(f):
122 '''Check that the filename f is an acceptable filename for a tracked file'''
122 '''Check that the filename f is an acceptable filename for a tracked file'''
123 if '\r' in f or '\n' in f:
123 if '\r' in f or '\n' in f:
124 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
124 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
125
125
126 def checkportable(ui, f):
126 def checkportable(ui, f):
127 '''Check if filename f is portable and warn or abort depending on config'''
127 '''Check if filename f is portable and warn or abort depending on config'''
128 checkfilename(f)
128 checkfilename(f)
129 abort, warn = checkportabilityalert(ui)
129 abort, warn = checkportabilityalert(ui)
130 if abort or warn:
130 if abort or warn:
131 msg = util.checkwinfilename(f)
131 msg = util.checkwinfilename(f)
132 if msg:
132 if msg:
133 msg = "%s: %r" % (msg, f)
133 msg = "%s: %r" % (msg, f)
134 if abort:
134 if abort:
135 raise util.Abort(msg)
135 raise util.Abort(msg)
136 ui.warn(_("warning: %s\n") % msg)
136 ui.warn(_("warning: %s\n") % msg)
137
137
138 def checkportabilityalert(ui):
138 def checkportabilityalert(ui):
139 '''check if the user's config requests nothing, a warning, or abort for
139 '''check if the user's config requests nothing, a warning, or abort for
140 non-portable filenames'''
140 non-portable filenames'''
141 val = ui.config('ui', 'portablefilenames', 'warn')
141 val = ui.config('ui', 'portablefilenames', 'warn')
142 lval = val.lower()
142 lval = val.lower()
143 bval = util.parsebool(val)
143 bval = util.parsebool(val)
144 abort = os.name == 'nt' or lval == 'abort'
144 abort = os.name == 'nt' or lval == 'abort'
145 warn = bval or lval == 'warn'
145 warn = bval or lval == 'warn'
146 if bval is None and not (warn or abort or lval == 'ignore'):
146 if bval is None and not (warn or abort or lval == 'ignore'):
147 raise error.ConfigError(
147 raise error.ConfigError(
148 _("ui.portablefilenames value is invalid ('%s')") % val)
148 _("ui.portablefilenames value is invalid ('%s')") % val)
149 return abort, warn
149 return abort, warn
150
150
151 class casecollisionauditor(object):
151 class casecollisionauditor(object):
152 def __init__(self, ui, abort, dirstate):
152 def __init__(self, ui, abort, dirstate):
153 self._ui = ui
153 self._ui = ui
154 self._abort = abort
154 self._abort = abort
155 allfiles = '\0'.join(dirstate._map)
155 allfiles = '\0'.join(dirstate._map)
156 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
156 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
157 self._dirstate = dirstate
157 self._dirstate = dirstate
158 # The purpose of _newfiles is so that we don't complain about
158 # The purpose of _newfiles is so that we don't complain about
159 # case collisions if someone were to call this object with the
159 # case collisions if someone were to call this object with the
160 # same filename twice.
160 # same filename twice.
161 self._newfiles = set()
161 self._newfiles = set()
162
162
163 def __call__(self, f):
163 def __call__(self, f):
164 if f in self._newfiles:
164 if f in self._newfiles:
165 return
165 return
166 fl = encoding.lower(f)
166 fl = encoding.lower(f)
167 if fl in self._loweredfiles and f not in self._dirstate:
167 if fl in self._loweredfiles and f not in self._dirstate:
168 msg = _('possible case-folding collision for %s') % f
168 msg = _('possible case-folding collision for %s') % f
169 if self._abort:
169 if self._abort:
170 raise util.Abort(msg)
170 raise util.Abort(msg)
171 self._ui.warn(_("warning: %s\n") % msg)
171 self._ui.warn(_("warning: %s\n") % msg)
172 self._loweredfiles.add(fl)
172 self._loweredfiles.add(fl)
173 self._newfiles.add(f)
173 self._newfiles.add(f)
174
174
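A sketch of the intended calling pattern (the ui and repo objects are assumed to exist, the file names are invented, and none of them are assumed to be tracked yet):

    audit = casecollisionauditor(ui, False, repo.dirstate)   # warn, don't abort
    audit('README')     # remembered as a new file
    audit('ReadMe')     # warns: possible case-folding collision for ReadMe
    audit('ReadMe')     # silent: already recorded in _newfiles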
175 class abstractvfs(object):
175 class abstractvfs(object):
176 """Abstract base class; cannot be instantiated"""
176 """Abstract base class; cannot be instantiated"""
177
177
178 def __init__(self, *args, **kwargs):
178 def __init__(self, *args, **kwargs):
179 '''Prevent instantiation; don't call this from subclasses.'''
179 '''Prevent instantiation; don't call this from subclasses.'''
180 raise NotImplementedError('attempted instantiating ' + str(type(self)))
180 raise NotImplementedError('attempted instantiating ' + str(type(self)))
181
181
182 def tryread(self, path):
182 def tryread(self, path):
183 '''gracefully return an empty string for missing files'''
183 '''gracefully return an empty string for missing files'''
184 try:
184 try:
185 return self.read(path)
185 return self.read(path)
186 except IOError, inst:
186 except IOError, inst:
187 if inst.errno != errno.ENOENT:
187 if inst.errno != errno.ENOENT:
188 raise
188 raise
189 return ""
189 return ""
190
190
191 def tryreadlines(self, path, mode='rb'):
191 def tryreadlines(self, path, mode='rb'):
192 '''gracefully return an empty array for missing files'''
192 '''gracefully return an empty array for missing files'''
193 try:
193 try:
194 return self.readlines(path, mode=mode)
194 return self.readlines(path, mode=mode)
195 except IOError, inst:
195 except IOError, inst:
196 if inst.errno != errno.ENOENT:
196 if inst.errno != errno.ENOENT:
197 raise
197 raise
198 return []
198 return []
199
199
200 def open(self, path, mode="r", text=False, atomictemp=False,
200 def open(self, path, mode="r", text=False, atomictemp=False,
201 notindexed=False):
201 notindexed=False):
202 '''Open ``path`` file, which is relative to vfs root.
202 '''Open ``path`` file, which is relative to vfs root.
203
203
204 Newly created directories are marked as "not to be indexed by
204 Newly created directories are marked as "not to be indexed by
205 the content indexing service", if ``notindexed`` is specified
205 the content indexing service", if ``notindexed`` is specified
206 for "write" mode access.
206 for "write" mode access.
207 '''
207 '''
208 self.open = self.__call__
208 self.open = self.__call__
209 return self.__call__(path, mode, text, atomictemp, notindexed)
209 return self.__call__(path, mode, text, atomictemp, notindexed)
210
210
211 def read(self, path):
211 def read(self, path):
212 fp = self(path, 'rb')
212 fp = self(path, 'rb')
213 try:
213 try:
214 return fp.read()
214 return fp.read()
215 finally:
215 finally:
216 fp.close()
216 fp.close()
217
217
218 def readlines(self, path, mode='rb'):
218 def readlines(self, path, mode='rb'):
219 fp = self(path, mode=mode)
219 fp = self(path, mode=mode)
220 try:
220 try:
221 return fp.readlines()
221 return fp.readlines()
222 finally:
222 finally:
223 fp.close()
223 fp.close()
224
224
225 def write(self, path, data):
225 def write(self, path, data):
226 fp = self(path, 'wb')
226 fp = self(path, 'wb')
227 try:
227 try:
228 return fp.write(data)
228 return fp.write(data)
229 finally:
229 finally:
230 fp.close()
230 fp.close()
231
231
232 def writelines(self, path, data, mode='wb', notindexed=False):
232 def writelines(self, path, data, mode='wb', notindexed=False):
233 fp = self(path, mode=mode, notindexed=notindexed)
233 fp = self(path, mode=mode, notindexed=notindexed)
234 try:
234 try:
235 return fp.writelines(data)
235 return fp.writelines(data)
236 finally:
236 finally:
237 fp.close()
237 fp.close()
238
238
239 def append(self, path, data):
239 def append(self, path, data):
240 fp = self(path, 'ab')
240 fp = self(path, 'ab')
241 try:
241 try:
242 return fp.write(data)
242 return fp.write(data)
243 finally:
243 finally:
244 fp.close()
244 fp.close()
245
245
246 def chmod(self, path, mode):
246 def chmod(self, path, mode):
247 return os.chmod(self.join(path), mode)
247 return os.chmod(self.join(path), mode)
248
248
249 def exists(self, path=None):
249 def exists(self, path=None):
250 return os.path.exists(self.join(path))
250 return os.path.exists(self.join(path))
251
251
252 def fstat(self, fp):
252 def fstat(self, fp):
253 return util.fstat(fp)
253 return util.fstat(fp)
254
254
255 def isdir(self, path=None):
255 def isdir(self, path=None):
256 return os.path.isdir(self.join(path))
256 return os.path.isdir(self.join(path))
257
257
258 def isfile(self, path=None):
258 def isfile(self, path=None):
259 return os.path.isfile(self.join(path))
259 return os.path.isfile(self.join(path))
260
260
261 def islink(self, path=None):
261 def islink(self, path=None):
262 return os.path.islink(self.join(path))
262 return os.path.islink(self.join(path))
263
263
264 def reljoin(self, *paths):
264 def reljoin(self, *paths):
265 """join various elements of a path together (as os.path.join would do)
265 """join various elements of a path together (as os.path.join would do)
266
266
267 The vfs base is not injected so that paths stay relative. This exists
267 The vfs base is not injected so that paths stay relative. This exists
268 to allow handling of strange encoding if needed."""
268 to allow handling of strange encoding if needed."""
269 return os.path.join(*paths)
269 return os.path.join(*paths)
270
270
271 def split(self, path):
271 def split(self, path):
272 """split top-most element of a path (as os.path.split would do)
272 """split top-most element of a path (as os.path.split would do)
273
273
274 This exists to allow handling of strange encoding if needed."""
274 This exists to allow handling of strange encoding if needed."""
275 return os.path.split(path)
275 return os.path.split(path)
276
276
277 def lexists(self, path=None):
277 def lexists(self, path=None):
278 return os.path.lexists(self.join(path))
278 return os.path.lexists(self.join(path))
279
279
280 def lstat(self, path=None):
280 def lstat(self, path=None):
281 return os.lstat(self.join(path))
281 return os.lstat(self.join(path))
282
282
283 def listdir(self, path=None):
283 def listdir(self, path=None):
284 return os.listdir(self.join(path))
284 return os.listdir(self.join(path))
285
285
286 def makedir(self, path=None, notindexed=True):
286 def makedir(self, path=None, notindexed=True):
287 return util.makedir(self.join(path), notindexed)
287 return util.makedir(self.join(path), notindexed)
288
288
289 def makedirs(self, path=None, mode=None):
289 def makedirs(self, path=None, mode=None):
290 return util.makedirs(self.join(path), mode)
290 return util.makedirs(self.join(path), mode)
291
291
292 def makelock(self, info, path):
292 def makelock(self, info, path):
293 return util.makelock(info, self.join(path))
293 return util.makelock(info, self.join(path))
294
294
295 def mkdir(self, path=None):
295 def mkdir(self, path=None):
296 return os.mkdir(self.join(path))
296 return os.mkdir(self.join(path))
297
297
298 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
298 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
299 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
299 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
300 dir=self.join(dir), text=text)
300 dir=self.join(dir), text=text)
301 dname, fname = util.split(name)
301 dname, fname = util.split(name)
302 if dir:
302 if dir:
303 return fd, os.path.join(dir, fname)
303 return fd, os.path.join(dir, fname)
304 else:
304 else:
305 return fd, fname
305 return fd, fname
306
306
307 def readdir(self, path=None, stat=None, skip=None):
307 def readdir(self, path=None, stat=None, skip=None):
308 return osutil.listdir(self.join(path), stat, skip)
308 return osutil.listdir(self.join(path), stat, skip)
309
309
310 def readlock(self, path):
310 def readlock(self, path):
311 return util.readlock(self.join(path))
311 return util.readlock(self.join(path))
312
312
313 def rename(self, src, dst):
313 def rename(self, src, dst):
314 return util.rename(self.join(src), self.join(dst))
314 return util.rename(self.join(src), self.join(dst))
315
315
316 def readlink(self, path):
316 def readlink(self, path):
317 return os.readlink(self.join(path))
317 return os.readlink(self.join(path))
318
318
319 def setflags(self, path, l, x):
319 def setflags(self, path, l, x):
320 return util.setflags(self.join(path), l, x)
320 return util.setflags(self.join(path), l, x)
321
321
322 def stat(self, path=None):
322 def stat(self, path=None):
323 return os.stat(self.join(path))
323 return os.stat(self.join(path))
324
324
325 def unlink(self, path=None):
325 def unlink(self, path=None):
326 return util.unlink(self.join(path))
326 return util.unlink(self.join(path))
327
327
328 def unlinkpath(self, path=None, ignoremissing=False):
328 def unlinkpath(self, path=None, ignoremissing=False):
329 return util.unlinkpath(self.join(path), ignoremissing)
329 return util.unlinkpath(self.join(path), ignoremissing)
330
330
331 def utime(self, path=None, t=None):
331 def utime(self, path=None, t=None):
332 return os.utime(self.join(path), t)
332 return os.utime(self.join(path), t)
333
333
334 class vfs(abstractvfs):
334 class vfs(abstractvfs):
335 '''Operate files relative to a base directory
335 '''Operate files relative to a base directory
336
336
337 This class is used to hide the details of COW semantics and
337 This class is used to hide the details of COW semantics and
338 remote file access from higher level code.
338 remote file access from higher level code.
339 '''
339 '''
340 def __init__(self, base, audit=True, expandpath=False, realpath=False):
340 def __init__(self, base, audit=True, expandpath=False, realpath=False):
341 if expandpath:
341 if expandpath:
342 base = util.expandpath(base)
342 base = util.expandpath(base)
343 if realpath:
343 if realpath:
344 base = os.path.realpath(base)
344 base = os.path.realpath(base)
345 self.base = base
345 self.base = base
346 self._setmustaudit(audit)
346 self._setmustaudit(audit)
347 self.createmode = None
347 self.createmode = None
348 self._trustnlink = None
348 self._trustnlink = None
349
349
350 def _getmustaudit(self):
350 def _getmustaudit(self):
351 return self._audit
351 return self._audit
352
352
353 def _setmustaudit(self, onoff):
353 def _setmustaudit(self, onoff):
354 self._audit = onoff
354 self._audit = onoff
355 if onoff:
355 if onoff:
356 self.audit = pathutil.pathauditor(self.base)
356 self.audit = pathutil.pathauditor(self.base)
357 else:
357 else:
358 self.audit = util.always
358 self.audit = util.always
359
359
360 mustaudit = property(_getmustaudit, _setmustaudit)
360 mustaudit = property(_getmustaudit, _setmustaudit)
361
361
362 @util.propertycache
362 @util.propertycache
363 def _cansymlink(self):
363 def _cansymlink(self):
364 return util.checklink(self.base)
364 return util.checklink(self.base)
365
365
366 @util.propertycache
366 @util.propertycache
367 def _chmod(self):
367 def _chmod(self):
368 return util.checkexec(self.base)
368 return util.checkexec(self.base)
369
369
370 def _fixfilemode(self, name):
370 def _fixfilemode(self, name):
371 if self.createmode is None or not self._chmod:
371 if self.createmode is None or not self._chmod:
372 return
372 return
373 os.chmod(name, self.createmode & 0666)
373 os.chmod(name, self.createmode & 0666)
374
374
375 def __call__(self, path, mode="r", text=False, atomictemp=False,
375 def __call__(self, path, mode="r", text=False, atomictemp=False,
376 notindexed=False):
376 notindexed=False):
377 '''Open ``path`` file, which is relative to vfs root.
377 '''Open ``path`` file, which is relative to vfs root.
378
378
379 Newly created directories are marked as "not to be indexed by
379 Newly created directories are marked as "not to be indexed by
380 the content indexing service", if ``notindexed`` is specified
380 the content indexing service", if ``notindexed`` is specified
381 for "write" mode access.
381 for "write" mode access.
382 '''
382 '''
383 if self._audit:
383 if self._audit:
384 r = util.checkosfilename(path)
384 r = util.checkosfilename(path)
385 if r:
385 if r:
386 raise util.Abort("%s: %r" % (r, path))
386 raise util.Abort("%s: %r" % (r, path))
387 self.audit(path)
387 self.audit(path)
388 f = self.join(path)
388 f = self.join(path)
389
389
390 if not text and "b" not in mode:
390 if not text and "b" not in mode:
391 mode += "b" # for that other OS
391 mode += "b" # for that other OS
392
392
393 nlink = -1
393 nlink = -1
394 if mode not in ('r', 'rb'):
394 if mode not in ('r', 'rb'):
395 dirname, basename = util.split(f)
395 dirname, basename = util.split(f)
396 # If basename is empty, then the path is malformed because it points
396 # If basename is empty, then the path is malformed because it points
397 # to a directory. Let the posixfile() call below raise IOError.
397 # to a directory. Let the posixfile() call below raise IOError.
398 if basename:
398 if basename:
399 if atomictemp:
399 if atomictemp:
400 util.ensuredirs(dirname, self.createmode, notindexed)
400 util.ensuredirs(dirname, self.createmode, notindexed)
401 return util.atomictempfile(f, mode, self.createmode)
401 return util.atomictempfile(f, mode, self.createmode)
402 try:
402 try:
403 if 'w' in mode:
403 if 'w' in mode:
404 util.unlink(f)
404 util.unlink(f)
405 nlink = 0
405 nlink = 0
406 else:
406 else:
407 # nlinks() may behave differently for files on Windows
407 # nlinks() may behave differently for files on Windows
408 # shares if the file is open.
408 # shares if the file is open.
409 fd = util.posixfile(f)
409 fd = util.posixfile(f)
410 nlink = util.nlinks(f)
410 nlink = util.nlinks(f)
411 if nlink < 1:
411 if nlink < 1:
412 nlink = 2 # force mktempcopy (issue1922)
412 nlink = 2 # force mktempcopy (issue1922)
413 fd.close()
413 fd.close()
414 except (OSError, IOError), e:
414 except (OSError, IOError), e:
415 if e.errno != errno.ENOENT:
415 if e.errno != errno.ENOENT:
416 raise
416 raise
417 nlink = 0
417 nlink = 0
418 util.ensuredirs(dirname, self.createmode, notindexed)
418 util.ensuredirs(dirname, self.createmode, notindexed)
419 if nlink > 0:
419 if nlink > 0:
420 if self._trustnlink is None:
420 if self._trustnlink is None:
421 self._trustnlink = nlink > 1 or util.checknlink(f)
421 self._trustnlink = nlink > 1 or util.checknlink(f)
422 if nlink > 1 or not self._trustnlink:
422 if nlink > 1 or not self._trustnlink:
423 util.rename(util.mktempcopy(f), f)
423 util.rename(util.mktempcopy(f), f)
424 fp = util.posixfile(f, mode)
424 fp = util.posixfile(f, mode)
425 if nlink == 0:
425 if nlink == 0:
426 self._fixfilemode(f)
426 self._fixfilemode(f)
427 return fp
427 return fp
428
428
429 def symlink(self, src, dst):
429 def symlink(self, src, dst):
430 self.audit(dst)
430 self.audit(dst)
431 linkname = self.join(dst)
431 linkname = self.join(dst)
432 try:
432 try:
433 os.unlink(linkname)
433 os.unlink(linkname)
434 except OSError:
434 except OSError:
435 pass
435 pass
436
436
437 util.ensuredirs(os.path.dirname(linkname), self.createmode)
437 util.ensuredirs(os.path.dirname(linkname), self.createmode)
438
438
439 if self._cansymlink:
439 if self._cansymlink:
440 try:
440 try:
441 os.symlink(src, linkname)
441 os.symlink(src, linkname)
442 except OSError, err:
442 except OSError, err:
443 raise OSError(err.errno, _('could not symlink to %r: %s') %
443 raise OSError(err.errno, _('could not symlink to %r: %s') %
444 (src, err.strerror), linkname)
444 (src, err.strerror), linkname)
445 else:
445 else:
446 self.write(dst, src)
446 self.write(dst, src)
447
447
448 def join(self, path):
448 def join(self, path):
449 if path:
449 if path:
450 return os.path.join(self.base, path)
450 return os.path.join(self.base, path)
451 else:
451 else:
452 return self.base
452 return self.base
453
453
454 opener = vfs
454 opener = vfs
455
455
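A minimal usage sketch for the vfs class defined above; the base directory '/tmp/repo/.hg' is hypothetical and every path below is resolved relative to it:

v = vfs('/tmp/repo/.hg', audit=False)
v.write('editorial-note', 'hello\n')   # creates /tmp/repo/.hg/editorial-note
print v.read('editorial-note')         # -> 'hello\n'
print v.tryread('no-such-file')        # -> '' instead of raising IOError
v.unlink('editorial-note')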
456 class auditvfs(object):
456 class auditvfs(object):
457 def __init__(self, vfs):
457 def __init__(self, vfs):
458 self.vfs = vfs
458 self.vfs = vfs
459
459
460 def _getmustaudit(self):
460 def _getmustaudit(self):
461 return self.vfs.mustaudit
461 return self.vfs.mustaudit
462
462
463 def _setmustaudit(self, onoff):
463 def _setmustaudit(self, onoff):
464 self.vfs.mustaudit = onoff
464 self.vfs.mustaudit = onoff
465
465
466 mustaudit = property(_getmustaudit, _setmustaudit)
466 mustaudit = property(_getmustaudit, _setmustaudit)
467
467
468 class filtervfs(abstractvfs, auditvfs):
468 class filtervfs(abstractvfs, auditvfs):
469 '''Wrapper vfs for filtering filenames with a function.'''
469 '''Wrapper vfs for filtering filenames with a function.'''
470
470
471 def __init__(self, vfs, filter):
471 def __init__(self, vfs, filter):
472 auditvfs.__init__(self, vfs)
472 auditvfs.__init__(self, vfs)
473 self._filter = filter
473 self._filter = filter
474
474
475 def __call__(self, path, *args, **kwargs):
475 def __call__(self, path, *args, **kwargs):
476 return self.vfs(self._filter(path), *args, **kwargs)
476 return self.vfs(self._filter(path), *args, **kwargs)
477
477
478 def join(self, path):
478 def join(self, path):
479 if path:
479 if path:
480 return self.vfs.join(self._filter(path))
480 return self.vfs.join(self._filter(path))
481 else:
481 else:
482 return self.vfs.join(path)
482 return self.vfs.join(path)
483
483
484 filteropener = filtervfs
484 filteropener = filtervfs
485
485
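A minimal sketch of filtervfs, assuming a base vfs rooted at a hypothetical '/tmp/store'; the lambda stands in for a real store-encoding function:

base = vfs('/tmp/store', audit=False)
enc = filtervfs(base, lambda p: p.replace('.', '~2e'))
enc.write('foo.i', 'data')    # actually written to /tmp/store/foo~2e.i
print enc.join('foo.i')       # -> '/tmp/store/foo~2e.i'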
486 class readonlyvfs(abstractvfs, auditvfs):
486 class readonlyvfs(abstractvfs, auditvfs):
487 '''Wrapper vfs preventing any writing.'''
487 '''Wrapper vfs preventing any writing.'''
488
488
489 def __init__(self, vfs):
489 def __init__(self, vfs):
490 auditvfs.__init__(self, vfs)
490 auditvfs.__init__(self, vfs)
491
491
492 def __call__(self, path, mode='r', *args, **kw):
492 def __call__(self, path, mode='r', *args, **kw):
493 if mode not in ('r', 'rb'):
493 if mode not in ('r', 'rb'):
494 raise util.Abort('this vfs is read only')
494 raise util.Abort('this vfs is read only')
495 return self.vfs(path, mode, *args, **kw)
495 return self.vfs(path, mode, *args, **kw)
496
496
497
497
498 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
498 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
499 '''yield every hg repository under path, always recursively.
499 '''yield every hg repository under path, always recursively.
500 The recurse flag will only control recursion into repo working dirs'''
500 The recurse flag will only control recursion into repo working dirs'''
501 def errhandler(err):
501 def errhandler(err):
502 if err.filename == path:
502 if err.filename == path:
503 raise err
503 raise err
504 samestat = getattr(os.path, 'samestat', None)
504 samestat = getattr(os.path, 'samestat', None)
505 if followsym and samestat is not None:
505 if followsym and samestat is not None:
506 def adddir(dirlst, dirname):
506 def adddir(dirlst, dirname):
507 match = False
507 match = False
508 dirstat = os.stat(dirname)
508 dirstat = os.stat(dirname)
509 for lstdirstat in dirlst:
509 for lstdirstat in dirlst:
510 if samestat(dirstat, lstdirstat):
510 if samestat(dirstat, lstdirstat):
511 match = True
511 match = True
512 break
512 break
513 if not match:
513 if not match:
514 dirlst.append(dirstat)
514 dirlst.append(dirstat)
515 return not match
515 return not match
516 else:
516 else:
517 followsym = False
517 followsym = False
518
518
519 if (seen_dirs is None) and followsym:
519 if (seen_dirs is None) and followsym:
520 seen_dirs = []
520 seen_dirs = []
521 adddir(seen_dirs, path)
521 adddir(seen_dirs, path)
522 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
522 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
523 dirs.sort()
523 dirs.sort()
524 if '.hg' in dirs:
524 if '.hg' in dirs:
525 yield root # found a repository
525 yield root # found a repository
526 qroot = os.path.join(root, '.hg', 'patches')
526 qroot = os.path.join(root, '.hg', 'patches')
527 if os.path.isdir(os.path.join(qroot, '.hg')):
527 if os.path.isdir(os.path.join(qroot, '.hg')):
528 yield qroot # we have a patch queue repo here
528 yield qroot # we have a patch queue repo here
529 if recurse:
529 if recurse:
530 # avoid recursing inside the .hg directory
530 # avoid recursing inside the .hg directory
531 dirs.remove('.hg')
531 dirs.remove('.hg')
532 else:
532 else:
533 dirs[:] = [] # don't descend further
533 dirs[:] = [] # don't descend further
534 elif followsym:
534 elif followsym:
535 newdirs = []
535 newdirs = []
536 for d in dirs:
536 for d in dirs:
537 fname = os.path.join(root, d)
537 fname = os.path.join(root, d)
538 if adddir(seen_dirs, fname):
538 if adddir(seen_dirs, fname):
539 if os.path.islink(fname):
539 if os.path.islink(fname):
540 for hgname in walkrepos(fname, True, seen_dirs):
540 for hgname in walkrepos(fname, True, seen_dirs):
541 yield hgname
541 yield hgname
542 else:
542 else:
543 newdirs.append(d)
543 newdirs.append(d)
544 dirs[:] = newdirs
544 dirs[:] = newdirs
545
545
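A minimal sketch of walkrepos(), assuming a hypothetical '/srv/hg' tree that contains several repositories:

for repopath in walkrepos('/srv/hg', followsym=True, recurse=True):
    # yields every directory containing '.hg', plus any patch queue
    # repository found under '<repo>/.hg/patches'
    print repopath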
546 def osrcpath():
546 def osrcpath():
547 '''return default os-specific hgrc search path'''
547 '''return default os-specific hgrc search path'''
548 path = []
548 path = []
549 defaultpath = os.path.join(util.datapath, 'default.d')
549 defaultpath = os.path.join(util.datapath, 'default.d')
550 if os.path.isdir(defaultpath):
550 if os.path.isdir(defaultpath):
551 for f, kind in osutil.listdir(defaultpath):
551 for f, kind in osutil.listdir(defaultpath):
552 if f.endswith('.rc'):
552 if f.endswith('.rc'):
553 path.append(os.path.join(defaultpath, f))
553 path.append(os.path.join(defaultpath, f))
554 path.extend(systemrcpath())
554 path.extend(systemrcpath())
555 path.extend(userrcpath())
555 path.extend(userrcpath())
556 path = [os.path.normpath(f) for f in path]
556 path = [os.path.normpath(f) for f in path]
557 return path
557 return path
558
558
559 _rcpath = None
559 _rcpath = None
560
560
561 def rcpath():
561 def rcpath():
562 '''Return the hgrc search path. If the HGRCPATH environment variable is
562 '''Return the hgrc search path. If the HGRCPATH environment variable is
563 set, use it: for each item in the path, use the files ending in .rc if
563 set, use it: for each item in the path, use the files ending in .rc if
564 the item is a directory, else use the item itself.
564 the item is a directory, else use the item itself.
565 Set HGRCPATH to an empty value to only look in .hg/hgrc of the current repo.
565 Set HGRCPATH to an empty value to only look in .hg/hgrc of the current repo.
566 If HGRCPATH is not set, use the default os-specific path.'''
566 If HGRCPATH is not set, use the default os-specific path.'''
567 global _rcpath
567 global _rcpath
568 if _rcpath is None:
568 if _rcpath is None:
569 if 'HGRCPATH' in os.environ:
569 if 'HGRCPATH' in os.environ:
570 _rcpath = []
570 _rcpath = []
571 for p in os.environ['HGRCPATH'].split(os.pathsep):
571 for p in os.environ['HGRCPATH'].split(os.pathsep):
572 if not p:
572 if not p:
573 continue
573 continue
574 p = util.expandpath(p)
574 p = util.expandpath(p)
575 if os.path.isdir(p):
575 if os.path.isdir(p):
576 for f, kind in osutil.listdir(p):
576 for f, kind in osutil.listdir(p):
577 if f.endswith('.rc'):
577 if f.endswith('.rc'):
578 _rcpath.append(os.path.join(p, f))
578 _rcpath.append(os.path.join(p, f))
579 else:
579 else:
580 _rcpath.append(p)
580 _rcpath.append(p)
581 else:
581 else:
582 _rcpath = osrcpath()
582 _rcpath = osrcpath()
583 return _rcpath
583 return _rcpath
584
584
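A minimal sketch of the HGRCPATH behaviour described in the docstring; the paths are hypothetical, and note that the result is cached in the module-level _rcpath after the first call:

os.environ['HGRCPATH'] = os.pathsep.join([
    '/etc/mercurial-extra',      # a directory: every *.rc file inside is used
    '/home/user/work.hgrc',      # a plain file: used as-is
])
print rcpath()                   # the expanded list; an empty HGRCPATH yields []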
585 def revsingle(repo, revspec, default='.'):
585 def revsingle(repo, revspec, default='.'):
586 if not revspec and revspec != 0:
586 if not revspec and revspec != 0:
587 return repo[default]
587 return repo[default]
588
588
589 l = revrange(repo, [revspec])
589 l = revrange(repo, [revspec])
590 if not l:
590 if not l:
591 raise util.Abort(_('empty revision set'))
591 raise util.Abort(_('empty revision set'))
592 return repo[l.last()]
592 return repo[l.last()]
593
593
594 def revpair(repo, revs):
594 def revpair(repo, revs):
595 if not revs:
595 if not revs:
596 return repo.dirstate.p1(), None
596 return repo.dirstate.p1(), None
597
597
598 l = revrange(repo, revs)
598 l = revrange(repo, revs)
599
599
600 if not l:
600 if not l:
601 first = second = None
601 first = second = None
602 elif l.isascending():
602 elif l.isascending():
603 first = l.min()
603 first = l.min()
604 second = l.max()
604 second = l.max()
605 elif l.isdescending():
605 elif l.isdescending():
606 first = l.max()
606 first = l.max()
607 second = l.min()
607 second = l.min()
608 else:
608 else:
609 first = l.first()
609 first = l.first()
610 second = l.last()
610 second = l.last()
611
611
612 if first is None:
612 if first is None:
613 raise util.Abort(_('empty revision range'))
613 raise util.Abort(_('empty revision range'))
614
614
615 if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
615 if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
616 return repo.lookup(first), None
616 return repo.lookup(first), None
617
617
618 return repo.lookup(first), repo.lookup(second)
618 return repo.lookup(first), repo.lookup(second)
619
619
620 _revrangesep = ':'
620 _revrangesep = ':'
621
621
622 def revrange(repo, revs):
622 def revrange(repo, revs):
623 """Yield revision as strings from a list of revision specifications."""
623 """Yield revision as strings from a list of revision specifications."""
624
624
625 def revfix(repo, val, defval):
625 def revfix(repo, val, defval):
626 if not val and val != 0 and defval is not None:
626 if not val and val != 0 and defval is not None:
627 return defval
627 return defval
628 return repo[val].rev()
628 return repo[val].rev()
629
629
630 seen, l = set(), revset.baseset([])
630 seen, l = set(), revset.baseset([])
631 for spec in revs:
631 for spec in revs:
632 if l and not seen:
632 if l and not seen:
633 seen = set(l)
633 seen = set(l)
634 # attempt to parse old-style ranges first to deal with
634 # attempt to parse old-style ranges first to deal with
635 # things like old-tag which contain query metacharacters
635 # things like old-tag which contain query metacharacters
636 try:
636 try:
637 if isinstance(spec, int):
637 if isinstance(spec, int):
638 seen.add(spec)
638 seen.add(spec)
639 l = l + revset.baseset([spec])
639 l = l + revset.baseset([spec])
640 continue
640 continue
641
641
642 if _revrangesep in spec:
642 if _revrangesep in spec:
643 start, end = spec.split(_revrangesep, 1)
643 start, end = spec.split(_revrangesep, 1)
644 start = revfix(repo, start, 0)
644 start = revfix(repo, start, 0)
645 end = revfix(repo, end, len(repo) - 1)
645 end = revfix(repo, end, len(repo) - 1)
646 if end == nullrev and start < 0:
646 if end == nullrev and start < 0:
647 start = nullrev
647 start = nullrev
648 rangeiter = repo.changelog.revs(start, end)
648 rangeiter = repo.changelog.revs(start, end)
649 if not seen and not l:
649 if not seen and not l:
650 # by far the most common case: revs = ["-1:0"]
650 # by far the most common case: revs = ["-1:0"]
651 l = revset.baseset(rangeiter)
651 l = revset.baseset(rangeiter)
652 # defer syncing seen until next iteration
652 # defer syncing seen until next iteration
653 continue
653 continue
654 newrevs = set(rangeiter)
654 newrevs = set(rangeiter)
655 if seen:
655 if seen:
656 newrevs.difference_update(seen)
656 newrevs.difference_update(seen)
657 seen.update(newrevs)
657 seen.update(newrevs)
658 else:
658 else:
659 seen = newrevs
659 seen = newrevs
660 l = l + revset.baseset(sorted(newrevs, reverse=start > end))
660 l = l + revset.baseset(sorted(newrevs, reverse=start > end))
661 continue
661 continue
662 elif spec and spec in repo: # single unquoted rev
662 elif spec and spec in repo: # single unquoted rev
663 rev = revfix(repo, spec, None)
663 rev = revfix(repo, spec, None)
664 if rev in seen:
664 if rev in seen:
665 continue
665 continue
666 seen.add(rev)
666 seen.add(rev)
667 l = l + revset.baseset([rev])
667 l = l + revset.baseset([rev])
668 continue
668 continue
669 except error.RepoLookupError:
669 except error.RepoLookupError:
670 pass
670 pass
671
671
672 # fall through to new-style queries if old-style fails
672 # fall through to new-style queries if old-style fails
673 m = revset.match(repo.ui, spec, repo)
673 m = revset.match(repo.ui, spec, repo)
674 if seen or l:
674 if seen or l:
675 dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
675 dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
676 l = l + revset.baseset(dl)
676 l = l + revset.baseset(dl)
677 seen.update(dl)
677 seen.update(dl)
678 else:
678 else:
679 l = m(repo, revset.spanset(repo))
679 l = m(repo, revset.spanset(repo))
680
680
681 return l
681 return l
682
682
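A minimal sketch of the revision helpers above, assuming `repo` is an existing localrepository with a few changesets:

revs = revrange(repo, ['0:2', 'tip'])        # old-style range plus a revset
print [repo[r].hex()[:12] for r in revs]     # short hashes, duplicates removed
ctx = revsingle(repo, 'tip')                 # a single changectx ('.' by default)
first, second = revpair(repo, ['0:tip'])     # binary node ids of the endpoints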
683 def expandpats(pats):
683 def expandpats(pats):
684 '''Expand bare globs when running on windows.
684 '''Expand bare globs when running on windows.
685 On posix we assume it has already been done by sh.'''
685 On posix we assume it has already been done by sh.'''
686 if not util.expandglobs:
686 if not util.expandglobs:
687 return list(pats)
687 return list(pats)
688 ret = []
688 ret = []
689 for kindpat in pats:
689 for kindpat in pats:
690 kind, pat = matchmod._patsplit(kindpat, None)
690 kind, pat = matchmod._patsplit(kindpat, None)
691 if kind is None:
691 if kind is None:
692 try:
692 try:
693 globbed = glob.glob(pat)
693 globbed = glob.glob(pat)
694 except re.error:
694 except re.error:
695 globbed = [pat]
695 globbed = [pat]
696 if globbed:
696 if globbed:
697 ret.extend(globbed)
697 ret.extend(globbed)
698 continue
698 continue
699 ret.append(kindpat)
699 ret.append(kindpat)
700 return ret
700 return ret
701
701
702 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
702 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
703 '''Return a matcher and the patterns that were used.
703 '''Return a matcher and the patterns that were used.
704 The matcher will warn about bad matches.'''
704 The matcher will warn about bad matches.'''
705 if pats == ("",):
705 if pats == ("",):
706 pats = []
706 pats = []
707 if not globbed and default == 'relpath':
707 if not globbed and default == 'relpath':
708 pats = expandpats(pats or [])
708 pats = expandpats(pats or [])
709
709
710 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
710 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
711 default)
711 default)
712 def badfn(f, msg):
712 def badfn(f, msg):
713 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
713 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
714 m.bad = badfn
714 m.bad = badfn
715 return m, pats
715 return m, pats
716
716
717 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
717 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
718 '''Return a matcher that will warn about bad matches.'''
718 '''Return a matcher that will warn about bad matches.'''
719 return matchandpats(ctx, pats, opts, globbed, default)[0]
719 return matchandpats(ctx, pats, opts, globbed, default)[0]
720
720
721 def matchall(repo):
721 def matchall(repo):
722 '''Return a matcher that will efficiently match everything.'''
722 '''Return a matcher that will efficiently match everything.'''
723 return matchmod.always(repo.root, repo.getcwd())
723 return matchmod.always(repo.root, repo.getcwd())
724
724
725 def matchfiles(repo, files):
725 def matchfiles(repo, files):
726 '''Return a matcher that will efficiently match exactly these files.'''
726 '''Return a matcher that will efficiently match exactly these files.'''
727 return matchmod.exact(repo.root, repo.getcwd(), files)
727 return matchmod.exact(repo.root, repo.getcwd(), files)
728
728
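A minimal sketch of the matcher helpers, assuming `repo` is an existing localrepository and the working directory context is wanted:

wctx = repo[None]
m = match(wctx, ['glob:*.py'], {'exclude': ['re:.*_test\.py$']})
for f in wctx.walk(m):       # files known to the working directory that match
    print m.rel(f)           # path relative to the cwd, as shown to the user
everything = matchall(repo)
justtwo = matchfiles(repo, ['a.txt', 'b.txt'])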
729 def addremove(repo, matcher, prefix, opts={}, dry_run=None, similarity=None):
729 def addremove(repo, matcher, prefix, opts={}, dry_run=None, similarity=None):
730 m = matcher
730 m = matcher
731 if dry_run is None:
731 if dry_run is None:
732 dry_run = opts.get('dry_run')
732 dry_run = opts.get('dry_run')
733 if similarity is None:
733 if similarity is None:
734 similarity = float(opts.get('similarity') or 0)
734 similarity = float(opts.get('similarity') or 0)
735
735
736 ret = 0
736 ret = 0
737 join = lambda f: os.path.join(prefix, f)
737 join = lambda f: os.path.join(prefix, f)
738
738
739 def matchessubrepo(matcher, subpath):
739 def matchessubrepo(matcher, subpath):
740 if matcher.exact(subpath):
740 if matcher.exact(subpath):
741 return True
741 return True
742 for f in matcher.files():
742 for f in matcher.files():
743 if f.startswith(subpath):
743 if f.startswith(subpath):
744 return True
744 return True
745 return False
745 return False
746
746
747 wctx = repo[None]
747 wctx = repo[None]
748 for subpath in sorted(wctx.substate):
748 for subpath in sorted(wctx.substate):
749 if opts.get('subrepos') or matchessubrepo(m, subpath):
749 if opts.get('subrepos') or matchessubrepo(m, subpath):
750 sub = wctx.sub(subpath)
750 sub = wctx.sub(subpath)
751 try:
751 try:
752 submatch = matchmod.narrowmatcher(subpath, m)
752 submatch = matchmod.narrowmatcher(subpath, m)
753 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
753 if sub.addremove(submatch, prefix, opts, dry_run, similarity):
754 ret = 1
754 ret = 1
755 except error.LookupError:
755 except error.LookupError:
756 repo.ui.status(_("skipping missing subrepository: %s\n")
756 repo.ui.status(_("skipping missing subrepository: %s\n")
757 % join(subpath))
757 % join(subpath))
758
758
759 rejected = []
759 rejected = []
760 origbad = m.bad
760 origbad = m.bad
761 def badfn(f, msg):
761 def badfn(f, msg):
762 if f in m.files():
762 if f in m.files():
763 origbad(f, msg)
763 origbad(f, msg)
764 rejected.append(f)
764 rejected.append(f)
765
765
766 m.bad = badfn
766 m.bad = badfn
767 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
767 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
768 m.bad = origbad
768 m.bad = origbad
769
769
770 unknownset = set(unknown + forgotten)
770 unknownset = set(unknown + forgotten)
771 toprint = unknownset.copy()
771 toprint = unknownset.copy()
772 toprint.update(deleted)
772 toprint.update(deleted)
773 for abs in sorted(toprint):
773 for abs in sorted(toprint):
774 if repo.ui.verbose or not m.exact(abs):
774 if repo.ui.verbose or not m.exact(abs):
775 if abs in unknownset:
775 if abs in unknownset:
776 status = _('adding %s\n') % m.uipath(join(abs))
776 status = _('adding %s\n') % m.uipath(abs)
777 else:
777 else:
778 status = _('removing %s\n') % m.uipath(join(abs))
778 status = _('removing %s\n') % m.uipath(abs)
779 repo.ui.status(status)
779 repo.ui.status(status)
780
780
781 renames = _findrenames(repo, m, added + unknown, removed + deleted,
781 renames = _findrenames(repo, m, added + unknown, removed + deleted,
782 similarity)
782 similarity)
783
783
784 if not dry_run:
784 if not dry_run:
785 _markchanges(repo, unknown + forgotten, deleted, renames)
785 _markchanges(repo, unknown + forgotten, deleted, renames)
786
786
787 for f in rejected:
787 for f in rejected:
788 if f in m.files():
788 if f in m.files():
789 return 1
789 return 1
790 return ret
790 return ret
791
791
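A minimal sketch of driving addremove() directly, assuming `repo` has some unknown and some missing files; the similarity value is taken here to be a 0-1 fraction, matching the score printed by _findrenames() later in this file:

m = matchall(repo)
ret = addremove(repo, m, prefix='', opts={}, dry_run=True, similarity=0.5)
# dry_run=True prints the 'adding ...' / 'removing ...' lines and any rename
# guesses above the similarity threshold, without touching the dirstate;
# ret is nonzero if an explicitly listed file or a subrepo could not be handled.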
792 def marktouched(repo, files, similarity=0.0):
792 def marktouched(repo, files, similarity=0.0):
793 '''Assert that files have somehow been operated upon. files are relative to
793 '''Assert that files have somehow been operated upon. files are relative to
794 the repo root.'''
794 the repo root.'''
795 m = matchfiles(repo, files)
795 m = matchfiles(repo, files)
796 rejected = []
796 rejected = []
797 m.bad = lambda x, y: rejected.append(x)
797 m.bad = lambda x, y: rejected.append(x)
798
798
799 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
799 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
800
800
801 if repo.ui.verbose:
801 if repo.ui.verbose:
802 unknownset = set(unknown + forgotten)
802 unknownset = set(unknown + forgotten)
803 toprint = unknownset.copy()
803 toprint = unknownset.copy()
804 toprint.update(deleted)
804 toprint.update(deleted)
805 for abs in sorted(toprint):
805 for abs in sorted(toprint):
806 if abs in unknownset:
806 if abs in unknownset:
807 status = _('adding %s\n') % abs
807 status = _('adding %s\n') % abs
808 else:
808 else:
809 status = _('removing %s\n') % abs
809 status = _('removing %s\n') % abs
810 repo.ui.status(status)
810 repo.ui.status(status)
811
811
812 renames = _findrenames(repo, m, added + unknown, removed + deleted,
812 renames = _findrenames(repo, m, added + unknown, removed + deleted,
813 similarity)
813 similarity)
814
814
815 _markchanges(repo, unknown + forgotten, deleted, renames)
815 _markchanges(repo, unknown + forgotten, deleted, renames)
816
816
817 for f in rejected:
817 for f in rejected:
818 if f in m.files():
818 if f in m.files():
819 return 1
819 return 1
820 return 0
820 return 0
821
821
822 def _interestingfiles(repo, matcher):
822 def _interestingfiles(repo, matcher):
823 '''Walk dirstate with matcher, looking for files that addremove would care
823 '''Walk dirstate with matcher, looking for files that addremove would care
824 about.
824 about.
825
825
826 This is different from dirstate.status because it doesn't care about
826 This is different from dirstate.status because it doesn't care about
827 whether files are modified or clean.'''
827 whether files are modified or clean.'''
828 added, unknown, deleted, removed, forgotten = [], [], [], [], []
828 added, unknown, deleted, removed, forgotten = [], [], [], [], []
829 audit_path = pathutil.pathauditor(repo.root)
829 audit_path = pathutil.pathauditor(repo.root)
830
830
831 ctx = repo[None]
831 ctx = repo[None]
832 dirstate = repo.dirstate
832 dirstate = repo.dirstate
833 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
833 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
834 full=False)
834 full=False)
835 for abs, st in walkresults.iteritems():
835 for abs, st in walkresults.iteritems():
836 dstate = dirstate[abs]
836 dstate = dirstate[abs]
837 if dstate == '?' and audit_path.check(abs):
837 if dstate == '?' and audit_path.check(abs):
838 unknown.append(abs)
838 unknown.append(abs)
839 elif dstate != 'r' and not st:
839 elif dstate != 'r' and not st:
840 deleted.append(abs)
840 deleted.append(abs)
841 elif dstate == 'r' and st:
841 elif dstate == 'r' and st:
842 forgotten.append(abs)
842 forgotten.append(abs)
843 # for finding renames
843 # for finding renames
844 elif dstate == 'r' and not st:
844 elif dstate == 'r' and not st:
845 removed.append(abs)
845 removed.append(abs)
846 elif dstate == 'a':
846 elif dstate == 'a':
847 added.append(abs)
847 added.append(abs)
848
848
849 return added, unknown, deleted, removed, forgotten
849 return added, unknown, deleted, removed, forgotten
850
850
851 def _findrenames(repo, matcher, added, removed, similarity):
851 def _findrenames(repo, matcher, added, removed, similarity):
852 '''Find renames from removed files to added ones.'''
852 '''Find renames from removed files to added ones.'''
853 renames = {}
853 renames = {}
854 if similarity > 0:
854 if similarity > 0:
855 for old, new, score in similar.findrenames(repo, added, removed,
855 for old, new, score in similar.findrenames(repo, added, removed,
856 similarity):
856 similarity):
857 if (repo.ui.verbose or not matcher.exact(old)
857 if (repo.ui.verbose or not matcher.exact(old)
858 or not matcher.exact(new)):
858 or not matcher.exact(new)):
859 repo.ui.status(_('recording removal of %s as rename to %s '
859 repo.ui.status(_('recording removal of %s as rename to %s '
860 '(%d%% similar)\n') %
860 '(%d%% similar)\n') %
861 (matcher.rel(old), matcher.rel(new),
861 (matcher.rel(old), matcher.rel(new),
862 score * 100))
862 score * 100))
863 renames[new] = old
863 renames[new] = old
864 return renames
864 return renames
865
865
866 def _markchanges(repo, unknown, deleted, renames):
866 def _markchanges(repo, unknown, deleted, renames):
867 '''Marks the files in unknown as added, the files in deleted as removed,
867 '''Marks the files in unknown as added, the files in deleted as removed,
868 and the files in renames as copied.'''
868 and the files in renames as copied.'''
869 wctx = repo[None]
869 wctx = repo[None]
870 wlock = repo.wlock()
870 wlock = repo.wlock()
871 try:
871 try:
872 wctx.forget(deleted)
872 wctx.forget(deleted)
873 wctx.add(unknown)
873 wctx.add(unknown)
874 for new, old in renames.iteritems():
874 for new, old in renames.iteritems():
875 wctx.copy(old, new)
875 wctx.copy(old, new)
876 finally:
876 finally:
877 wlock.release()
877 wlock.release()
878
878
879 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
879 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
880 """Update the dirstate to reflect the intent of copying src to dst. For
880 """Update the dirstate to reflect the intent of copying src to dst. For
881 various reasons, it might not end up with dst marked as copied from src.
881 various reasons, it might not end up with dst marked as copied from src.
882 """
882 """
883 origsrc = repo.dirstate.copied(src) or src
883 origsrc = repo.dirstate.copied(src) or src
884 if dst == origsrc: # copying back a copy?
884 if dst == origsrc: # copying back a copy?
885 if repo.dirstate[dst] not in 'mn' and not dryrun:
885 if repo.dirstate[dst] not in 'mn' and not dryrun:
886 repo.dirstate.normallookup(dst)
886 repo.dirstate.normallookup(dst)
887 else:
887 else:
888 if repo.dirstate[origsrc] == 'a' and origsrc == src:
888 if repo.dirstate[origsrc] == 'a' and origsrc == src:
889 if not ui.quiet:
889 if not ui.quiet:
890 ui.warn(_("%s has not been committed yet, so no copy "
890 ui.warn(_("%s has not been committed yet, so no copy "
891 "data will be stored for %s.\n")
891 "data will be stored for %s.\n")
892 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
892 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
893 if repo.dirstate[dst] in '?r' and not dryrun:
893 if repo.dirstate[dst] in '?r' and not dryrun:
894 wctx.add([dst])
894 wctx.add([dst])
895 elif not dryrun:
895 elif not dryrun:
896 wctx.copy(origsrc, dst)
896 wctx.copy(origsrc, dst)
897
897
898 def readrequires(opener, supported):
898 def readrequires(opener, supported):
899 '''Reads and parses .hg/requires and checks if all entries found
899 '''Reads and parses .hg/requires and checks if all entries found
900 are in the list of supported features.'''
900 are in the list of supported features.'''
901 requirements = set(opener.read("requires").splitlines())
901 requirements = set(opener.read("requires").splitlines())
902 missings = []
902 missings = []
903 for r in requirements:
903 for r in requirements:
904 if r not in supported:
904 if r not in supported:
905 if not r or not r[0].isalnum():
905 if not r or not r[0].isalnum():
906 raise error.RequirementError(_(".hg/requires file is corrupt"))
906 raise error.RequirementError(_(".hg/requires file is corrupt"))
907 missings.append(r)
907 missings.append(r)
908 missings.sort()
908 missings.sort()
909 if missings:
909 if missings:
910 raise error.RequirementError(
910 raise error.RequirementError(
911 _("repository requires features unknown to this Mercurial: %s")
911 _("repository requires features unknown to this Mercurial: %s")
912 % " ".join(missings),
912 % " ".join(missings),
913 hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
913 hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
914 " for more information"))
914 " for more information"))
915 return requirements
915 return requirements
916
916
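A minimal sketch of readrequires(), assuming a hypothetical repository whose .hg/requires lists one feature name per line:

supported = set(['revlogv1', 'store', 'fncache', 'dotencode'])
reqopener = vfs('/tmp/repo/.hg', audit=False)
try:
    requirements = readrequires(reqopener, supported)
except error.RequirementError, inst:
    print inst      # names any feature this Mercurial does not understand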
917 class filecachesubentry(object):
917 class filecachesubentry(object):
918 def __init__(self, path, stat):
918 def __init__(self, path, stat):
919 self.path = path
919 self.path = path
920 self.cachestat = None
920 self.cachestat = None
921 self._cacheable = None
921 self._cacheable = None
922
922
923 if stat:
923 if stat:
924 self.cachestat = filecachesubentry.stat(self.path)
924 self.cachestat = filecachesubentry.stat(self.path)
925
925
926 if self.cachestat:
926 if self.cachestat:
927 self._cacheable = self.cachestat.cacheable()
927 self._cacheable = self.cachestat.cacheable()
928 else:
928 else:
929 # None means we don't know yet
929 # None means we don't know yet
930 self._cacheable = None
930 self._cacheable = None
931
931
932 def refresh(self):
932 def refresh(self):
933 if self.cacheable():
933 if self.cacheable():
934 self.cachestat = filecachesubentry.stat(self.path)
934 self.cachestat = filecachesubentry.stat(self.path)
935
935
936 def cacheable(self):
936 def cacheable(self):
937 if self._cacheable is not None:
937 if self._cacheable is not None:
938 return self._cacheable
938 return self._cacheable
939
939
940 # we don't know yet, assume it is for now
940 # we don't know yet, assume it is for now
941 return True
941 return True
942
942
943 def changed(self):
943 def changed(self):
944 # no point in going further if we can't cache it
944 # no point in going further if we can't cache it
945 if not self.cacheable():
945 if not self.cacheable():
946 return True
946 return True
947
947
948 newstat = filecachesubentry.stat(self.path)
948 newstat = filecachesubentry.stat(self.path)
949
949
950 # we may not know if it's cacheable yet, check again now
950 # we may not know if it's cacheable yet, check again now
951 if newstat and self._cacheable is None:
951 if newstat and self._cacheable is None:
952 self._cacheable = newstat.cacheable()
952 self._cacheable = newstat.cacheable()
953
953
954 # check again
954 # check again
955 if not self._cacheable:
955 if not self._cacheable:
956 return True
956 return True
957
957
958 if self.cachestat != newstat:
958 if self.cachestat != newstat:
959 self.cachestat = newstat
959 self.cachestat = newstat
960 return True
960 return True
961 else:
961 else:
962 return False
962 return False
963
963
964 @staticmethod
964 @staticmethod
965 def stat(path):
965 def stat(path):
966 try:
966 try:
967 return util.cachestat(path)
967 return util.cachestat(path)
968 except OSError, e:
968 except OSError, e:
969 if e.errno != errno.ENOENT:
969 if e.errno != errno.ENOENT:
970 raise
970 raise
971
971
972 class filecacheentry(object):
972 class filecacheentry(object):
973 def __init__(self, paths, stat=True):
973 def __init__(self, paths, stat=True):
974 self._entries = []
974 self._entries = []
975 for path in paths:
975 for path in paths:
976 self._entries.append(filecachesubentry(path, stat))
976 self._entries.append(filecachesubentry(path, stat))
977
977
978 def changed(self):
978 def changed(self):
979 '''true if any entry has changed'''
979 '''true if any entry has changed'''
980 for entry in self._entries:
980 for entry in self._entries:
981 if entry.changed():
981 if entry.changed():
982 return True
982 return True
983 return False
983 return False
984
984
985 def refresh(self):
985 def refresh(self):
986 for entry in self._entries:
986 for entry in self._entries:
987 entry.refresh()
987 entry.refresh()
988
988
989 class filecache(object):
989 class filecache(object):
990 '''A property-like decorator that tracks files under .hg/ for updates.
990 '''A property-like decorator that tracks files under .hg/ for updates.
991
991
992 Records stat info in _filecache when first called.
992 Records stat info in _filecache when first called.
993
993
994 On subsequent calls, compares old stat info with new info, and recreates the
994 On subsequent calls, compares old stat info with new info, and recreates the
995 object when any of the files changes, updating the new stat info in
995 object when any of the files changes, updating the new stat info in
996 _filecache.
996 _filecache.
997
997
998 Mercurial either atomically renames or appends to files under .hg,
998 Mercurial either atomically renames or appends to files under .hg,
999 so to ensure the cache is reliable we need the filesystem to be able
999 so to ensure the cache is reliable we need the filesystem to be able
1000 to tell us if a file has been replaced. If it can't, we fall back to
1000 to tell us if a file has been replaced. If it can't, we fall back to
1001 recreating the object on every call (essentially the same behaviour as
1001 recreating the object on every call (essentially the same behaviour as
1002 propertycache).
1002 propertycache).
1003
1003
1004 '''
1004 '''
1005 def __init__(self, *paths):
1005 def __init__(self, *paths):
1006 self.paths = paths
1006 self.paths = paths
1007
1007
1008 def join(self, obj, fname):
1008 def join(self, obj, fname):
1009 """Used to compute the runtime path of a cached file.
1009 """Used to compute the runtime path of a cached file.
1010
1010
1011 Users should subclass filecache and provide their own version of this
1011 Users should subclass filecache and provide their own version of this
1012 function to call the appropriate join function on 'obj' (an instance
1012 function to call the appropriate join function on 'obj' (an instance
1013 of the class whose member function was decorated).
1013 of the class whose member function was decorated).
1014 """
1014 """
1015 return obj.join(fname)
1015 return obj.join(fname)
1016
1016
1017 def __call__(self, func):
1017 def __call__(self, func):
1018 self.func = func
1018 self.func = func
1019 self.name = func.__name__
1019 self.name = func.__name__
1020 return self
1020 return self
1021
1021
1022 def __get__(self, obj, type=None):
1022 def __get__(self, obj, type=None):
1023 # do we need to check if the file changed?
1023 # do we need to check if the file changed?
1024 if self.name in obj.__dict__:
1024 if self.name in obj.__dict__:
1025 assert self.name in obj._filecache, self.name
1025 assert self.name in obj._filecache, self.name
1026 return obj.__dict__[self.name]
1026 return obj.__dict__[self.name]
1027
1027
1028 entry = obj._filecache.get(self.name)
1028 entry = obj._filecache.get(self.name)
1029
1029
1030 if entry:
1030 if entry:
1031 if entry.changed():
1031 if entry.changed():
1032 entry.obj = self.func(obj)
1032 entry.obj = self.func(obj)
1033 else:
1033 else:
1034 paths = [self.join(obj, path) for path in self.paths]
1034 paths = [self.join(obj, path) for path in self.paths]
1035
1035
1036 # We stat -before- creating the object so our cache doesn't lie if
1036 # We stat -before- creating the object so our cache doesn't lie if
1037 # a writer modified between the time we read and stat
1037 # a writer modified between the time we read and stat
1038 entry = filecacheentry(paths, True)
1038 entry = filecacheentry(paths, True)
1039 entry.obj = self.func(obj)
1039 entry.obj = self.func(obj)
1040
1040
1041 obj._filecache[self.name] = entry
1041 obj._filecache[self.name] = entry
1042
1042
1043 obj.__dict__[self.name] = entry.obj
1043 obj.__dict__[self.name] = entry.obj
1044 return entry.obj
1044 return entry.obj
1045
1045
1046 def __set__(self, obj, value):
1046 def __set__(self, obj, value):
1047 if self.name not in obj._filecache:
1047 if self.name not in obj._filecache:
1048 # we add an entry for the missing value because X in __dict__
1048 # we add an entry for the missing value because X in __dict__
1049 # implies X in _filecache
1049 # implies X in _filecache
1050 paths = [self.join(obj, path) for path in self.paths]
1050 paths = [self.join(obj, path) for path in self.paths]
1051 ce = filecacheentry(paths, False)
1051 ce = filecacheentry(paths, False)
1052 obj._filecache[self.name] = ce
1052 obj._filecache[self.name] = ce
1053 else:
1053 else:
1054 ce = obj._filecache[self.name]
1054 ce = obj._filecache[self.name]
1055
1055
1056 ce.obj = value # update cached copy
1056 ce.obj = value # update cached copy
1057 obj.__dict__[self.name] = value # update copy returned by obj.x
1057 obj.__dict__[self.name] = value # update copy returned by obj.x
1058
1058
1059 def __delete__(self, obj):
1059 def __delete__(self, obj):
1060 try:
1060 try:
1061 del obj.__dict__[self.name]
1061 del obj.__dict__[self.name]
1062 except KeyError:
1062 except KeyError:
1063 raise AttributeError(self.name)
1063 raise AttributeError(self.name)
1064
1064
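A minimal sketch of subclassing filecache as the docstring suggests, loosely modelled on how a repository-level cache could be wired up; every name below is hypothetical:

class repofilecache(filecache):
    """resolve tracked filenames against the object's vfs"""
    def join(self, obj, fname):
        return obj.vfs.join(fname)

class fakerepo(object):
    def __init__(self, vfs):
        self.vfs = vfs
        self._filecache = {}     # required by the descriptor above

    @repofilecache('bookmarks')
    def bookmarks(self):
        return self.vfs.tryread('bookmarks').splitlines()

repo = fakerepo(vfs('/tmp/repo/.hg', audit=False))
print repo.bookmarks    # re-read from .hg/bookmarks only when its stat changes;
                        # assigning or deleting the attribute updates the cache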
1065 class dirs(object):
1065 class dirs(object):
1066 '''a multiset of directory names from a dirstate or manifest'''
1066 '''a multiset of directory names from a dirstate or manifest'''
1067
1067
1068 def __init__(self, map, skip=None):
1068 def __init__(self, map, skip=None):
1069 self._dirs = {}
1069 self._dirs = {}
1070 addpath = self.addpath
1070 addpath = self.addpath
1071 if util.safehasattr(map, 'iteritems') and skip is not None:
1071 if util.safehasattr(map, 'iteritems') and skip is not None:
1072 for f, s in map.iteritems():
1072 for f, s in map.iteritems():
1073 if s[0] != skip:
1073 if s[0] != skip:
1074 addpath(f)
1074 addpath(f)
1075 else:
1075 else:
1076 for f in map:
1076 for f in map:
1077 addpath(f)
1077 addpath(f)
1078
1078
1079 def addpath(self, path):
1079 def addpath(self, path):
1080 dirs = self._dirs
1080 dirs = self._dirs
1081 for base in finddirs(path):
1081 for base in finddirs(path):
1082 if base in dirs:
1082 if base in dirs:
1083 dirs[base] += 1
1083 dirs[base] += 1
1084 return
1084 return
1085 dirs[base] = 1
1085 dirs[base] = 1
1086
1086
1087 def delpath(self, path):
1087 def delpath(self, path):
1088 dirs = self._dirs
1088 dirs = self._dirs
1089 for base in finddirs(path):
1089 for base in finddirs(path):
1090 if dirs[base] > 1:
1090 if dirs[base] > 1:
1091 dirs[base] -= 1
1091 dirs[base] -= 1
1092 return
1092 return
1093 del dirs[base]
1093 del dirs[base]
1094
1094
1095 def __iter__(self):
1095 def __iter__(self):
1096 return self._dirs.iterkeys()
1096 return self._dirs.iterkeys()
1097
1097
1098 def __contains__(self, d):
1098 def __contains__(self, d):
1099 return d in self._dirs
1099 return d in self._dirs
1100
1100
1101 if util.safehasattr(parsers, 'dirs'):
1101 if util.safehasattr(parsers, 'dirs'):
1102 dirs = parsers.dirs
1102 dirs = parsers.dirs
1103
1103
1104 def finddirs(path):
1104 def finddirs(path):
1105 pos = path.rfind('/')
1105 pos = path.rfind('/')
1106 while pos != -1:
1106 while pos != -1:
1107 yield path[:pos]
1107 yield path[:pos]
1108 pos = path.rfind('/', 0, pos)
1108 pos = path.rfind('/', 0, pos)
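A minimal sketch of finddirs() and the pure-Python dirs multiset above (parsers.dirs, when available, behaves the same way):

print list(finddirs('a/b/c'))    # -> ['a/b', 'a']
d = dirs(['a/b/c', 'a/b/d', 'e/f'])
print 'a/b' in d                 # True: files live under a/b
d.delpath('a/b/c')
print 'a/b' in d                 # True: a/b/d keeps the directory alive
d.delpath('a/b/d')
print 'a/b' in d                 # False: the last entry under a/b is gone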
@@ -1,326 +1,326 b''
1 Preparing the subrepository 'sub2'
2
3 $ hg init sub2
4 $ echo sub2 > sub2/sub2
5 $ hg add -R sub2
6 adding sub2/sub2 (glob)
7 $ hg commit -R sub2 -m "sub2 import"
8
9 Preparing the 'sub1' repo which depends on the subrepo 'sub2'
10
11 $ hg init sub1
12 $ echo sub1 > sub1/sub1
13 $ echo "sub2 = ../sub2" > sub1/.hgsub
14 $ hg clone sub2 sub1/sub2
15 updating to branch default
16 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
17 $ hg add -R sub1
18 adding sub1/.hgsub (glob)
19 adding sub1/sub1 (glob)
20 $ hg commit -R sub1 -m "sub1 import"
21
22 Preparing the 'main' repo which depends on the subrepo 'sub1'
23
24 $ hg init main
25 $ echo main > main/main
26 $ echo "sub1 = ../sub1" > main/.hgsub
27 $ hg clone sub1 main/sub1
28 updating to branch default
29 cloning subrepo sub2 from $TESTTMP/sub2
30 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
31 $ hg add -R main
32 adding main/.hgsub (glob)
33 adding main/main (glob)
34 $ hg commit -R main -m "main import"
35
36 Cleaning both repositories, just as a clone -U
37
38 $ hg up -C -R sub2 null
39 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
40 $ hg up -C -R sub1 null
41 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
42 $ hg up -C -R main null
43 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
44 $ rm -rf main/sub1
45 $ rm -rf sub1/sub2
46
47 Clone main
48
49 $ hg clone main cloned
50 updating to branch default
51 cloning subrepo sub1 from $TESTTMP/sub1
52 cloning subrepo sub1/sub2 from $TESTTMP/sub2 (glob)
53 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
54
55 Checking cloned repo ids
56
57 $ printf "cloned " ; hg id -R cloned
58 cloned 7f491f53a367 tip
59 $ printf "cloned/sub1 " ; hg id -R cloned/sub1
60 cloned/sub1 fc3b4ce2696f tip
61 $ printf "cloned/sub1/sub2 " ; hg id -R cloned/sub1/sub2
62 cloned/sub1/sub2 c57a0840e3ba tip
63
64 debugsub output for main and sub1
65
66 $ hg debugsub -R cloned
67 path sub1
68 source ../sub1
69 revision fc3b4ce2696f7741438c79207583768f2ce6b0dd
70 $ hg debugsub -R cloned/sub1
71 path sub2
72 source ../sub2
73 revision c57a0840e3badd667ef3c3ef65471609acb2ba3c
74
75 Modifying deeply nested 'sub2'
76
77 $ echo modified > cloned/sub1/sub2/sub2
78 $ hg commit --subrepos -m "deep nested modif should trigger a commit" -R cloned
79 committing subrepository sub1
80 committing subrepository sub1/sub2 (glob)
81
82 Checking modified node ids
83
84 $ printf "cloned " ; hg id -R cloned
85 cloned ffe6649062fe tip
86 $ printf "cloned/sub1 " ; hg id -R cloned/sub1
87 cloned/sub1 2ecb03bf44a9 tip
88 $ printf "cloned/sub1/sub2 " ; hg id -R cloned/sub1/sub2
89 cloned/sub1/sub2 53dd3430bcaf tip
90
91 debugsub output for main and sub1
92
93 $ hg debugsub -R cloned
94 path sub1
95 source ../sub1
96 revision 2ecb03bf44a94e749e8669481dd9069526ce7cb9
97 $ hg debugsub -R cloned/sub1
98 path sub2
99 source ../sub2
100 revision 53dd3430bcaf5ab4a7c48262bcad6d441f510487
101
102 Check that deep archiving works
103
104 $ cd cloned
105 $ echo 'test' > sub1/sub2/test.txt
106 $ hg --config extensions.largefiles=! add sub1/sub2/test.txt
107 $ mkdir sub1/sub2/folder
108 $ echo 'subfolder' > sub1/sub2/folder/test.txt
109 $ hg ci -ASm "add test.txt"
110 adding sub1/sub2/folder/test.txt (glob)
110 adding sub1/sub2/folder/test.txt
111 committing subrepository sub1
112 committing subrepository sub1/sub2 (glob)
113
114 .. but first take a detour through some deep removal testing
115
116 $ hg remove -S -I 're:.*.txt' .
117 removing sub1/sub2/folder/test.txt (glob)
118 removing sub1/sub2/test.txt (glob)
119 $ hg status -S
120 R sub1/sub2/folder/test.txt
121 R sub1/sub2/test.txt
122 $ hg update -Cq
123 $ hg remove -I 're:.*.txt' sub1
124 $ hg status -S
125 $ hg remove sub1/sub2/folder/test.txt
126 $ hg remove sub1/.hgsubstate
127 $ hg status -S
128 R sub1/.hgsubstate
129 R sub1/sub2/folder/test.txt
130 $ hg update -Cq
131 $ touch sub1/foo
132 $ hg forget sub1/sub2/folder/test.txt
133 $ rm sub1/sub2/test.txt
134
135 Test relative path printing + subrepos
136 $ mkdir -p foo/bar
137 $ cd foo
138 $ touch bar/abc
139 $ hg addremove -S ..
140 adding ../sub1/sub2/folder/test.txt (glob)
141 removing ../sub1/sub2/test.txt (glob)
142 adding ../sub1/foo (glob)
143 adding bar/abc (glob)
144 $ cd ..
145 $ hg status -S
146 A foo/bar/abc
147 A sub1/foo
148 R sub1/sub2/test.txt
149 $ hg update -Cq
150 $ touch sub1/sub2/folder/bar
151 $ hg addremove sub1/sub2
152 adding sub1/sub2/folder/bar (glob)
153 $ hg status -S
154 A sub1/sub2/folder/bar
155 ? foo/bar/abc
156 ? sub1/foo
157 $ hg update -Cq
158 $ hg addremove sub1
159 adding sub1/sub2/folder/bar (glob)
160 adding sub1/foo (glob)
161 $ hg update -Cq
162 $ rm sub1/sub2/folder/test.txt
163 $ rm sub1/sub2/test.txt
164 $ hg ci -ASm "remove test.txt"
165 adding sub1/sub2/folder/bar (glob)
165 adding sub1/sub2/folder/bar
166 removing sub1/sub2/folder/test.txt (glob)
166 removing sub1/sub2/folder/test.txt
167 removing sub1/sub2/test.txt (glob)
167 removing sub1/sub2/test.txt
168 adding sub1/foo (glob)
168 adding sub1/foo
169 adding foo/bar/abc
170 committing subrepository sub1
171 committing subrepository sub1/sub2 (glob)
172 $ hg rollback -q
173 $ hg up -Cq
174
175 $ hg --config extensions.largefiles=! archive -S ../archive_all
176 $ find ../archive_all | sort
177 ../archive_all
178 ../archive_all/.hg_archival.txt
179 ../archive_all/.hgsub
180 ../archive_all/.hgsubstate
181 ../archive_all/main
182 ../archive_all/sub1
183 ../archive_all/sub1/.hgsub
184 ../archive_all/sub1/.hgsubstate
185 ../archive_all/sub1/sub1
186 ../archive_all/sub1/sub2
187 ../archive_all/sub1/sub2/folder
188 ../archive_all/sub1/sub2/folder/test.txt
189 ../archive_all/sub1/sub2/sub2
190 ../archive_all/sub1/sub2/test.txt
191
192 Check that archive -X works in deep subrepos
193
194 $ hg --config extensions.largefiles=! archive -S -X '**test*' ../archive_exclude
195 $ find ../archive_exclude | sort
196 ../archive_exclude
197 ../archive_exclude/.hg_archival.txt
198 ../archive_exclude/.hgsub
199 ../archive_exclude/.hgsubstate
200 ../archive_exclude/main
201 ../archive_exclude/sub1
202 ../archive_exclude/sub1/.hgsub
203 ../archive_exclude/sub1/.hgsubstate
204 ../archive_exclude/sub1/sub1
205 ../archive_exclude/sub1/sub2
206 ../archive_exclude/sub1/sub2/sub2
207
208 $ hg --config extensions.largefiles=! archive -S -I '**test*' ../archive_include
209 $ find ../archive_include | sort
210 ../archive_include
211 ../archive_include/sub1
212 ../archive_include/sub1/sub2
213 ../archive_include/sub1/sub2/folder
214 ../archive_include/sub1/sub2/folder/test.txt
215 ../archive_include/sub1/sub2/test.txt
216
217 Check that deep archive works with largefiles (which overrides hgsubrepo impl)
218 This also tests the repo.ui regression in 43fb170a23bd, and that lf subrepo
219 subrepos are archived properly.
220 Note that add --large through a subrepo currently adds the file as a normal file
221
222 $ echo "large" > sub1/sub2/large.bin
223 $ hg --config extensions.largefiles= add --large -R sub1/sub2 sub1/sub2/large.bin
224 $ echo "large" > large.bin
225 $ hg --config extensions.largefiles= add --large large.bin
226 $ hg --config extensions.largefiles= ci -S -m "add large files"
227 committing subrepository sub1
228 committing subrepository sub1/sub2 (glob)
229
230 $ hg --config extensions.largefiles= archive -S ../archive_lf
231 $ find ../archive_lf | sort
232 ../archive_lf
233 ../archive_lf/.hg_archival.txt
234 ../archive_lf/.hgsub
235 ../archive_lf/.hgsubstate
236 ../archive_lf/large.bin
237 ../archive_lf/main
238 ../archive_lf/sub1
239 ../archive_lf/sub1/.hgsub
240 ../archive_lf/sub1/.hgsubstate
241 ../archive_lf/sub1/sub1
242 ../archive_lf/sub1/sub2
243 ../archive_lf/sub1/sub2/folder
244 ../archive_lf/sub1/sub2/folder/test.txt
245 ../archive_lf/sub1/sub2/large.bin
246 ../archive_lf/sub1/sub2/sub2
247 ../archive_lf/sub1/sub2/test.txt
248 $ rm -rf ../archive_lf
249
250 Exclude large files from main and sub-sub repo
251
252 $ hg --config extensions.largefiles= archive -S -X '**.bin' ../archive_lf
253 $ find ../archive_lf | sort
254 ../archive_lf
255 ../archive_lf/.hg_archival.txt
256 ../archive_lf/.hgsub
257 ../archive_lf/.hgsubstate
258 ../archive_lf/main
259 ../archive_lf/sub1
260 ../archive_lf/sub1/.hgsub
261 ../archive_lf/sub1/.hgsubstate
262 ../archive_lf/sub1/sub1
263 ../archive_lf/sub1/sub2
264 ../archive_lf/sub1/sub2/folder
265 ../archive_lf/sub1/sub2/folder/test.txt
266 ../archive_lf/sub1/sub2/sub2
267 ../archive_lf/sub1/sub2/test.txt
268 $ rm -rf ../archive_lf
269
270 Exclude normal files from main and sub-sub repo
271
272 $ hg --config extensions.largefiles= archive -S -X '**.txt' ../archive_lf
273 $ find ../archive_lf | sort
274 ../archive_lf
275 ../archive_lf/.hgsub
276 ../archive_lf/.hgsubstate
277 ../archive_lf/large.bin
278 ../archive_lf/main
279 ../archive_lf/sub1
280 ../archive_lf/sub1/.hgsub
281 ../archive_lf/sub1/.hgsubstate
282 ../archive_lf/sub1/sub1
283 ../archive_lf/sub1/sub2
284 ../archive_lf/sub1/sub2/large.bin
285 ../archive_lf/sub1/sub2/sub2
286 $ rm -rf ../archive_lf
287
288 Include normal files from within a largefiles subrepo
289
290 $ hg --config extensions.largefiles= archive -S -I '**.txt' ../archive_lf
291 $ find ../archive_lf | sort
292 ../archive_lf
293 ../archive_lf/.hg_archival.txt
294 ../archive_lf/sub1
295 ../archive_lf/sub1/sub2
296 ../archive_lf/sub1/sub2/folder
297 ../archive_lf/sub1/sub2/folder/test.txt
298 ../archive_lf/sub1/sub2/test.txt
299 $ rm -rf ../archive_lf
300
301 Include large files from within a largefiles subrepo
302
303 $ hg --config extensions.largefiles= archive -S -I '**.bin' ../archive_lf
304 $ find ../archive_lf | sort
305 ../archive_lf
306 ../archive_lf/large.bin
307 ../archive_lf/sub1
308 ../archive_lf/sub1/sub2
309 ../archive_lf/sub1/sub2/large.bin
310 $ rm -rf ../archive_lf
311
312 Find an exact largefile match in a largefiles subrepo
313
314 $ hg --config extensions.largefiles= archive -S -I 'sub1/sub2/large.bin' ../archive_lf
315 $ find ../archive_lf | sort
316 ../archive_lf
317 ../archive_lf/sub1
318 ../archive_lf/sub1/sub2
319 ../archive_lf/sub1/sub2/large.bin
320 $ rm -rf ../archive_lf
321
322 Find an exact match to a standin (should archive nothing)
323 $ hg --config extensions.largefiles= archive -S -I 'sub/sub2/.hglf/large.bin' ../archive_lf
324 $ find ../archive_lf 2> /dev/null | sort
325
326 $ cd ..
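
The archive runs in this test exercise '-I'/'-X' patterns such as '**.bin' and '**.txt' across nested subrepositories. As a rough, illustrative sketch only (not Mercurial's matcher implementation), the following standalone Python snippet filters a path list with the standard-library fnmatch module, whose '*' also crosses '/' separators and therefore approximates the '**' patterns used above; the path list and the filter_paths helper are invented for the example.

import fnmatch

def filter_paths(paths, include=None, exclude=None):
    # Keep a path when it matches at least one include pattern (if any are
    # given) and no exclude pattern; loosely mirrors combining -I and -X.
    kept = []
    for path in paths:
        if include and not any(fnmatch.fnmatch(path, pat) for pat in include):
            continue
        if exclude and any(fnmatch.fnmatch(path, pat) for pat in exclude):
            continue
        kept.append(path)
    return kept

# Invented path list loosely modeled on the archive contents listed above.
paths = [
    'main', 'large.bin',
    'sub1/sub1', 'sub1/sub2/sub2',
    'sub1/sub2/test.txt', 'sub1/sub2/folder/test.txt',
    'sub1/sub2/large.bin',
]
print(filter_paths(paths, exclude=['**.bin']))   # roughly: archive -S -X '**.bin'
print(filter_paths(paths, include=['**.txt']))   # roughly: archive -S -I '**.txt'

Mercurial's own pattern handling is richer (rooted globs, 're:' patterns, and per-subrepo narrowing, as seen with remove -S -I above), so this sketch is only meant to make the include/exclude listings easier to follow.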