##// END OF EJS Templates
py3: replace os.sep with pycompat.ossep (part 3 of 4)
Pulkit Goyal -
r30615:bb77654d default
parent child Browse files
Show More
@@ -1,3450 +1,3450 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import tempfile
13 import tempfile
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 bin,
17 bin,
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 )
22 )
23
23
24 from . import (
24 from . import (
25 bookmarks,
25 bookmarks,
26 changelog,
26 changelog,
27 copies,
27 copies,
28 crecord as crecordmod,
28 crecord as crecordmod,
29 dirstateguard as dirstateguardmod,
29 dirstateguard as dirstateguardmod,
30 encoding,
30 encoding,
31 error,
31 error,
32 formatter,
32 formatter,
33 graphmod,
33 graphmod,
34 lock as lockmod,
34 lock as lockmod,
35 match as matchmod,
35 match as matchmod,
36 mergeutil,
36 mergeutil,
37 obsolete,
37 obsolete,
38 patch,
38 patch,
39 pathutil,
39 pathutil,
40 phases,
40 phases,
41 pycompat,
41 pycompat,
42 repair,
42 repair,
43 revlog,
43 revlog,
44 revset,
44 revset,
45 scmutil,
45 scmutil,
46 templatekw,
46 templatekw,
47 templater,
47 templater,
48 util,
48 util,
49 )
49 )
# Convenience alias: an in-memory file object (BytesIO/StringIO as
# appropriate for the Python version, chosen by util).
stringio = util.stringio
51
51
def ishunk(x):
    """Tell whether *x* is a patch hunk object.

    Both the curses-based interactive UI and the plain record code have
    their own hunk class; either one counts.
    """
    return isinstance(x, (crecordmod.uihunk, patch.recordhunk))
55
55
def newandmodified(chunks, originalchunks):
    """Return the set of filenames whose hunks create new files and are
    not present in *originalchunks*.

    These are files that were both added and then modified during the
    interactive record session, so a backup of them is needed.
    """
    names = set()
    for c in chunks:
        if ishunk(c) and c.header.isnewfile() and c not in originalchunks:
            names.add(c.header.filename())
    return names
63
63
def parsealiases(cmd):
    """Split a command-table key like "^log|history" into its alias list.

    The leading "^" (marking the command as important in help output) is
    stripped before splitting on "|".
    """
    stripped = cmd.lstrip("^")
    return stripped.split("|")
66
66
def setupwrapcolorwrite(ui):
    """Wrap ui.write so that diff output is labeled (and thus colorizable).

    Returns the original ui.write so the caller can restore it when done.
    """
    realwrite = ui.write

    def labeledwrite(*args, **kwargs):
        baselabel = kwargs.pop('label', '')
        # difflabel yields (chunk, label) pairs for each piece of output
        for chunk, chunklabel in patch.difflabel(lambda: args):
            realwrite(chunk, label=baselabel + chunklabel)

    setattr(ui, 'write', labeledwrite)
    return realwrite
79
79
def filterchunks(ui, originalhunks, usecurses, testfile, operation=None):
    """Let the user filter *originalhunks*; return (chunks, options).

    With *usecurses*, the curses chunk selector is used; *testfile*, when
    set, drives the selector from a script (used by the test suite).
    Otherwise the plain text-mode prompt loop in patch.filterpatch runs.
    """
    if not usecurses:
        return patch.filterpatch(ui, originalhunks, operation)

    if testfile:
        recordfn = crecordmod.testdecorator(testfile,
                                            crecordmod.testchunkselector)
    else:
        recordfn = crecordmod.chunkselector

    return crecordmod.filterpatch(ui, originalhunks, recordfn, operation)
92
92
def recordfilter(ui, originalhunks, operation=None):
    """Prompt the user to filter *originalhunks*; return (hunks, options).

    *operation* is used to build ui messages indicating what kind of
    filtering the user is doing: reverting, committing, shelving, etc.
    (see patch.filterpatch).
    """
    usecurses = crecordmod.checkcurses(ui)
    testfile = ui.config('experimental', 'crecordtest', None)
    # ui.write is temporarily wrapped so diff output comes out labeled
    oldwrite = setupwrapcolorwrite(ui)
    try:
        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
                                          testfile, operation)
    finally:
        # always restore the original write, even if filtering aborted
        ui.write = oldwrite
    return newchunks, newopts
109
109
def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
             filterfn, *pats, **opts):
    """Interactively select changes and hand them to *commitfunc*.

    The working directory is diffed, the user filters the resulting hunks
    (via *filterfn*), the selected hunks are applied to a clean working
    copy, *commitfunc* records them, and the unselected changes are then
    restored from backups so the user can keep working.
    """
    from . import merge as mergemod
    if not ui.interactive():
        if cmdsuggest:
            msg = _('running non-interactively, use %s instead') % cmdsuggest
        else:
            msg = _('running non-interactively')
        raise error.Abort(msg)

    # make sure username is set before going interactive
    if not opts.get('user'):
        ui.username() # raise exception, username not provided

    def recordfunc(ui, repo, message, match, opts):
        """This is generic record driver.

        Its job is to interactively filter local changes, and
        accordingly prepare working directory into a state in which the
        job can be delegated to a non-interactive commit command such as
        'commit' or 'qrefresh'.

        After the actual job is done by non-interactive command, the
        working directory is restored to its original state.

        In the end we'll record interesting changes, and everything else
        will be left in place, so the user can continue working.
        """

        checkunfinished(repo, commit=True)
        wctx = repo[None]
        merge = len(wctx.parents()) > 1
        if merge:
            raise error.Abort(_('cannot partially commit a merge '
                                '(use "hg commit" instead)'))

        def fail(f, msg):
            raise error.Abort('%s: %s' % (f, msg))

        force = opts.get('force')
        if not force:
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        status = repo.status(match=match)
        if not force:
            repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
        # git-style diffs with function context make the hunks easier to
        # review and keep file mode/rename information
        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        diffopts.showfunc = True
        originaldiff = patch.diff(repo, changes=status, opts=diffopts)
        originalchunks = patch.parsepatch(originaldiff)

        # 1. filter patch, since we are intending to apply subset of it
        try:
            chunks, newopts = filterfn(ui, originalchunks)
        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)
        opts.update(newopts)

        # We need to keep a backup of files that have been newly added and
        # modified during the recording process because there is a previous
        # version without the edit in the workdir
        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        contenders = set()
        for h in chunks:
            try:
                contenders.update(set(h.files()))
            except AttributeError:
                # headers have files(); bare hunks do not
                pass

        changed = status.modified + status.added + status.removed
        newfiles = [f for f in changed if f in contenders]
        if not newfiles:
            ui.status(_('no changes to record\n'))
            return 0

        modified = set(status.modified)

        # 2. backup changed files, so we can restore them in the end

        if backupall:
            tobackup = changed
        else:
            tobackup = [f for f in newfiles
                        if f in modified or f in newlyaddedandmodifiedfiles]
        backups = {}
        if tobackup:
            backupdir = repo.join('record-backups')
            try:
                os.mkdir(backupdir)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
        try:
            # backup continues
            for f in tobackup:
                fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
                                               dir=backupdir)
                os.close(fd)
                ui.debug('backup %r as %r\n' % (f, tmpname))
                util.copyfile(repo.wjoin(f), tmpname, copystat=True)
                backups[f] = tmpname

            fp = stringio()
            for c in chunks:
                fname = c.filename()
                if fname in backups:
                    c.write(fp)
            dopatch = fp.tell()
            fp.seek(0)

            # 2.5 optionally review / modify patch in text editor
            if opts.get('review', False):
                patchtext = (crecordmod.diffhelptext
                             + crecordmod.patchhelptext
                             + fp.read())
                reviewedpatch = ui.edit(patchtext, "",
                                        extra={"suffix": ".diff"})
                fp.truncate(0)
                fp.write(reviewedpatch)
                fp.seek(0)

            # newly-added-and-modified files have no clean base version:
            # remove them so the revert below produces a clean tree
            for c in newlyaddedandmodifiedfiles:
                os.unlink(repo.wjoin(c))
            # 3a. apply filtered patch to clean repo (clean)
            if backups:
                # Equivalent to hg.revert
                m = scmutil.matchfiles(repo, backups.keys())
                mergemod.update(repo, repo.dirstate.p1(),
                                False, True, matcher=m)

            # 3b. (apply)
            if dopatch:
                try:
                    ui.debug('applying patch\n')
                    ui.debug(fp.getvalue())
                    patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                except patch.PatchError as err:
                    raise error.Abort(str(err))
            del fp

            # 4. We prepared working directory according to filtered
            #    patch. Now is the time to delegate the job to
            #    commit/qrefresh or the like!

            # Make all of the pathnames absolute.
            newfiles = [repo.wjoin(nf) for nf in newfiles]
            return commitfunc(ui, repo, *newfiles, **opts)
        finally:
            # 5. finally restore backed-up files
            try:
                dirstate = repo.dirstate
                for realname, tmpname in backups.iteritems():
                    ui.debug('restoring %r to %r\n' % (tmpname, realname))

                    if dirstate[realname] == 'n':
                        # without normallookup, restoring timestamp
                        # may cause partially committed files
                        # to be treated as unmodified
                        dirstate.normallookup(realname)

                    # copystat=True here and above are a hack to trick any
                    # editors that have f open that we haven't modified them.
                    #
                    # Also note that this racy as an editor could notice the
                    # file's mtime before we've finished writing it.
                    util.copyfile(tmpname, repo.wjoin(realname), copystat=True)
                    os.unlink(tmpname)
                if tobackup:
                    os.rmdir(backupdir)
            except OSError:
                # best effort: leftover backups are merely cosmetic
                pass

    def recordinwlock(ui, repo, message, match, opts):
        # hold the working-copy lock across the whole record operation
        with repo.wlock():
            return recordfunc(ui, repo, message, match, opts)

    return commit(ui, repo, recordinwlock, pats, opts)
290
290
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}

    if cmd in table:
        # short-circuit exact matches, "log" alias beats "^log|history"
        keys = [cmd]
    else:
        keys = table.keys()

    allcmds = []
    for entry in keys:
        aliases = parsealiases(entry)
        allcmds.extend(aliases)
        found = None
        if cmd in aliases:
            found = cmd
        elif not strict:
            # unambiguous-prefix match: first alias starting with cmd
            found = next((a for a in aliases if a.startswith(cmd)), None)
        if found is not None:
            if aliases[0].startswith("debug") or found.startswith("debug"):
                debugchoice[found] = (aliases, table[entry])
            else:
                choice[found] = (aliases, table[entry])

    # debug commands only surface when nothing normal matched
    if not choice and debugchoice:
        choice = debugchoice

    return choice, allcmds
328
328
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string.

    Raises AmbiguousCommand when several commands match *cmd* and
    UnknownCommand when none does.
    """
    choice, allcmds = findpossible(cmd, table, strict)

    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        # sorted() rather than .keys()/.sort(): dict views under Python 3
        # have no sort() method
        clist = sorted(choice)
        raise error.AmbiguousCommand(cmd, clist)

    if choice:
        # dict views are not indexable under Python 3, so take the sole
        # entry via an iterator instead of choice.values()[0]
        return next(iter(choice.values()))

    raise error.UnknownCommand(cmd, allcmds)
345
345
def findrepo(p):
    """Walk up from path *p* looking for a directory containing ".hg".

    Returns the repository root, or None when the filesystem root is
    reached without finding one.
    """
    current = p
    while not os.path.isdir(os.path.join(current, ".hg")):
        parent = os.path.dirname(current)
        if parent == current:
            # dirname() reached a fixed point: filesystem root
            return None
        current = parent
    return current
353
353
def bailifchanged(repo, merge=True):
    """Abort if the working directory has uncommitted changes.

    With merge=True (the default), an uncommitted merge (a second
    dirstate parent) also aborts. Subrepositories are checked
    recursively.
    """
    if merge and repo.dirstate.p2() != nullid:
        raise error.Abort(_('outstanding uncommitted merge'))
    # first four status fields: modified, added, removed, deleted
    if any(repo.status()[:4]):
        raise error.Abort(_('uncommitted changes'))
    ctx = repo[None]
    for s in sorted(ctx.substate):
        ctx.sub(s).bailifchanged()
363
363
def logmessage(ui, opts):
    """Extract the commit message from the -m and -l options.

    Aborts when both --message and --logfile are given; a logfile of '-'
    reads the message from stdin.
    """
    message = opts.get('message')
    logfile = opts.get('logfile')

    if message and logfile:
        raise error.Abort(_('options --message and --logfile are mutually '
                            'exclusive'))
    if message or not logfile:
        return message
    try:
        if logfile == '-':
            return ui.fin.read()
        # normalize line endings while reading the message file
        return '\n'.join(util.readfile(logfile).splitlines())
    except IOError as inst:
        raise error.Abort(_("can't read commit message '%s': %s") %
                          (logfile, inst.strerror))
382
382
def mergeeditform(ctxorbool, baseformname):
    """return appropriate editform name (referencing a committemplate)

    'ctxorbool' is either a ctx to be committed, or a bool indicating whether
    merging is committed.

    This returns baseformname with '.merge' appended if it is a merge,
    otherwise '.normal' is appended.
    """
    if isinstance(ctxorbool, bool):
        ismerge = ctxorbool
    else:
        # a changectx is a merge when it has more than one parent
        ismerge = 1 < len(ctxorbool.parents())
    suffix = ".merge" if ismerge else ".normal"
    return baseformname + suffix
399
399
def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
                    editform='', **opts):
    """get appropriate commit message editor according to '--edit' option

    'finishdesc' is a function to be called with edited commit message
    (= 'description' of the new changeset) just after editing, but
    before checking empty-ness. It should return actual text to be
    stored into history. This allows to change description before
    storing.

    'extramsg' is a extra message to be shown in the editor instead of
    'Leave message empty to abort commit' line. 'HG: ' prefix and EOL
    is automatically added.

    'editform' is a dot-separated list of names, to distinguish
    the purpose of commit text editing.

    'getcommiteditor' returns 'commitforceeditor' regardless of
    'edit', if one of 'finishdesc' or 'extramsg' is specified, because
    they are specific for usage in MQ.
    """
    if edit or finishdesc or extramsg:
        def forcededitor(r, c, s):
            return commitforceeditor(r, c, s,
                                     finishdesc=finishdesc,
                                     extramsg=extramsg,
                                     editform=editform)
        return forcededitor
    if editform:
        def formeditor(r, c, s):
            return commiteditor(r, c, s, editform=editform)
        return formeditor
    return commiteditor
430
430
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    limit = opts.get('limit')
    if not limit:
        # absent or falsy (None, '', 0): no limit
        return None
    try:
        limit = int(limit)
    except ValueError:
        raise error.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise error.Abort(_('limit must be positive'))
    return limit
444
444
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand the output-filename pattern *pat*.

    Supported %-escapes (each one only when the corresponding argument
    is available):

      %%  literal '%'              %b  basename of the repo root
      %H  full changeset hash      %h  short hash
      %R  changeset revision       %r  revision zero-padded to revwidth
      %m  sanitized description    %N  total number of patches
      %n  sequence number, zero-padded to the width of str(total)
      %s  basename of pathname     %d  dirname of pathname or '.'
      %p  pathname

    Raises Abort on an escape with no expander registered.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        # %m: collapse every non-word character of the description to '_'
        # (raw string: '\w' is a regex escape, not a string escape)
        'm': lambda: re.sub(r'[^\w]', '_', str(desc))
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        # the two original "if node:" blocks are merged; behavior is
        # identical
        if node:
            expander.update(node_expander)
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                # consume the escape character following '%'
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise error.Abort(_("invalid format spec '%%%s' in output filename") %
                          inst.args[0])
490
490
491 class _unclosablefile(object):
491 class _unclosablefile(object):
492 def __init__(self, fp):
492 def __init__(self, fp):
493 self._fp = fp
493 self._fp = fp
494
494
495 def close(self):
495 def close(self):
496 pass
496 pass
497
497
498 def __iter__(self):
498 def __iter__(self):
499 return iter(self._fp)
499 return iter(self._fp)
500
500
501 def __getattr__(self, attr):
501 def __getattr__(self, attr):
502 return getattr(self._fp, attr)
502 return getattr(self._fp, attr)
503
503
504 def __enter__(self):
504 def __enter__(self):
505 return self
505 return self
506
506
507 def __exit__(self, exc_type, exc_value, exc_tb):
507 def __exit__(self, exc_type, exc_value, exc_tb):
508 pass
508 pass
509
509
def makefileobj(repo, pat, node=None, desc=None, total=None,
                seqno=None, revwidth=None, mode='wb', modemap=None,
                pathname=None):
    """Open (or pass through) the output file described by *pat*.

    '-' or an empty pattern maps to ui.fout/ui.fin wrapped so close() is
    a no-op; an object that already looks like an open file is returned
    as-is; otherwise the pattern is expanded via makefilename and opened.

    *modemap*, when given, tracks expanded filenames so that repeated
    writes to the same name switch from 'wb' to 'ab' and append instead
    of truncating.
    """
    writable = mode not in ('r', 'rb')

    if not pat or pat == '-':
        fp = repo.ui.fout if writable else repo.ui.fin
        return _unclosablefile(fp)
    if util.safehasattr(pat, 'write') and writable:
        return pat
    if util.safehasattr(pat, 'read') and 'r' in mode:
        return pat
    fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname)
    if modemap is not None:
        mode = modemap.get(fn, mode)
        if mode == 'wb':
            modemap[fn] = 'ab'
    return open(fn, mode)
532
532
def openrevlog(repo, cmd, file_, opts):
    """opens the changelog, manifest, a filelog or a given revlog"""
    cl = opts['changelog']
    mf = opts['manifest']
    # renamed from 'dir' to avoid shadowing the builtin
    dir_ = opts['dir']

    # validate the option combination first
    msg = None
    if cl and mf:
        msg = _('cannot specify --changelog and --manifest at the same time')
    elif cl and dir_:
        msg = _('cannot specify --changelog and --dir at the same time')
    elif cl or mf or dir_:
        if file_:
            msg = _('cannot specify filename with --changelog or --manifest')
        elif not repo:
            msg = _('cannot specify --changelog or --manifest or --dir '
                    'without a repository')
    if msg:
        raise error.Abort(msg)

    r = None
    if repo:
        if cl:
            r = repo.unfiltered().changelog
        elif dir_:
            if 'treemanifest' not in repo.requirements:
                raise error.Abort(_("--dir can only be used on repos with "
                                   "treemanifest enabled"))
            dirlog = repo.manifestlog._revlog.dirlog(dir_)
            if len(dirlog):
                r = dirlog
        elif mf:
            r = repo.manifestlog._revlog
        elif file_:
            filelog = repo.file(file_)
            if len(filelog):
                r = filelog
    if not r:
        # fall back to opening a bare revlog file from the filesystem
        if not file_:
            raise error.CommandError(cmd, _('invalid arguments'))
        if not os.path.isfile(file_):
            raise error.Abort(_("revlog '%s' not found") % file_)
        r = revlog.revlog(scmutil.opener(pycompat.getcwd(), audit=False),
                          file_[:-2] + ".i")
    return r
577
577
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, when rename=True, move) files matching pats[:-1] to the
    destination named by the last pattern.

    Called with the repo lock held.  Returns True when at least one
    individual copy failed, False otherwise.

    Path naming conventions used below:
      hgsep => pathname that uses "/" to separate directories
      ossep => pathname that uses os.sep to separate directories
    """
    cwd = repo.getcwd()
    targets = {}
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # Expand one source pattern into (abs hgsep, rel ossep, exact) tuples,
        # warning about unmanaged or removed files that were named exactly.
        srcs = []
        badstates = '?' if after else '?r'
        m = scmutil.match(repo[None], [pat], opts, globbed=True)
        for absf in repo.walk(m):
            state = repo.dirstate[absf]
            rel = m.rel(absf)
            exact = m.exact(absf)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # absf: hgsep
            # rel: ossep
            srcs.append((absf, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        abstarget = pathutil.canonpath(repo.root, cwd, otarget)
        if '/' in abstarget:
            # We cannot normalize abstarget itself, this would prevent
            # case only renames, like a => A.
            abspath, absname = abstarget.rsplit('/', 1)
            abstarget = repo.dirstate.normalize(abspath) + '/' + absname
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        scmutil.checkportable(ui, abstarget)

        # check for collisions between two sources copied to one target
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites of an existing target
        exists = os.path.lexists(target)
        samefile = False
        if exists and abssrc != abstarget:
            if (repo.dirstate.normalize(abssrc) ==
                repo.dirstate.normalize(abstarget)):
                # case-only rename (a => A) on a case-insensitive filesystem
                if not rename:
                    ui.warn(_("%s: can't copy - same file\n") % reltarget)
                    return
                exists = False
                samefile = True

        if (not after and exists) or (after and state in 'mn'):
            if not opts['force']:
                if state in 'mn':
                    msg = _('%s: not overwriting - file already committed\n')
                    flags = '--after --force' if after else '--force'
                    if rename:
                        hint = _('(hg rename %s to replace the file by '
                                 'recording a rename)\n') % flags
                    else:
                        hint = _('(hg copy %s to replace the file by '
                                 'recording a copy)\n') % flags
                else:
                    msg = _('%s: not overwriting - file exists\n')
                    if rename:
                        hint = _('(hg rename --after to record the rename)\n')
                    else:
                        hint = _('(hg copy --after to record the copy)\n')
                ui.warn(msg % reltarget)
                ui.warn(hint)
                return

        if after:
            # --after only records a copy/move that already happened
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                if samefile:
                    # rename through a temporary so the case change sticks
                    tmp = target + "~hgrename"
                    os.rename(src, tmp)
                    os.rename(tmp, target)
                else:
                    util.copyfile(src, target)
                srcexists = True
            except IOError as inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working directory\n') % relsrc)
                    srcexists = False
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
                             dryrun=dryrun, cwd=cwd)
        if rename and not dryrun:
            if not after and srcexists and not samefile:
                util.unlinkpath(repo.wjoin(abssrc))
            wctx.forget([abssrc])

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(pycompat.ossep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = pathutil.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many stripped sources already exist at dest
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.lexists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(pycompat.ossep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(pycompat.ossep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res

    pats = scmutil.expandpats(pats)
    if not pats:
        raise error.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise error.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise error.Abort(_('with multiple sources, destination must be an '
                                'existing directory'))
        if util.endswithsep(dest):
            raise error.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathafterfn if after else targetpathfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise error.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
823
823
## facility to let extension process additional data into an import patch
# Identifiers registered here are executed in order.
extrapreimport = [] # run before commit
extrapostimport = [] # run after commit
# Mapping from identifier to the actual import function.
#
# 'preimport' functions run before the commit is made and receive the
# following arguments:
# - repo: the localrepository instance,
# - patchdata: data extracted from patch header (cf m.patch.patchheadermap),
# - extra: the future extra dictionary of the changeset, please mutate it,
# - opts: the import options.
# XXX ideally, we would just pass a ctx ready to be computed, that would allow
# mutation of in memory commit and more. Feel free to rework the code to get
# there.
extrapreimportmap = {}
# 'postimport' functions run after the commit is made and receive one
# argument:
# - ctx: the changectx created by import.
extrapostimportmap = {}
844
844
def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc):
    """Utility function used by commands.import to import a single patch

    This function is explicitly defined here to help the evolve extension to
    wrap this part of the import logic.

    The API is currently a bit ugly because it is a simple code translation
    from the import command. Feel free to make it better.

    :hunk: a patch (as a binary string)
    :parents: nodes that will be parent of the created commit
    :opts: the full dict of option passed to the import command
    :msgs: list to save commit message to.
           (used in case we need to save it when failing)
    :updatefunc: a function that update a repo to a given node
                 updatefunc(<repo>, <node>)

    Returns a (summary message, committed node or None, rejects flag) tuple.
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context
    extractdata = patch.extract(ui, hunk)
    tmpname = extractdata.get('filename')
    message = extractdata.get('message')
    user = opts.get('user') or extractdata.get('user')
    date = opts.get('date') or extractdata.get('date')
    branch = extractdata.get('branch')
    nodeid = extractdata.get('nodeid')
    p1 = extractdata.get('p1')
    p2 = extractdata.get('p2')

    nocommit = opts.get('no_commit')
    importbranch = opts.get('import_branch')
    update = not opts.get('bypass')
    strip = opts["strip"]
    prefix = opts["prefix"]
    sim = float(opts.get('similarity') or 0)
    if not tmpname:
        # nothing was extracted from the hunk: not a patch
        return (None, None, False)

    rejects = False

    try:
        cmdline_message = logmessage(ui, opts)
        if cmdline_message:
            # pickup the cmdline msg
            message = cmdline_message
        elif message:
            # pickup the patch msg
            message = message.strip()
        else:
            # launch the editor
            message = None
        ui.debug('message:\n%s\n' % message)

        if len(parents) == 1:
            parents.append(repo[nullid])
        if opts.get('exact'):
            if not nodeid or not p1:
                raise error.Abort(_('not a Mercurial patch'))
            p1 = repo[p1]
            p2 = repo[p2 or nullid]
        elif p2:
            try:
                p1 = repo[p1]
                p2 = repo[p2]
                # Without any options, consider p2 only if the
                # patch is being applied on top of the recorded
                # first parent.
                if p1 != parents[0]:
                    p1 = parents[0]
                    p2 = repo[nullid]
            except error.RepoError:
                p1, p2 = parents
            if p2.node() == nullid:
                ui.warn(_("warning: import the patch as a normal revision\n"
                          "(use --exact to import the patch as a merge)\n"))
        else:
            p1, p2 = parents

        n = None
        if update:
            # apply the patch to the working directory and commit it
            if p1 != parents[0]:
                updatefunc(repo, p1.node())
            if p2 != parents[1]:
                repo.setparents(p1.node(), p2.node())

            if opts.get('exact') or importbranch:
                repo.dirstate.setbranch(branch or 'default')

            partial = opts.get('partial', False)
            files = set()
            try:
                patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
                            files=files, eolmode=None, similarity=sim / 100.0)
            except patch.PatchError as e:
                if not partial:
                    raise error.Abort(str(e))
                # --partial keeps going and records the rejects
                rejects = True

            files = list(files)
            if nocommit:
                if message:
                    msgs.append(message)
            else:
                if opts.get('exact') or p2:
                    # If you got here, you either use --force and know what
                    # you are doing or used --exact or a merge patch while
                    # being updated to its first parent.
                    m = None
                else:
                    m = scmutil.matchfiles(repo, files or [])
                editform = mergeeditform(repo[None], 'import.normal')
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform=editform, **opts)
                allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit')
                extra = {}
                for idfunc in extrapreimport:
                    extrapreimportmap[idfunc](repo, extractdata, extra, opts)
                try:
                    if partial:
                        repo.ui.setconfig('ui', 'allowemptycommit', True)
                    n = repo.commit(message, user,
                                    date, match=m,
                                    editor=editor, extra=extra)
                    for idfunc in extrapostimport:
                        extrapostimportmap[idfunc](repo[n])
                finally:
                    repo.ui.restoreconfig(allowemptyback)
        else:
            # --bypass: commit the patch through an in-memory context
            if opts.get('exact') or importbranch:
                branch = branch or 'default'
            else:
                branch = p1.branch()
            store = patch.filestore()
            try:
                files = set()
                try:
                    patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
                                    files, eolmode=None)
                except patch.PatchError as e:
                    raise error.Abort(str(e))
                if opts.get('exact'):
                    editor = None
                else:
                    editor = getcommiteditor(editform='import.bypass')
                memctx = context.makememctx(repo, (p1.node(), p2.node()),
                                            message,
                                            user,
                                            date,
                                            branch, files, store,
                                            editor=editor)
                n = memctx.commit()
            finally:
                store.close()
        if opts.get('exact') and nocommit:
            # --exact with --no-commit is still useful in that it does merge
            # and branch bits
            ui.warn(_("warning: can't check exact import with --no-commit\n"))
        elif opts.get('exact') and hex(n) != nodeid:
            raise error.Abort(_('patch is damaged or loses information'))
        msg = _('applied to working directory')
        if n:
            # i18n: refers to a short changeset id
            msg = _('created %s') % short(n)
        return (msg, n, rejects)
    finally:
        os.unlink(tmpname)
1014
1014
# facility to let extensions include additional data in an exported patch
# Identifiers registered here are executed in order.
extraexport = []
# Mapping from identifier to the actual export function.  Each function is
# given two arguments (sequencenumber, changectx) and returns a string to be
# added to the patch header, or None.
extraexportmap = {}
1022
1022
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None, match=None):
    '''export changesets as hg patches.'''

    total = len(revs)
    # widest revision number, used when expanding the output filename template
    revwidth = max(len(str(rev)) for rev in revs)
    filemode = {}

    def single(rev, seqno, fp):
        # write one changeset as a patch, either to fp or to a file derived
        # from the template
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            parents.reverse()

        prev = parents[0] if parents else nullid

        shouldclose = False
        if not fp and len(template) > 0:
            desc_lines = ctx.description().rstrip().split('\n')
            # a commit always has a first description line
            desc = desc_lines[0]
            fp = makefileobj(repo, template, node, desc=desc, total=total,
                             seqno=seqno, revwidth=revwidth, mode='wb',
                             modemap=filemode)
            shouldclose = True
        if fp and not getattr(fp, 'name', '<unnamed>').startswith('<'):
            repo.ui.note("%s\n" % fp.name)

        if not fp:
            write = repo.ui.write
        else:
            def write(s, **kw):
                fp.write(s)

        write("# HG changeset patch\n")
        write("# User %s\n" % ctx.user())
        write("# Date %d %d\n" % ctx.date())
        write("# %s\n" % util.datestr(ctx.date()))
        if branch and branch != 'default':
            write("# Branch %s\n" % branch)
        write("# Node ID %s\n" % hex(node))
        write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            write("# Parent %s\n" % hex(parents[1]))

        for headerid in extraexport:
            header = extraexportmap[headerid](seqno, ctx)
            if header is not None:
                write('# %s\n' % header)
        write(ctx.description().rstrip())
        write("\n\n")

        for chunk, label in patch.diffui(repo, prev, node, match, opts=opts):
            write(chunk, label=label)

        if shouldclose:
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno + 1, fp)
1087
1087
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None, prefix='',
                   root='', listsubrepos=False):
    '''show diff or diffstat.

    Writes either a full diff (stat=False) or a diffstat summary
    (stat=True) between node1 and node2 to the ui, or to *fp* when one
    is given (labels are dropped in that case).  *root*, if non-empty,
    restricts and re-bases the diff to that subdirectory; *prefix* is
    prepended to file names in the output.  When *listsubrepos* is true
    the diff of each subrepo is appended as well.
    '''
    # When writing to a plain file object we lose label (color) info;
    # the **kw shim swallows the label= keyword that patch.* passes.
    if fp is None:
        write = ui.write
    else:
        def write(s, **kw):
            fp.write(s)

    if root:
        relroot = pathutil.canonpath(repo.root, repo.getcwd(), root)
    else:
        relroot = ''
    if relroot != '':
        # XXX relative roots currently don't work if the root is within a
        # subrepo
        uirelroot = match.uipath(relroot)
        relroot += '/'
        # warn (but still proceed) for match patterns outside the root
        for matchroot in match.files():
            if not matchroot.startswith(relroot):
                ui.warn(_('warning: %s not inside relative root %s\n') % (
                    match.uipath(matchroot), uirelroot))

    if stat:
        # diffstat does not need any context lines; width tracks the
        # terminal unless plain (scripted) output was requested
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = ui.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts,
                            prefix=prefix, relroot=relroot)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts, prefix=prefix,
                                         relroot=relroot):
            write(chunk, label=label)

    if listsubrepos:
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
            tempnode2 = node2
            try:
                if node2 is not None:
                    tempnode2 = ctx2.substate[subpath][1]
            except KeyError:
                # A subrepo that existed in node1 was deleted between node1 and
                # node2 (inclusive). Thus, ctx2's substate won't contain that
                # subpath. The best we can do is to ignore it.
                tempnode2 = None
            submatch = matchmod.subdirmatcher(subpath, match)
            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
                     stat=stat, fp=fp, prefix=prefix)
1144
1144
class changeset_printer(object):
    '''show changeset information when templating not requested.

    Default (non-template) renderer for "hg log"-style output.  When
    *buffered* is true, output for each revision is accumulated in
    self.header/self.hunk keyed by rev and emitted later by flush()
    (used by graph log, where ordering is decided by the caller).
    '''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered
        # matchfn: when set, a patch/diffstat is appended per changeset
        self.matchfn = matchfn
        self.diffopts = diffopts
        # per-rev buffers used in buffered mode
        self.header = {}
        self.hunk = {}
        self.lastheader = None
        self.footer = None

    def flush(self, ctx):
        '''write buffered output for ctx; returns 1 if a hunk was written.'''
        rev = ctx.rev()
        if rev in self.header:
            h = self.header[rev]
            # only repeat the header when it actually changed
            if h != self.lastheader:
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        '''render ctx, either immediately or into the per-rev buffer.'''
        if self.buffered:
            self.ui.pushbuffer(labeled=True)
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer()
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()
        # full hashes in debug mode, short ones otherwise
        if self.ui.debugflag:
            hexfunc = hex
        else:
            hexfunc = short
        # as of now, wctx.node() and wctx.rev() return None, but we want to
        # show the same values as {node} and {rev} templatekw
        revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex())))

        if self.ui.quiet:
            self.ui.write("%d:%s\n" % revnode, label='log.node')
            return

        date = util.datestr(ctx.date())

        # i18n: column positioning for "hg log"
        self.ui.write(_("changeset: %d:%s\n") % revnode,
                      label='log.changeset changeset.%s' % ctx.phasestr())

        # branches are shown first before any other names due to backwards
        # compatibility
        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            # i18n: column positioning for "hg log"
            self.ui.write(_("branch: %s\n") % branch,
                          label='log.branch')

        for nsname, ns in self.repo.names.iteritems():
            # branches has special logic already handled above, so here we just
            # skip it
            if nsname == 'branches':
                continue
            # we will use the templatename as the color name since those two
            # should be the same
            for name in ns.names(self.repo, changenode):
                self.ui.write(ns.logfmt % name,
                              label='log.%s' % ns.colorname)
        if self.ui.debugflag:
            # i18n: column positioning for "hg log"
            self.ui.write(_("phase: %s\n") % ctx.phasestr(),
                          label='log.phase')
        for pctx in scmutil.meaningfulparents(self.repo, ctx):
            label = 'log.parent changeset.%s' % pctx.phasestr()
            # i18n: column positioning for "hg log"
            self.ui.write(_("parent: %d:%s\n")
                          % (pctx.rev(), hexfunc(pctx.node())),
                          label=label)

        # manifest line only makes sense for committed revs (rev is None
        # for the working directory context)
        if self.ui.debugflag and rev is not None:
            mnode = ctx.manifestnode()
            # i18n: column positioning for "hg log"
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifestlog._revlog.rev(mnode),
                           hex(mnode)),
                          label='ui.debug log.manifest')
        # i18n: column positioning for "hg log"
        self.ui.write(_("user: %s\n") % ctx.user(),
                      label='log.user')
        # i18n: column positioning for "hg log"
        self.ui.write(_("date: %s\n") % date,
                      label='log.date')

        if self.ui.debugflag:
            # modified/added/removed relative to the first parent
            files = ctx.p1().status(ctx)[:3]
            for key, value in zip([# i18n: column positioning for "hg log"
                                   _("files:"),
                                   # i18n: column positioning for "hg log"
                                   _("files+:"),
                                   # i18n: column positioning for "hg log"
                                   _("files-:")], files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            # i18n: column positioning for "hg log"
            self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            # copies is a list of (dest, source) pairs
            copies = ['%s (%s)' % c for c in copies]
            # i18n: column positioning for "hg log"
            self.ui.write(_("copies: %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                # i18n: column positioning for "hg log"
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # non-verbose: only the first line of the description
                # i18n: column positioning for "hg log"
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(ctx, matchfn)

    def showpatch(self, ctx, matchfn):
        '''append diffstat and/or patch for ctx, honoring diffopts.'''
        if not matchfn:
            matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffallopts(self.ui, self.diffopts)
            node = ctx.node()
            prev = ctx.p1().node()
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                # blank line between diffstat and the patch itself
                if stat:
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write("\n")
1315
1315
class jsonchangeset(changeset_printer):
    '''format changeset information.

    JSON renderer for "hg log -Tjson".  The JSON document (a list of
    objects) is emitted incrementally by hand — one object per call to
    _show(), opened on the first call and closed in close() — so the
    output is only valid JSON once close() has run.
    '''

    def __init__(self, ui, repo, matchfn, diffopts, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        self.cache = {}
        # tracks whether the opening '[' has been written yet
        self._first = True

    def close(self):
        if not self._first:
            self.ui.write("\n]\n")
        else:
            # no changeset was ever shown: emit an empty list
            self.ui.write("[]\n")

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        rev = ctx.rev()
        # working-directory ctx has no rev/node; represent both as null
        if rev is None:
            jrev = jnode = 'null'
        else:
            jrev = str(rev)
            jnode = '"%s"' % hex(ctx.node())
        j = encoding.jsonescape

        if self._first:
            self.ui.write("[\n {")
            self._first = False
        else:
            self.ui.write(",\n {")

        if self.ui.quiet:
            # quiet mode: only rev and node
            self.ui.write(('\n "rev": %s') % jrev)
            self.ui.write((',\n "node": %s') % jnode)
            self.ui.write('\n }')
            return

        self.ui.write(('\n "rev": %s') % jrev)
        self.ui.write((',\n "node": %s') % jnode)
        self.ui.write((',\n "branch": "%s"') % j(ctx.branch()))
        self.ui.write((',\n "phase": "%s"') % ctx.phasestr())
        self.ui.write((',\n "user": "%s"') % j(ctx.user()))
        self.ui.write((',\n "date": [%d, %d]') % ctx.date())
        self.ui.write((',\n "desc": "%s"') % j(ctx.description()))

        self.ui.write((',\n "bookmarks": [%s]') %
                      ", ".join('"%s"' % j(b) for b in ctx.bookmarks()))
        self.ui.write((',\n "tags": [%s]') %
                      ", ".join('"%s"' % j(t) for t in ctx.tags()))
        self.ui.write((',\n "parents": [%s]') %
                      ", ".join('"%s"' % c.hex() for c in ctx.parents()))

        if self.ui.debugflag:
            if rev is None:
                jmanifestnode = 'null'
            else:
                jmanifestnode = '"%s"' % hex(ctx.manifestnode())
            self.ui.write((',\n "manifest": %s') % jmanifestnode)

            self.ui.write((',\n "extra": {%s}') %
                          ", ".join('"%s": "%s"' % (j(k), j(v))
                                    for k, v in ctx.extra().items()))

            # status relative to the first parent: (modified, added, removed)
            files = ctx.p1().status(ctx)
            self.ui.write((',\n "modified": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[0]))
            self.ui.write((',\n "added": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[1]))
            self.ui.write((',\n "removed": [%s]') %
                          ", ".join('"%s"' % j(f) for f in files[2]))

        elif self.ui.verbose:
            self.ui.write((',\n "files": [%s]') %
                          ", ".join('"%s"' % j(f) for f in ctx.files()))

            if copies:
                self.ui.write((',\n "copies": {%s}') %
                              ", ".join('"%s": "%s"' % (j(k), j(v))
                                        for k, v in copies))

        matchfn = self.matchfn
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True)
            node, prev = ctx.node(), ctx.p1().node()
            if stat:
                # capture diffstat output so it can be embedded as a
                # single JSON string
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
                self.ui.write((',\n "diffstat": "%s"')
                              % j(self.ui.popbuffer()))
            if diff:
                self.ui.pushbuffer()
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
                self.ui.write((',\n "diff": "%s"') % j(self.ui.popbuffer()))

        self.ui.write("\n }")
1414
1414
class changeset_templater(changeset_printer):
    '''format changeset information.

    Template-driven renderer: the output of each changeset is produced
    by evaluating a user/style-supplied template (*tmpl*) or a style map
    file (*mapfile*) — exactly one of the two may be given.
    '''

    def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered)
        # short (12-char) node hashes unless debugging
        formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
        filters = {'formatnode': formatnode}
        # fallback templates used when the map file does not define them
        defaulttempl = {
            'parent': '{rev}:{node|formatnode} ',
            'manifest': '{rev}:{node|formatnode}',
            'file_copy': '{name} ({source})',
            'extra': '{key}={value|stringescape}'
            }
        # filecopy is preserved for compatibility reasons
        defaulttempl['filecopy'] = defaulttempl['file_copy']
        assert not (tmpl and mapfile)
        if mapfile:
            self.t = templater.templater.frommapfile(mapfile, filters=filters,
                                                     cache=defaulttempl)
        else:
            self.t = formatter.maketemplater(ui, 'changeset', tmpl,
                                             filters=filters,
                                             cache=defaulttempl)

        self.cache = {}

        # find correct templates for current mode
        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        # map of logical part -> template name; later (more specific)
        # mode postfixes override earlier ones when present in the map
        self._parts = {'header': '', 'footer': '', 'changeset': 'changeset',
                       'docheader': '', 'docfooter': ''}
        for mode, postfix in tmplmodes:
            for t in self._parts:
                cur = t
                if postfix:
                    cur += "_" + postfix
                if mode and cur in self.t:
                    self._parts[t] = cur

        if self._parts['docheader']:
            self.ui.write(templater.stringify(self.t(self._parts['docheader'])))

    def close(self):
        if self._parts['docfooter']:
            if not self.footer:
                self.footer = ""
            self.footer += templater.stringify(self.t(self._parts['docfooter']))
        return super(changeset_templater, self).close()

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        # build the property dict the template engine evaluates against
        props = props.copy()
        props.update(templatekw.keywords)
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['ui'] = self.repo.ui
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache

        # write header
        if self._parts['header']:
            h = templater.stringify(self.t(self._parts['header'], **props))
            if self.buffered:
                self.header[ctx.rev()] = h
            else:
                # only emit the header when it differs from the last one
                if self.lastheader != h:
                    self.lastheader = h
                    self.ui.write(h)

        # write changeset metadata, then patch if requested
        key = self._parts['changeset']
        self.ui.write(templater.stringify(self.t(key, **props)))
        self.showpatch(ctx, matchfn)

        if self._parts['footer']:
            # the footer is rendered once (against the first changeset)
            # and written by close()
            if not self.footer:
                self.footer = templater.stringify(
                    self.t(self._parts['footer'], **props))
1499
1499
def gettemplate(ui, tmpl, style):
    """
    Find the template matching the given template spec or style.

    Returns a (template, mapfile) pair where at most one element is
    set.  When neither *tmpl* nor *style* is given, falls back to the
    [ui] logtemplate and style configuration, in that order.
    """

    # ui settings
    if not tmpl and not style: # template are stronger than style
        configured = ui.config('ui', 'logtemplate')
        if configured:
            return templater.unquotestring(configured), None
        style = util.expandpath(ui.config('ui', 'style', ''))

    if not tmpl and style:
        mapfile = style
        # a bare style name (no directory component) is resolved against
        # the template path, preferring the "map-cmdline." variant
        if not os.path.split(mapfile)[0]:
            resolved = (templater.templatepath('map-cmdline.' + mapfile)
                        or templater.templatepath(mapfile))
            if resolved:
                mapfile = resolved
        return None, mapfile

    if not tmpl:
        return None, None

    return formatter.lookuptemplate(ui, 'changeset', tmpl)
1526
1526
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # a match function is only needed when a patch/diffstat is requested
    if opts.get('patch') or opts.get('stat'):
        matchfn = scmutil.matchall(repo)
    else:
        matchfn = None

    # 'json' is a built-in pseudo-template with a dedicated printer
    if opts.get('template') == 'json':
        return jsonchangeset(ui, repo, matchfn, opts, buffered)

    tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style'))
    if tmpl or mapfile:
        return changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile,
                                   buffered)
    return changeset_printer(ui, repo, matchfn, opts, buffered)
1552
1552
def showmarker(fm, marker, index=None):
    """utility function to display obsolescence marker in a readable way

    To be used by debug function."""
    # Optional leading index column (used when dumping a marker list).
    if index is not None:
        fm.write('index', '%i ', index)
    fm.write('precnode', '%s ', hex(marker.precnode()))
    successors = marker.succnodes()
    # Successors may be empty (pruned changeset); only write when present.
    fm.condwrite(successors, 'succnodes', '%s ',
                 fm.formatlist(map(hex, successors), name='node'))
    fm.write('flag', '%X ', marker.flags())
    parentnodes = marker.parentnodes()
    if parentnodes is not None:
        fm.write('parentnodes', '{%s} ',
                 fm.formatlist(map(hex, parentnodes), name='node', sep=', '))
    fm.write('date', '(%s) ', fm.formatdate(marker.date()))
    # The date is shown separately above, so drop it from the metadata dict.
    meta = marker.metadata().copy()
    meta.pop('date', None)
    fm.write('metadata', '{%s}', fm.formatdict(meta, fmt='%r: %r', sep=', '))
    fm.plain('\n')
1573
1573
def finddate(ui, repo, date):
    """Find the tipmost changeset that matches the given date spec"""
    datematch = util.matchdate(date)
    matcher = scmutil.matchall(repo)
    found = {}

    def prep(ctx, fns):
        # Record the date of every walked changeset that satisfies the spec.
        d = ctx.date()
        if datematch(d[0]):
            found[ctx.rev()] = d

    # walkchangerevs() yields contexts newest-first, so the first hit
    # recorded by prep() that we see again here is the tipmost match.
    for ctx in walkchangerevs(repo, matcher, {'rev': None}, prep):
        rev = ctx.rev()
        if rev in found:
            ui.status(_("found revision %s from %s\n") %
                      (rev, util.datestr(found[rev])))
            return str(rev)

    raise error.Abort(_("revision matching date not found"))
1594
1594
def increasingwindows(windowsize=8, sizelimit=512):
    """Yield revision-window sizes, doubling until sizelimit is reached.

    Produces windowsize, 2*windowsize, ... capped at sizelimit, then
    repeats sizelimit forever (the generator never terminates).
    """
    size = windowsize
    while True:
        yield size
        if size < sizelimit:
            size *= 2
1600
1600
class FileWalkError(Exception):
    """Raised when a file history cannot be walked using filelogs alone."""
1603
1603
def walkfilerevs(repo, match, follow, revs, fncache):
    '''Walks the file history for the matched files.

    Returns the changeset revs that are involved in the file history.

    Throws FileWalkError if the file history can't be walked using
    filelogs alone.
    '''
    wanted = set()
    copies = []
    minrev, maxrev = min(revs), max(revs)

    def filerevgen(filelog, last):
        """
        Only files, no patterns.  Check the history of each file.

        Examines filelog entries within minrev, maxrev linkrev range
        Returns an iterator yielding (linkrev, parentlinkrevs, copied)
        tuples in backwards order
        """
        cl_count = len(repo)
        revs = []
        for j in xrange(0, last + 1):
            linkrev = filelog.linkrev(j)
            if linkrev < minrev:
                continue
            # only yield rev for which we have the changelog, it can
            # happen while doing "hg log" during a pull or commit
            if linkrev >= cl_count:
                break

            parentlinkrevs = []
            for p in filelog.parentrevs(j):
                if p != nullrev:
                    parentlinkrevs.append(filelog.linkrev(p))
            n = filelog.node(j)
            revs.append((linkrev, parentlinkrevs,
                         follow and filelog.renamed(n)))

        return reversed(revs)

    def iterfiles():
        # Yield (filename, filenode) pairs for every matched file plus
        # any copy sources discovered while walking (appended to 'copies'
        # by the main loop below, so iteration order matters).
        pctx = repo['.']
        for filename in match.files():
            if follow:
                if filename not in pctx:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % filename)
                yield filename, pctx[filename].filenode()
            else:
                yield filename, None
        for filename_node in copies:
            yield filename_node

    for file_, node in iterfiles():
        filelog = repo.file(file_)
        if not len(filelog):
            if node is None:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted
1700
1700
class _followfilter(object):
    """Incrementally decide whether revisions belong to a --follow walk.

    match() must be fed revisions monotonically moving away from the
    first revision seen (forward for descendants, backward for
    ancestors); 'roots' tracks the frontier of the walked graph.
    """

    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev        # set on the first match() call
        self.roots = set()             # current frontier of followed revs
        self.onlyfirst = onlyfirst     # restrict to first parents only

    def match(self, rev):
        def realparents(rev):
            # Parent revisions, ignoring the null parent; optionally
            # limited to the first parent (--follow-first).
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        # The first revision handed to us anchors the walk.
        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False
1738
1738
def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order. Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    # Exact paths can use the fast filelog walk; patterns (or --removed)
    # force the slow changelog scan.
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns.  Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                                'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # prepare() is called in forward order within the window...
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # ...but contexts are yielded in the caller's requested order.
            for rev in nrevs:
                yield change(rev)

            if stopiteration:
                break

    return iterate()
1876
1876
def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating revs to file names (which is not "correct" but
    # good enough).
    fcache = {}                 # rev -> set of ancestor file names
    fcacheready = [False]       # one-element list: mutable flag for closure
    pctx = repo['.']

    def populate():
        # Walk each file's ancestry once, recording which path belongs
        # to which introducing revision.
        for fn in files:
            fctx = pctx[fn]
            fcache.setdefault(fctx.introrev(), set()).add(fctx.path())
            for c in fctx.ancestors(followfirst=followfirst):
                fcache.setdefault(c.rev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher
1904
1904
1905 def _makenofollowlogfilematcher(repo, pats, opts):
1905 def _makenofollowlogfilematcher(repo, pats, opts):
1906 '''hook for extensions to override the filematcher for non-follow cases'''
1906 '''hook for extensions to override the filematcher for non-follow cases'''
1907 return None
1907 return None
1908
1908
def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # Maps a log option to (revset template, join operator for list values).
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
        }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = next(it)
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                        'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

    # We decided to fall back to the slowpath because at least one
    # of the paths was not a file. Check to see if at least one of them
    # existed in history - in that case, we'll continue down the
    # slowpath; otherwise, we can turn off the slowpath
    if slowpath:
        for path in match.files():
            if path == '.' or path in repo.store:
                break
        else:
            slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # Translate every recognized, non-empty option into a revset clause
    # and AND the clauses together.
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher
2060
2060
def _logrevs(repo, opts):
    """Resolve the set of revisions a log-style command should visit.

    The default --rev value depends on --follow, but --follow behavior
    depends on the revisions resolved from --rev, hence this helper.
    """
    following = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        return scmutil.revrange(repo, opts.get('rev'))
    if following:
        if repo.dirstate.p1() == nullid:
            # nothing to follow from a null working directory parent
            return revset.baseset()
        return repo.revs('reverse(:.)')
    allrevs = revset.spanset(repo)
    allrevs.reverse()
    return allrevs
2075
2075
def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) for a graph log.

    revs is an iterable of revision numbers; expr is a revset string
    built from the log options and file patterns (or None) used to
    filter revs; filematcher is None unless --stat or --patch was
    given, in which case it maps a revision number to a match object
    selecting the files detailed for that revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev') and not (revs.isdescending() or revs.istopo()):
        # User-specified revs might be unsorted, but they must not be
        # sorted before _makelogrevset, which may depend on their order.
        revs.sort(reverse=True)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        truncated = []
        for pos, rev in enumerate(revs):
            if pos >= limit:
                break
            truncated.append(rev)
        revs = revset.baseset(truncated)

    return revs, expr, filematcher
2106
2106
def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) for a plain log.

    revs is an iterable of revision numbers; expr is a revset string
    built from the log options and file patterns (or None) used to
    filter revs; filematcher is None unless --stat or --patch was
    given, in which case it maps a revision number to a match object
    selecting the files detailed for that revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        matcher = revset.match(repo.ui, expr, order=revset.followorder)
        revs = matcher(repo, revs)
    if limit is not None:
        kept = []
        for pos, rev in enumerate(revs):
            if pos >= limit:
                break
            kept.append(rev)
        revs = revset.baseset(kept)

    return revs, expr, filematcher
2132
2132
def _graphnodeformatter(ui, displayer):
    """Return a callable (repo, ctx) -> str rendering the graph node symbol."""
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        # fast path for the default "{graphnode}" rendering
        return templatekw.showgraphnode

    templ = formatter.gettemplater(ui, 'graphnode', spec)
    if isinstance(displayer, changeset_templater):
        # reuse the displayer's cache of slow templates
        cache = displayer.cache
    else:
        cache = {}
    props = templatekw.keywords.copy()
    props.update({'templ': templ, 'cache': cache})

    def formatnode(repo, ctx):
        props['ctx'] = ctx
        props['repo'] = repo
        props['ui'] = repo.ui
        props['revcache'] = {}
        return templater.stringify(templ('graphnode', **props))

    return formatnode
2152
2152
def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    """Render an ASCII revision graph for the (rev, type, ctx, parents)
    tuples yielded by dag, showing each changeset through displayer.

    getrenamed, if given, is called as getrenamed(filename, rev) to
    report copies/renames for each changeset; filematcher, if given,
    maps a revision number to a match object restricting which file
    details are shown. edgefn computes the graph edges for each row.
    """
    formatnode = _graphnodeformatter(ui, displayer)
    state = graphmod.asciistate()
    styles = state['styles']

    # only set graph styling if HGPLAIN is not set.
    if ui.plain('graph'):
        # set all edge styles to |, the default pre-3.8 behaviour
        styles.update(dict.fromkeys(styles, '|'))
    else:
        edgetypes = {
            'parent': graphmod.PARENT,
            'grandparent': graphmod.GRANDPARENT,
            'missing': graphmod.MISSINGPARENT
        }
        for name, key in edgetypes.items():
            # experimental config: experimental.graphstyle.*
            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
                                    styles[key])
            if not styles[key]:
                styles[key] = None

    # experimental config: experimental.graphshorten
    state['graphshorten'] = ui.configbool('experimental', 'graphshorten')

    for rev, type, ctx, parents in dag:
        char = formatnode(repo, ctx)
        copies = None
        if getrenamed and ctx.rev():
            # collect (dest, source) rename pairs for this changeset
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            # drop the empty element produced by a trailing newline
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, state, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()
2200
2200
def graphlog(ui, repo, *pats, **opts):
    """Show the revision DAG as an ASCII graph.

    Parameters are identical to those of the log command.
    """
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        else:
            endrev = None
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges,
                 getrenamed, filematcher)
2215
2215
def checkunsupportedgraphflags(pats, opts):
    """Abort if opts enables a flag that -G/--graph does not support."""
    unsupported = ["newest_first"]
    for op in unsupported:
        if opts.get(op):
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                              % op.replace("_", "-"))
2221
2221
def graphrevs(repo, nodes, opts):
    """Return a graph DAG iterator over nodes, newest first.

    Honors --limit from opts; note that nodes is reversed in place.
    """
    cap = loglimit(opts)
    nodes.reverse()
    if cap is not None:
        nodes = nodes[:cap]
    return graphmod.nodes(repo, nodes)
2228
2228
def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule files matched by match for addition to the dirstate.

    prefix is prepended to paths when reporting from within a
    subrepository; explicitonly restricts the operation to files named
    explicitly (match.exact). Recurses into subrepositories. Returns
    the list of files that could not be added.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []

    # record files rejected by the matcher while still delegating to it
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                # check for filename case collisions
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            if opts.get('subrepos'):
                # recurse fully into the subrepo
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                # only add files named explicitly inside the subrepo
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad
2271
2271
def forget(ui, repo, match, prefix, explicitonly):
    """Stop tracking files matched by match without deleting them.

    prefix is prepended to paths when reporting from within a
    subrepository; explicitonly restricts the operation to files named
    explicitly. Returns a pair (bad, forgot): files that could not be
    forgotten and files that actually were.
    """
    join = lambda f: os.path.join(prefix, f)
    bad = []
    # record files rejected by the matcher while still delegating to it
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    # modified + added + deleted + clean
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % join(subpath))

    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot
2319
2319
def files(ui, ctx, m, fm, fmt, subrepos):
    """List the files in ctx matched by m through formatter fm.

    fmt is the format string applied to each path; subrepos enables
    recursion into all subrepositories. Returns 0 if at least one file
    was listed, 1 otherwise.
    """
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        # in the working directory, skip files marked for removal
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if (subrepos or m.exact(subpath) or any(submatch.files())):
            sub = ctx.sub(subpath)
            try:
                # recurse further only for explicitly named subrepos
                # or when --subrepos was given
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                          % m.abs(subpath))

    return ret
2349
2349
def remove(ui, repo, m, prefix, after, force, subrepos, warnings=None):
    """Remove files matched by m from the working directory and dirstate.

    after keeps only files already deleted from disk; force also removes
    modified/added files; subrepos recurses into all subrepositories.
    warnings, when provided by a recursive caller, accumulates warning
    messages instead of printing them (the outermost call prints them).
    Returns 0 on success, 1 if anything had to be skipped.
    """
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    if warnings is None:
        warnings = []
        warn = True
    else:
        # a parent invocation will emit the accumulated warnings
        warn = False

    subs = sorted(wctx.substate)
    total = len(subs)
    count = 0
    for subpath in subs:
        count += 1
        submatch = matchmod.subdirmatcher(subpath, m)
        if subrepos or m.exact(subpath) or any(submatch.files()):
            ui.progress(_('searching'), count, total=total, unit=_('subrepos'))
            sub = wctx.sub(subpath)
            try:
                if sub.removefiles(submatch, prefix, after, force, subrepos,
                                   warnings):
                    ret = 1
            except error.LookupError:
                warnings.append(_("skipping missing subrepository: %s\n")
                                % join(subpath))
    ui.progress(_('searching'), None)

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    files = m.files()
    total = len(files)
    count = 0

    def insubrepo(f):
        # whether f lies inside one of the working copy's subrepos
        # (hoisted out of the loop below; the original re-created this
        # closure on every iteration)
        for subpath in wctx.substate:
            if f.startswith(subpath + '/'):
                return True
        return False

    for f in files:
        count += 1
        ui.progress(_('deleting'), count, total=total, unit=_('files'))
        isdir = f in deleteddirs or wctx.hasdir(f)
        if (f in repo.dirstate or isdir or f == '.'
            or insubrepo(f) or f in subs):
            continue

        if repo.wvfs.exists(f):
            if repo.wvfs.isdir(f):
                warnings.append(_('not removing %s: no tracked files\n')
                                % m.rel(f))
            else:
                warnings.append(_('not removing %s: file is untracked\n')
                                % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1
    ui.progress(_('deleting'), None)

    # compute the set of files to actually forget/unlink
    # (renamed from 'list', which shadowed the builtin)
    if force:
        removelist = modified + deleted + clean + added
    elif after:
        removelist = deleted
        remaining = modified + added + clean
        total = len(remaining)
        count = 0
        for f in remaining:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file still exists\n')
                            % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)
    else:
        removelist = deleted + clean
        total = len(modified) + len(added)
        count = 0
        for f in modified:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_('not removing %s: file is modified (use -f'
                              ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            count += 1
            ui.progress(_('skipping'), count, total=total, unit=_('files'))
            warnings.append(_("not removing %s: file has been marked for add"
                              " (use 'hg forget' to undo add)\n") % m.rel(f))
            ret = 1
        ui.progress(_('skipping'), None)

    removelist = sorted(removelist)
    total = len(removelist)
    count = 0
    for f in removelist:
        count += 1
        if ui.verbose or not m.exact(f):
            ui.progress(_('deleting'), count, total=total, unit=_('files'))
            ui.status(_('removing %s\n') % m.rel(f))
    ui.progress(_('deleting'), None)

    with repo.wlock():
        if not after:
            for f in removelist:
                if f in added:
                    continue # we never unlink added files on remove
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(removelist)

    if warn:
        for warning in warnings:
            ui.warn(warning)

    return ret
2467
2467
def cat(ui, repo, ctx, matcher, prefix, **opts):
    """Write out the contents of files in ctx matched by matcher.

    prefix is prepended to paths when naming output files (used when
    recursing into subrepositories). Supported opts include 'output'
    (destination pattern) and 'decode' (apply wwritedata filters).
    Returns 0 if at least one file was written, 1 otherwise.
    """
    err = 1

    def write(path):
        # open the destination (file or stdout) and dump the revision data
        fp = makefileobj(repo, opts.get('output'), ctx.node(),
                         pathname=os.path.join(prefix, path))
        data = ctx[path].data()
        if opts.get('decode'):
            data = repo.wwritedata(path, data)
        fp.write(data)
        fp.close()

    # Automation often uses hg cat on single files, so special case it
    # for performance to avoid the cost of parsing the manifest.
    if len(matcher.files()) == 1 and not matcher.anypats():
        file = matcher.files()[0]
        mfl = repo.manifestlog
        mfnode = ctx.manifestnode()
        try:
            if mfnode and mfl[mfnode].find(file)[0]:
                write(file)
                return 0
        except KeyError:
            # fall through to the generic walk below
            pass

    for abs in ctx.walk(matcher):
        write(abs)
        err = 0

    for subpath in sorted(ctx.substate):
        sub = ctx.sub(subpath)
        try:
            submatch = matchmod.subdirmatcher(subpath, matcher)

            if not sub.cat(submatch, os.path.join(prefix, sub._path),
                           **opts):
                err = 0
        except error.RepoLookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                      % os.path.join(prefix, subpath))

    return err
2510
2510
def commit(ui, repo, commitfunc, pats, opts):
    '''commit the specified files or all outstanding changes'''
    rawdate = opts.get('date')
    if rawdate:
        opts['date'] = util.parsedate(rawdate)
    message = logmessage(ui, opts)
    matcher = scmutil.match(repo[None], pats, opts)

    # extract addremove carefully -- this function can be called from a command
    # that doesn't support addremove
    if opts.get('addremove') and scmutil.addremove(repo, matcher, "",
                                                   opts) != 0:
        raise error.Abort(
            _("failed to mark all new/missing files as added/removed"))

    return commitfunc(ui, repo, message, matcher, opts)
2527
2527
def samefile(f, ctx1, ctx2):
    """Return True if file f is identical (content and flags) in both
    contexts, or is absent from both; False otherwise."""
    in1 = f in ctx1.manifest()
    in2 = f in ctx2.manifest()
    if not in1:
        # present in neither manifest counts as "same"
        return not in2
    if not in2:
        return False
    a = ctx1.filectx(f)
    b = ctx2.filectx(f)
    return not a.cmp(b) and a.flags() == b.flags()
2539
2539
def amend(ui, repo, commitfunc, old, extra, pats, opts):
    """Rewrite changeset ``old``, folding in current working directory
    changes and/or a new commit message.

    First makes a temporary commit of the working directory changes (with
    hooks and the active bookmark disabled), then builds a memctx combining
    that commit with ``old`` on top of ``old``'s first parent, commits it,
    moves bookmarks, and either obsoletes or strips the replaced
    changesets.  Returns the node of the amended commit (or ``old``'s node
    when nothing changed).
    """
    # avoid cycle context -> subrepo -> cmdutil
    from . import context

    # amend will reuse the existing user if not specified, but the obsolete
    # marker creation requires that the current user's name is specified.
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        ui.username() # raise exception if username not set

    ui.note(_('amending changeset %s\n') % old)
    base = old.p1()
    createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt)

    wlock = lock = newid = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        with repo.transaction('amend') as tr:
            # See if we got a message from -m or -l, if not, open the editor
            # with the message of the changeset to amend
            message = logmessage(ui, opts)
            # ensure logfile does not conflict with later enforcement of the
            # message. potential logfile content has been processed by
            # `logmessage` anyway.
            opts.pop('logfile')
            # First, do a regular commit to record all changes in the working
            # directory (if there are any)
            ui.callhooks = False
            activebookmark = repo._bookmarks.active
            try:
                repo._bookmarks.active = None
                opts['message'] = 'temporary amend commit for %s' % old
                node = commit(ui, repo, commitfunc, pats, opts)
            finally:
                repo._bookmarks.active = activebookmark
                repo._bookmarks.recordchange(tr)
                ui.callhooks = True
            ctx = repo[node]

            # Participating changesets:
            #
            # node/ctx o - new (intermediate) commit that contains changes
            # |           from working dir to go into amending commit
            # |           (or a workingctx if there were no changes)
            # |
            # old      o - changeset to amend
            # |
            # base     o - parent of amending changeset

            # Update extra dict from amended commit (e.g. to preserve graft
            # source)
            extra.update(old.extra())

            # Also update it from the intermediate commit or from the wctx
            extra.update(ctx.extra())

            if len(old.parents()) > 1:
                # ctx.files() isn't reliable for merges, so fall back to the
                # slower repo.status() method
                files = set([fn for st in repo.status(base, old)[:3]
                             for fn in st])
            else:
                files = set(old.files())

            # Second, we use either the commit we just did, or if there were no
            # changes the parent of the working directory as the version of the
            # files in the final amend commit
            if node:
                ui.note(_('copying changeset %s to %s\n') % (ctx, base))

                user = ctx.user()
                date = ctx.date()
                # Recompute copies (avoid recording a -> b -> a)
                copied = copies.pathcopies(base, ctx)
                # BUG FIX: the original tested `if old.p2:`, i.e. the bound
                # method object, which is always truthy -- so copies were
                # (pointlessly) recomputed against the null second parent on
                # every amend. Call it: a changectx is falsy at nullrev.
                if old.p2():
                    copied.update(copies.pathcopies(old.p2(), ctx))

                # Prune files which were reverted by the updates: if old
                # introduced file X and our intermediate commit, node,
                # renamed that file, then those two files are the same and
                # we can discard X from our list of files. Likewise if X
                # was deleted, it's no longer relevant
                files.update(ctx.files())
                files = [f for f in files if not samefile(f, ctx, base)]

                def filectxfn(repo, ctx_, path):
                    try:
                        fctx = ctx[path]
                        flags = fctx.flags()
                        mctx = context.memfilectx(repo,
                                                  fctx.path(), fctx.data(),
                                                  islink='l' in flags,
                                                  isexec='x' in flags,
                                                  copied=copied.get(path))
                        return mctx
                    except KeyError:
                        return None
            else:
                ui.note(_('copying changeset %s to %s\n') % (old, base))

                # Use version of files as in the old cset
                def filectxfn(repo, ctx_, path):
                    try:
                        return old.filectx(path)
                    except KeyError:
                        return None

            user = opts.get('user') or old.user()
            date = opts.get('date') or old.date()
            editform = mergeeditform(old, 'commit.amend')
            editor = getcommiteditor(editform=editform, **opts)
            if not message:
                editor = getcommiteditor(edit=True, editform=editform)
                message = old.description()

            pureextra = extra.copy()
            extra['amend_source'] = old.hex()

            new = context.memctx(repo,
                                 parents=[base.node(), old.p2().node()],
                                 text=message,
                                 files=files,
                                 filectxfn=filectxfn,
                                 user=user,
                                 date=date,
                                 extra=extra,
                                 editor=editor)

            newdesc = changelog.stripdesc(new.description())
            if ((not node)
                and newdesc == old.description()
                and user == old.user()
                and date == old.date()
                and pureextra == old.extra()):
                # nothing changed. continuing here would create a new node
                # anyway because of the amend_source noise.
                #
                # This not what we expect from amend.
                return old.node()

            ph = repo.ui.config('phases', 'new-commit', phases.draft)
            try:
                if opts.get('secret'):
                    commitphase = 'secret'
                else:
                    commitphase = old.phase()
                repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend')
                newid = repo.commitctx(new)
            finally:
                repo.ui.setconfig('phases', 'new-commit', ph, 'amend')
            if newid != old.node():
                # Reroute the working copy parent to the new changeset
                repo.setparents(newid, nullid)

                # Move bookmarks from old parent to amend commit
                bms = repo.nodebookmarks(old.node())
                if bms:
                    marks = repo._bookmarks
                    for bm in bms:
                        ui.debug('moving bookmarks %r from %s to %s\n' %
                                 (marks, old.hex(), hex(newid)))
                        marks[bm] = newid
                    marks.recordchange(tr)
            #commit the whole amend process
            if createmarkers:
                # mark the new changeset as successor of the rewritten one
                new = repo[newid]
                obs = [(old, (new,))]
                if node:
                    obs.append((ctx, ()))

                obsolete.createmarkers(repo, obs)
        if not createmarkers and newid != old.node():
            # Strip the intermediate commit (if there was one) and the amended
            # commit
            if node:
                ui.note(_('stripping intermediate changeset %s\n') % ctx)
            ui.note(_('stripping amended changeset %s\n') % old)
            repair.strip(ui, repo, old.node(), topic='amend-backup')
    finally:
        lockmod.release(lock, wlock)
    return newid
2722
2722
def commiteditor(repo, ctx, subs, editform=''):
    """Return ctx's existing description, or invoke the editor when the
    changeset has none (aborting if the message is left unchanged)."""
    desc = ctx.description()
    if desc:
        return desc
    return commitforceeditor(repo, ctx, subs, editform=editform,
                             unchangedmessagedetection=True)
2728
2728
def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Open the commit message editor for ctx and return the final text.

    The initial buffer comes from the most specific matching
    ``[committemplate]`` entry for ``editform`` (falling back to the plain
    built-in text). ``HG:`` comment lines are stripped from the result.
    ``finishdesc``, if given, post-processes the text. Raises error.Abort
    on an empty message, or -- with unchangedmessagedetection -- when the
    templated text was returned unedited.
    """
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # walk editform components from most to least specific, e.g.
    # changeset.commit.amend -> changeset.commit -> changeset
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = pycompat.getcwd()
    os.chdir(repo.root)
    try:
        # make in-memory changes visible to external process
        tr = repo.currenttransaction()
        repo.dirstate.write(tr)
        pending = tr and tr.writepending() and repo.root

        editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                                  editform=editform, pending=pending)
        text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
    finally:
        # BUG FIX: restore the previous cwd even when the editor (or
        # writepending) raises; the original left the process stranded in
        # repo.root on any exception above
        os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text
2769
2769
def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the configured commit-editor template for ctx and return it."""
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)

    # every [committemplate] item except 'changeset' itself becomes a
    # named template snippet usable from the main template
    for key, value in repo.ui.configitems('committemplate'):
        if key != 'changeset':
            t.t.cache[key] = value

    # ensure that extramsg is string
    extramsg = extramsg or ''

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()
2786
2786
def hgprefix(msg):
    """Prefix each non-empty line of msg with "HG: " (empty lines are
    dropped)."""
    kept = (line for line in msg.split("\n") if line)
    return "\n".join("HG: %s" % line for line in kept)
2789
2789
def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default (non-templated) text shown in the commit editor."""
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()

    lines = []
    add = lines.append
    if ctx.description():
        add(ctx.description())
    add("")
    add("") # Empty line between message and comments.
    add(hgprefix(_("Enter commit message."
                   " Lines beginning with 'HG:' are removed.")))
    add(hgprefix(extramsg))
    add("HG: --")
    add(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        add(hgprefix(_("branch merge")))
    if ctx.branch():
        add(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        add(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    for s in subs:
        add(hgprefix(_("subrepo %s") % s))
    for f in added:
        add(hgprefix(_("added %s") % f))
    for f in modified:
        add(hgprefix(_("changed %s") % f))
    for f in removed:
        add(hgprefix(_("removed %s") % f))
    if not (added or modified or removed):
        add(hgprefix(_("no files changed")))
    add("")

    return "\n".join(lines)
2817
2817
def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print informational messages about a freshly created commit:
    'created new head', 'reopening closed branch head', and (verbose or
    debug) the committed changeset id."""
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (bheads and not opts.get('amend') and node not in bheads
            and not any(p.node() in bheads and p.branch() == branch
                        for p in parents)):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the others it
        # fires depending on the two parents' kinds (N: null/no parent,
        # B: other named branch, C: non-head changeset, H: former head of
        # the current branch), assuming some branch heads already exist:
        #
        #   N+N  yes  additional topo root
        #   B+N  yes  additional branch root
        #   C+N  yes  additional topo head
        #   H+N  no   usual case
        #   B+B  yes  weird additional branch root
        #   C+B  yes  branch merge
        #   H+B  no   merge with named branch
        #   C+C  yes  additional head from merge
        #   C+H  no   merge with a head
        #   H+H  no   head merge: head count decreases

    if not opts.get('close_branch'):
        for p in parents:
            if p.closesbranch() and p.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % p)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
2865
2865
def postcommitstatus(repo, pats, opts):
    """Return the working-directory status as it stands after a commit."""
    matcher = scmutil.match(repo[None], pats, opts)
    return repo.status(match=matcher)
2868
2868
2869 def revert(ui, repo, ctx, parents, *pats, **opts):
2869 def revert(ui, repo, ctx, parents, *pats, **opts):
2870 parent, p2 = parents
2870 parent, p2 = parents
2871 node = ctx.node()
2871 node = ctx.node()
2872
2872
2873 mf = ctx.manifest()
2873 mf = ctx.manifest()
2874 if node == p2:
2874 if node == p2:
2875 parent = p2
2875 parent = p2
2876
2876
2877 # need all matching names in dirstate and manifest of target rev,
2877 # need all matching names in dirstate and manifest of target rev,
2878 # so have to walk both. do not print errors if files exist in one
2878 # so have to walk both. do not print errors if files exist in one
2879 # but not other. in both cases, filesets should be evaluated against
2879 # but not other. in both cases, filesets should be evaluated against
2880 # workingctx to get consistent result (issue4497). this means 'set:**'
2880 # workingctx to get consistent result (issue4497). this means 'set:**'
2881 # cannot be used to select missing files from target rev.
2881 # cannot be used to select missing files from target rev.
2882
2882
2883 # `names` is a mapping for all elements in working copy and target revision
2883 # `names` is a mapping for all elements in working copy and target revision
2884 # The mapping is in the form:
2884 # The mapping is in the form:
2885 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2885 # <asb path in repo> -> (<path from CWD>, <exactly specified by matcher?>)
2886 names = {}
2886 names = {}
2887
2887
2888 with repo.wlock():
2888 with repo.wlock():
2889 ## filling of the `names` mapping
2889 ## filling of the `names` mapping
2890 # walk dirstate to fill `names`
2890 # walk dirstate to fill `names`
2891
2891
2892 interactive = opts.get('interactive', False)
2892 interactive = opts.get('interactive', False)
2893 wctx = repo[None]
2893 wctx = repo[None]
2894 m = scmutil.match(wctx, pats, opts)
2894 m = scmutil.match(wctx, pats, opts)
2895
2895
2896 # we'll need this later
2896 # we'll need this later
2897 targetsubs = sorted(s for s in wctx.substate if m(s))
2897 targetsubs = sorted(s for s in wctx.substate if m(s))
2898
2898
2899 if not m.always():
2899 if not m.always():
2900 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2900 for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
2901 names[abs] = m.rel(abs), m.exact(abs)
2901 names[abs] = m.rel(abs), m.exact(abs)
2902
2902
2903 # walk target manifest to fill `names`
2903 # walk target manifest to fill `names`
2904
2904
2905 def badfn(path, msg):
2905 def badfn(path, msg):
2906 if path in names:
2906 if path in names:
2907 return
2907 return
2908 if path in ctx.substate:
2908 if path in ctx.substate:
2909 return
2909 return
2910 path_ = path + '/'
2910 path_ = path + '/'
2911 for f in names:
2911 for f in names:
2912 if f.startswith(path_):
2912 if f.startswith(path_):
2913 return
2913 return
2914 ui.warn("%s: %s\n" % (m.rel(path), msg))
2914 ui.warn("%s: %s\n" % (m.rel(path), msg))
2915
2915
2916 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2916 for abs in ctx.walk(matchmod.badmatch(m, badfn)):
2917 if abs not in names:
2917 if abs not in names:
2918 names[abs] = m.rel(abs), m.exact(abs)
2918 names[abs] = m.rel(abs), m.exact(abs)
2919
2919
2920 # Find status of all file in `names`.
2920 # Find status of all file in `names`.
2921 m = scmutil.matchfiles(repo, names)
2921 m = scmutil.matchfiles(repo, names)
2922
2922
2923 changes = repo.status(node1=node, match=m,
2923 changes = repo.status(node1=node, match=m,
2924 unknown=True, ignored=True, clean=True)
2924 unknown=True, ignored=True, clean=True)
2925 else:
2925 else:
2926 changes = repo.status(node1=node, match=m)
2926 changes = repo.status(node1=node, match=m)
2927 for kind in changes:
2927 for kind in changes:
2928 for abs in kind:
2928 for abs in kind:
2929 names[abs] = m.rel(abs), m.exact(abs)
2929 names[abs] = m.rel(abs), m.exact(abs)
2930
2930
2931 m = scmutil.matchfiles(repo, names)
2931 m = scmutil.matchfiles(repo, names)
2932
2932
2933 modified = set(changes.modified)
2933 modified = set(changes.modified)
2934 added = set(changes.added)
2934 added = set(changes.added)
2935 removed = set(changes.removed)
2935 removed = set(changes.removed)
2936 _deleted = set(changes.deleted)
2936 _deleted = set(changes.deleted)
2937 unknown = set(changes.unknown)
2937 unknown = set(changes.unknown)
2938 unknown.update(changes.ignored)
2938 unknown.update(changes.ignored)
2939 clean = set(changes.clean)
2939 clean = set(changes.clean)
2940 modadded = set()
2940 modadded = set()
2941
2941
2942 # split between files known in target manifest and the others
2942 # split between files known in target manifest and the others
2943 smf = set(mf)
2943 smf = set(mf)
2944
2944
2945 # determine the exact nature of the deleted changesets
2945 # determine the exact nature of the deleted changesets
2946 deladded = _deleted - smf
2946 deladded = _deleted - smf
2947 deleted = _deleted - deladded
2947 deleted = _deleted - deladded
2948
2948
2949 # We need to account for the state of the file in the dirstate,
2949 # We need to account for the state of the file in the dirstate,
2950 # even when we revert against something else than parent. This will
2950 # even when we revert against something else than parent. This will
2951 # slightly alter the behavior of revert (doing back up or not, delete
2951 # slightly alter the behavior of revert (doing back up or not, delete
2952 # or just forget etc).
2952 # or just forget etc).
2953 if parent == node:
2953 if parent == node:
2954 dsmodified = modified
2954 dsmodified = modified
2955 dsadded = added
2955 dsadded = added
2956 dsremoved = removed
2956 dsremoved = removed
2957 # store all local modifications, useful later for rename detection
2957 # store all local modifications, useful later for rename detection
2958 localchanges = dsmodified | dsadded
2958 localchanges = dsmodified | dsadded
2959 modified, added, removed = set(), set(), set()
2959 modified, added, removed = set(), set(), set()
2960 else:
2960 else:
2961 changes = repo.status(node1=parent, match=m)
2961 changes = repo.status(node1=parent, match=m)
2962 dsmodified = set(changes.modified)
2962 dsmodified = set(changes.modified)
2963 dsadded = set(changes.added)
2963 dsadded = set(changes.added)
2964 dsremoved = set(changes.removed)
2964 dsremoved = set(changes.removed)
2965 # store all local modifications, useful later for rename detection
2965 # store all local modifications, useful later for rename detection
2966 localchanges = dsmodified | dsadded
2966 localchanges = dsmodified | dsadded
2967
2967
2968 # only take into account for removes between wc and target
2968 # only take into account for removes between wc and target
2969 clean |= dsremoved - removed
2969 clean |= dsremoved - removed
2970 dsremoved &= removed
2970 dsremoved &= removed
2971 # distinct between dirstate remove and other
2971 # distinct between dirstate remove and other
2972 removed -= dsremoved
2972 removed -= dsremoved
2973
2973
2974 modadded = added & dsmodified
2974 modadded = added & dsmodified
2975 added -= modadded
2975 added -= modadded
2976
2976
2977 # tell newly modified apart.
2977 # tell newly modified apart.
2978 dsmodified &= modified
2978 dsmodified &= modified
2979 dsmodified |= modified & dsadded # dirstate added may need backup
2979 dsmodified |= modified & dsadded # dirstate added may need backup
2980 modified -= dsmodified
2980 modified -= dsmodified
2981
2981
2982 # We need to wait for some post-processing to update this set
2982 # We need to wait for some post-processing to update this set
2983 # before making the distinction. The dirstate will be used for
2983 # before making the distinction. The dirstate will be used for
2984 # that purpose.
2984 # that purpose.
2985 dsadded = added
2985 dsadded = added
2986
2986
2987 # in case of merge, files that are actually added can be reported as
2987 # in case of merge, files that are actually added can be reported as
2988 # modified, we need to post process the result
2988 # modified, we need to post process the result
2989 if p2 != nullid:
2989 if p2 != nullid:
2990 mergeadd = dsmodified - smf
2990 mergeadd = dsmodified - smf
2991 dsadded |= mergeadd
2991 dsadded |= mergeadd
2992 dsmodified -= mergeadd
2992 dsmodified -= mergeadd
2993
2993
2994 # if f is a rename, update `names` to also revert the source
2994 # if f is a rename, update `names` to also revert the source
2995 cwd = repo.getcwd()
2995 cwd = repo.getcwd()
2996 for f in localchanges:
2996 for f in localchanges:
2997 src = repo.dirstate.copied(f)
2997 src = repo.dirstate.copied(f)
2998 # XXX should we check for rename down to target node?
2998 # XXX should we check for rename down to target node?
2999 if src and src not in names and repo.dirstate[src] == 'r':
2999 if src and src not in names and repo.dirstate[src] == 'r':
3000 dsremoved.add(src)
3000 dsremoved.add(src)
3001 names[src] = (repo.pathto(src, cwd), True)
3001 names[src] = (repo.pathto(src, cwd), True)
3002
3002
3003 # distinguish between file to forget and the other
3003 # distinguish between file to forget and the other
3004 added = set()
3004 added = set()
3005 for abs in dsadded:
3005 for abs in dsadded:
3006 if repo.dirstate[abs] != 'a':
3006 if repo.dirstate[abs] != 'a':
3007 added.add(abs)
3007 added.add(abs)
3008 dsadded -= added
3008 dsadded -= added
3009
3009
3010 for abs in deladded:
3010 for abs in deladded:
3011 if repo.dirstate[abs] == 'a':
3011 if repo.dirstate[abs] == 'a':
3012 dsadded.add(abs)
3012 dsadded.add(abs)
3013 deladded -= dsadded
3013 deladded -= dsadded
3014
3014
3015 # For files marked as removed, we check if an unknown file is present at
3015 # For files marked as removed, we check if an unknown file is present at
3016 # the same path. If a such file exists it may need to be backed up.
3016 # the same path. If a such file exists it may need to be backed up.
3017 # Making the distinction at this stage helps have simpler backup
3017 # Making the distinction at this stage helps have simpler backup
3018 # logic.
3018 # logic.
3019 removunk = set()
3019 removunk = set()
3020 for abs in removed:
3020 for abs in removed:
3021 target = repo.wjoin(abs)
3021 target = repo.wjoin(abs)
3022 if os.path.lexists(target):
3022 if os.path.lexists(target):
3023 removunk.add(abs)
3023 removunk.add(abs)
3024 removed -= removunk
3024 removed -= removunk
3025
3025
3026 dsremovunk = set()
3026 dsremovunk = set()
3027 for abs in dsremoved:
3027 for abs in dsremoved:
3028 target = repo.wjoin(abs)
3028 target = repo.wjoin(abs)
3029 if os.path.lexists(target):
3029 if os.path.lexists(target):
3030 dsremovunk.add(abs)
3030 dsremovunk.add(abs)
3031 dsremoved -= dsremovunk
3031 dsremoved -= dsremovunk
3032
3032
3033 # action to be actually performed by revert
3033 # action to be actually performed by revert
3034 # (<list of file>, message>) tuple
3034 # (<list of file>, message>) tuple
3035 actions = {'revert': ([], _('reverting %s\n')),
3035 actions = {'revert': ([], _('reverting %s\n')),
3036 'add': ([], _('adding %s\n')),
3036 'add': ([], _('adding %s\n')),
3037 'remove': ([], _('removing %s\n')),
3037 'remove': ([], _('removing %s\n')),
3038 'drop': ([], _('removing %s\n')),
3038 'drop': ([], _('removing %s\n')),
3039 'forget': ([], _('forgetting %s\n')),
3039 'forget': ([], _('forgetting %s\n')),
3040 'undelete': ([], _('undeleting %s\n')),
3040 'undelete': ([], _('undeleting %s\n')),
3041 'noop': (None, _('no changes needed to %s\n')),
3041 'noop': (None, _('no changes needed to %s\n')),
3042 'unknown': (None, _('file not managed: %s\n')),
3042 'unknown': (None, _('file not managed: %s\n')),
3043 }
3043 }
3044
3044
3045 # "constant" that convey the backup strategy.
3045 # "constant" that convey the backup strategy.
3046 # All set to `discard` if `no-backup` is set do avoid checking
3046 # All set to `discard` if `no-backup` is set do avoid checking
3047 # no_backup lower in the code.
3047 # no_backup lower in the code.
3048 # These values are ordered for comparison purposes
3048 # These values are ordered for comparison purposes
3049 backupinteractive = 3 # do backup if interactively modified
3049 backupinteractive = 3 # do backup if interactively modified
3050 backup = 2 # unconditionally do backup
3050 backup = 2 # unconditionally do backup
3051 check = 1 # check if the existing file differs from target
3051 check = 1 # check if the existing file differs from target
3052 discard = 0 # never do backup
3052 discard = 0 # never do backup
3053 if opts.get('no_backup'):
3053 if opts.get('no_backup'):
3054 backupinteractive = backup = check = discard
3054 backupinteractive = backup = check = discard
3055 if interactive:
3055 if interactive:
3056 dsmodifiedbackup = backupinteractive
3056 dsmodifiedbackup = backupinteractive
3057 else:
3057 else:
3058 dsmodifiedbackup = backup
3058 dsmodifiedbackup = backup
3059 tobackup = set()
3059 tobackup = set()
3060
3060
3061 backupanddel = actions['remove']
3061 backupanddel = actions['remove']
3062 if not opts.get('no_backup'):
3062 if not opts.get('no_backup'):
3063 backupanddel = actions['drop']
3063 backupanddel = actions['drop']
3064
3064
3065 disptable = (
3065 disptable = (
3066 # dispatch table:
3066 # dispatch table:
3067 # file state
3067 # file state
3068 # action
3068 # action
3069 # make backup
3069 # make backup
3070
3070
3071 ## Sets that results that will change file on disk
3071 ## Sets that results that will change file on disk
3072 # Modified compared to target, no local change
3072 # Modified compared to target, no local change
3073 (modified, actions['revert'], discard),
3073 (modified, actions['revert'], discard),
3074 # Modified compared to target, but local file is deleted
3074 # Modified compared to target, but local file is deleted
3075 (deleted, actions['revert'], discard),
3075 (deleted, actions['revert'], discard),
3076 # Modified compared to target, local change
3076 # Modified compared to target, local change
3077 (dsmodified, actions['revert'], dsmodifiedbackup),
3077 (dsmodified, actions['revert'], dsmodifiedbackup),
3078 # Added since target
3078 # Added since target
3079 (added, actions['remove'], discard),
3079 (added, actions['remove'], discard),
3080 # Added in working directory
3080 # Added in working directory
3081 (dsadded, actions['forget'], discard),
3081 (dsadded, actions['forget'], discard),
3082 # Added since target, have local modification
3082 # Added since target, have local modification
3083 (modadded, backupanddel, backup),
3083 (modadded, backupanddel, backup),
3084 # Added since target but file is missing in working directory
3084 # Added since target but file is missing in working directory
3085 (deladded, actions['drop'], discard),
3085 (deladded, actions['drop'], discard),
3086 # Removed since target, before working copy parent
3086 # Removed since target, before working copy parent
3087 (removed, actions['add'], discard),
3087 (removed, actions['add'], discard),
3088 # Same as `removed` but an unknown file exists at the same path
3088 # Same as `removed` but an unknown file exists at the same path
3089 (removunk, actions['add'], check),
3089 (removunk, actions['add'], check),
3090 # Removed since targe, marked as such in working copy parent
3090 # Removed since targe, marked as such in working copy parent
3091 (dsremoved, actions['undelete'], discard),
3091 (dsremoved, actions['undelete'], discard),
3092 # Same as `dsremoved` but an unknown file exists at the same path
3092 # Same as `dsremoved` but an unknown file exists at the same path
3093 (dsremovunk, actions['undelete'], check),
3093 (dsremovunk, actions['undelete'], check),
3094 ## the following sets does not result in any file changes
3094 ## the following sets does not result in any file changes
3095 # File with no modification
3095 # File with no modification
3096 (clean, actions['noop'], discard),
3096 (clean, actions['noop'], discard),
3097 # Existing file, not tracked anywhere
3097 # Existing file, not tracked anywhere
3098 (unknown, actions['unknown'], discard),
3098 (unknown, actions['unknown'], discard),
3099 )
3099 )
3100
3100
3101 for abs, (rel, exact) in sorted(names.items()):
3101 for abs, (rel, exact) in sorted(names.items()):
3102 # target file to be touch on disk (relative to cwd)
3102 # target file to be touch on disk (relative to cwd)
3103 target = repo.wjoin(abs)
3103 target = repo.wjoin(abs)
3104 # search the entry in the dispatch table.
3104 # search the entry in the dispatch table.
3105 # if the file is in any of these sets, it was touched in the working
3105 # if the file is in any of these sets, it was touched in the working
3106 # directory parent and we are sure it needs to be reverted.
3106 # directory parent and we are sure it needs to be reverted.
3107 for table, (xlist, msg), dobackup in disptable:
3107 for table, (xlist, msg), dobackup in disptable:
3108 if abs not in table:
3108 if abs not in table:
3109 continue
3109 continue
3110 if xlist is not None:
3110 if xlist is not None:
3111 xlist.append(abs)
3111 xlist.append(abs)
3112 if dobackup:
3112 if dobackup:
3113 # If in interactive mode, don't automatically create
3113 # If in interactive mode, don't automatically create
3114 # .orig files (issue4793)
3114 # .orig files (issue4793)
3115 if dobackup == backupinteractive:
3115 if dobackup == backupinteractive:
3116 tobackup.add(abs)
3116 tobackup.add(abs)
3117 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3117 elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
3118 bakname = scmutil.origpath(ui, repo, rel)
3118 bakname = scmutil.origpath(ui, repo, rel)
3119 ui.note(_('saving current version of %s as %s\n') %
3119 ui.note(_('saving current version of %s as %s\n') %
3120 (rel, bakname))
3120 (rel, bakname))
3121 if not opts.get('dry_run'):
3121 if not opts.get('dry_run'):
3122 if interactive:
3122 if interactive:
3123 util.copyfile(target, bakname)
3123 util.copyfile(target, bakname)
3124 else:
3124 else:
3125 util.rename(target, bakname)
3125 util.rename(target, bakname)
3126 if ui.verbose or not exact:
3126 if ui.verbose or not exact:
3127 if not isinstance(msg, basestring):
3127 if not isinstance(msg, basestring):
3128 msg = msg(abs)
3128 msg = msg(abs)
3129 ui.status(msg % rel)
3129 ui.status(msg % rel)
3130 elif exact:
3130 elif exact:
3131 ui.warn(msg % rel)
3131 ui.warn(msg % rel)
3132 break
3132 break
3133
3133
3134 if not opts.get('dry_run'):
3134 if not opts.get('dry_run'):
3135 needdata = ('revert', 'add', 'undelete')
3135 needdata = ('revert', 'add', 'undelete')
3136 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3136 _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata])
3137 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3137 _performrevert(repo, parents, ctx, actions, interactive, tobackup)
3138
3138
3139 if targetsubs:
3139 if targetsubs:
3140 # Revert the subrepos on the revert list
3140 # Revert the subrepos on the revert list
3141 for sub in targetsubs:
3141 for sub in targetsubs:
3142 try:
3142 try:
3143 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3143 wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts)
3144 except KeyError:
3144 except KeyError:
3145 raise error.Abort("subrepository '%s' does not exist in %s!"
3145 raise error.Abort("subrepository '%s' does not exist in %s!"
3146 % (sub, short(ctx.node())))
3146 % (sub, short(ctx.node())))
3147
3147
def _revertprefetch(repo, ctx, *files):
    """Let extension changing the storage layer prefetch content

    Default implementation is a no-op; extensions that keep file data
    outside the local store are expected to override this so that the
    content of ``files`` from ``ctx`` can be fetched in one batch before
    revert reads them individually.
    """
    pass
3151
3151
def _performrevert(repo, parents, ctx, actions, interactive=False,
                   tobackup=None):
    """function that actually perform all the actions computed for revert

    This is an independent function to let extension to plug in and react to
    the imminent revert.

    Make sure you have the working directory locked when calling this function.

    ``parents`` is the (parent, p2) pair of the working directory,
    ``ctx`` the context being reverted to, and ``actions`` the dict of
    (file list, message) pairs computed by the revert dispatch table.
    ``tobackup`` is the set of files to back up lazily in interactive
    mode (only when one of their hunks is actually applied).
    """
    parent, p2 = parents
    node = ctx.node()
    # files the user opted out of in interactive mode; fed to the matcher
    # below so their hunks are not offered for selection
    excluded_files = []
    matcher_opts = {"exclude": excluded_files}

    def checkout(f):
        # write f's content (and flags) from the target context into the
        # working directory
        fc = ctx[f]
        repo.wwrite(f, fc.data(), fc.flags())

    def doremove(f):
        try:
            util.unlinkpath(repo.wjoin(f))
        except OSError:
            # file already gone on disk; still mark it removed below
            pass
        repo.dirstate.remove(f)

    audit_path = pathutil.pathauditor(repo.root)
    for f in actions['forget'][0]:
        if interactive:
            choice = repo.ui.promptchoice(
                _("forget added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                repo.dirstate.drop(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            repo.dirstate.drop(f)
    for f in actions['remove'][0]:
        audit_path(f)
        if interactive:
            choice = repo.ui.promptchoice(
                _("remove added file %s (Yn)?$$ &Yes $$ &No") % f)
            if choice == 0:
                doremove(f)
            else:
                excluded_files.append(repo.wjoin(f))
        else:
            doremove(f)
    for f in actions['drop'][0]:
        # 'drop': forget the file but leave it on disk
        audit_path(f)
        repo.dirstate.remove(f)

    normal = None
    if node == parent:
        # We're reverting to our parent. If possible, we'd like status
        # to report the file as clean. We have to use normallookup for
        # merges to avoid losing information about merged/dirty files.
        if p2 != nullid:
            normal = repo.dirstate.normallookup
        else:
            normal = repo.dirstate.normal

    newlyaddedandmodifiedfiles = set()
    if interactive:
        # Prompt the user for changes to revert
        torevert = [repo.wjoin(f) for f in actions['revert'][0]]
        m = scmutil.match(ctx, torevert, matcher_opts)
        diffopts = patch.difffeatureopts(repo.ui, whitespace=True)
        diffopts.nodates = True
        diffopts.git = True
        reversehunks = repo.ui.configbool('experimental',
                                          'revertalternateinteractivemode',
                                          True)
        if reversehunks:
            # diff from target to working dir; selected hunks are reversed
            # before being applied
            diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts)
        else:
            diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts)
        originalchunks = patch.parsepatch(diff)
        operation = 'discard' if node == parent else 'revert'

        try:

            chunks, opts = recordfilter(repo.ui, originalchunks,
                                        operation=operation)
            if reversehunks:
                chunks = patch.reversehunks(chunks)

        except patch.PatchError as err:
            raise error.Abort(_('error parsing patch: %s') % err)

        newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks)
        if tobackup is None:
            tobackup = set()
        # Apply changes
        fp = stringio()
        for c in chunks:
            # Create a backup file only if this hunk should be backed up
            if ishunk(c) and c.header.filename() in tobackup:
                abs = c.header.filename()
                target = repo.wjoin(abs)
                bakname = scmutil.origpath(repo.ui, repo, m.rel(abs))
                util.copyfile(target, bakname)
                tobackup.remove(abs)
            c.write(fp)
        dopatch = fp.tell()
        fp.seek(0)
        if dopatch:
            try:
                patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None)
            except patch.PatchError as err:
                raise error.Abort(str(err))
        del fp
    else:
        for f in actions['revert'][0]:
            checkout(f)
            if normal:
                normal(f)

    for f in actions['add'][0]:
        # Don't checkout modified files, they are already created by the diff
        if f not in newlyaddedandmodifiedfiles:
            checkout(f)
        repo.dirstate.add(f)

    normal = repo.dirstate.normallookup
    if node == parent and p2 == nullid:
        normal = repo.dirstate.normal
    for f in actions['undelete'][0]:
        checkout(f)
        normal(f)

    # restore copy/rename metadata for files we just (re)materialized
    copied = copies.pathcopies(repo[parent], ctx)

    for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]:
        if f in copied:
            repo.dirstate.copy(copied[f], f)
3287
3287
def command(table):
    """Build a decorator factory that registers commands into ``table``.

    ``table`` must be a dict mapping command names to command entries.

    The returned callable is used as a decorator factory::

        cmd = command(table)

        @cmd(name, options, synopsis, norepo=..., optionalrepo=...,
             inferrepo=...)
        def thecommand(...):
            ...

    ``name`` is the command name (the table key). ``options`` is an
    iterable of argument tuples in the format expected by
    ``mercurial.fancyopts.fancyopts()``. ``synopsis``, when given, is a
    short one-line usage summary shown in the help output.

    ``norepo`` marks a command that does not require a local repository
    (most commands do, hence the False default). ``optionalrepo`` marks a
    command that only optionally uses one. ``inferrepo`` asks dispatch to
    look for a repository among the command line arguments (see
    ``findrepo()``) and use it if found. These three flags are stored as
    attributes on the decorated function.
    """
    def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False,
            inferrepo=False):
        def decorator(func):
            # dispatch reads these attributes off the command function
            func.norepo = norepo
            func.optionalrepo = optionalrepo
            func.inferrepo = inferrepo
            entry = (func, list(options))
            if synopsis:
                entry = entry + (synopsis,)
            table[name] = entry
            return func
        return decorator
    return cmd
3332
3332
def checkunresolved(ms):
    # Deprecated alias kept for backward compatibility: emit a warning,
    # then delegate to the real implementation in mergeutil (moved in 4.1).
    ms._repo.ui.deprecwarn('checkunresolved moved from cmdutil to mergeutil',
                           '4.1')
    return mergeutil.checkunresolved(ms)
3337
3337
# a list of (ui, repo, otherpeer, opts, missing) functions called by
# commands.outgoing. "missing" is "missing" of the result of
# "findcommonoutgoing()"
outgoinghooks = util.hooks()

# a list of (ui, repo) functions called by commands.summary
summaryhooks = util.hooks()

# a list of (ui, repo, opts, changes) functions called by commands.summary.
#
# functions should return tuple of booleans below, if 'changes' is None:
#  (whether-incomings-are-needed, whether-outgoings-are-needed)
#
# otherwise, 'changes' is a tuple of tuples below:
#  - (sourceurl, sourcebranch, sourcepeer, incoming)
#  - (desturl, destbranch, destpeer, outgoing)
summaryremotehooks = util.hooks()

# A list of state files kept by multistep operations like graft.
# Since graft cannot be aborted, it is considered 'clearable' by update.
# note: bisect is intentionally excluded
# (state file, clearable, allowcommit, error, hint)
# 'clearable' means clearunfinished() may delete the state file;
# 'allowcommit' means checkunfinished(commit=True) tolerates the state.
unfinishedstates = [
    ('graftstate', True, False, _('graft in progress'),
     _("use 'hg graft --continue' or 'hg update' to abort")),
    ('updatestate', True, False, _('last update was interrupted'),
     _("use 'hg update' to get a consistent checkout"))
    ]
3366
3366
def checkunfinished(repo, commit=False):
    '''Abort if an unfinished multistep operation (like graft) is found.

    It's probably good to check this right before bailifchanged(). When
    ``commit`` is True, states whose 'allowcommit' flag is set are
    tolerated instead of aborting.
    '''
    for statefile, clearable, allowcommit, msg, hint in unfinishedstates:
        tolerated = commit and allowcommit
        if not tolerated and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
3377
3377
def clearunfinished(repo):
    '''Check for unfinished operations (as above), and clear the ones
    that are clearable.
    '''
    # First pass: a non-clearable state must abort before anything is
    # deleted, so a partial cleanup never happens.
    for statefile, clearable, _allowcommit, msg, hint in unfinishedstates:
        if not clearable and repo.vfs.exists(statefile):
            raise error.Abort(msg, hint=hint)
    # Second pass: remove every clearable state file that exists.
    for statefile, clearable, _allowcommit, _msg, _hint in unfinishedstates:
        if clearable and repo.vfs.exists(statefile):
            util.unlink(repo.join(statefile))
3388
3388
# (state file, command to finish the operation it tracks) pairs, consulted
# by howtocontinue() below to tell the user what to run next
afterresolvedstates = [
    ('graftstate',
     _('hg graft --continue')),
    ]
3393
3393
def howtocontinue(repo):
    '''Check for an unfinished operation and return the command to finish
    it.

    afterresolvedstates tuples define a .hg/{file} and the corresponding
    command needed to finish it.

    Returns a (msg, warning) tuple. 'msg' is a string (None when there is
    nothing to continue) and 'warning' is a boolean.
    '''
    template = _("continue: %s")
    # an explicit state file wins over the generic dirty-working-dir case
    for statefile, cmd in afterresolvedstates:
        if repo.vfs.exists(statefile):
            return template % cmd, True
    wctx = repo[None]
    if any(repo.status()) or any(wctx.sub(s).dirty()
                                 for s in wctx.substate):
        return template % _("hg commit"), False
    return None, None
3414
3414
def checkafterresolved(repo):
    '''Inform the user about the next action after completing hg resolve

    If there's a matching afterresolvedstates, howtocontinue will yield
    repo.ui.warn as the reporter.

    Otherwise, it will yield repo.ui.note.
    '''
    msg, warning = howtocontinue(repo)
    if msg is None:
        return
    reporter = repo.ui.warn if warning else repo.ui.note
    reporter("%s\n" % msg)
3429
3429
def wrongtooltocontinue(repo, task):
    '''Raise an abort suggesting how to properly continue if there is an
    active task.

    Uses howtocontinue() to find the active task.

    If there's no task (repo.ui.note for 'hg commit'), it does not offer
    a hint.
    '''
    msg, warning = howtocontinue(repo)
    hint = msg if warning else None
    raise error.Abort(_('no %s in progress') % task, hint=hint)
3444
3444
class dirstateguard(dirstateguardmod.dirstateguard):
    # Deprecated location: the implementation moved to the dirstateguard
    # module in 4.1. This shim subclass only adds a deprecation warning.
    def __init__(self, repo, name):
        dirstateguardmod.dirstateguard.__init__(self, repo, name)
        repo.ui.deprecwarn(
            'dirstateguard has moved from cmdutil to dirstateguard',
            '4.1')
@@ -1,196 +1,200 b''
1 # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod
1 # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import errno
11 import errno
12 import mimetypes
12 import mimetypes
13 import os
13 import os
14
14
15 from .. import util
15 from .. import (
16 pycompat,
17 util,
18 )
16
19
17 httpserver = util.httpserver
20 httpserver = util.httpserver
18
21
19 HTTP_OK = 200
22 HTTP_OK = 200
20 HTTP_NOT_MODIFIED = 304
23 HTTP_NOT_MODIFIED = 304
21 HTTP_BAD_REQUEST = 400
24 HTTP_BAD_REQUEST = 400
22 HTTP_UNAUTHORIZED = 401
25 HTTP_UNAUTHORIZED = 401
23 HTTP_FORBIDDEN = 403
26 HTTP_FORBIDDEN = 403
24 HTTP_NOT_FOUND = 404
27 HTTP_NOT_FOUND = 404
25 HTTP_METHOD_NOT_ALLOWED = 405
28 HTTP_METHOD_NOT_ALLOWED = 405
26 HTTP_SERVER_ERROR = 500
29 HTTP_SERVER_ERROR = 500
27
30
28
31
def ismember(ui, username, userlist):
    """Tell whether username belongs to userlist.

    A userlist consisting of the single entry '*' matches every user.
    Extensions may override this to implement richer authorization schemes.
    """
    if userlist == ['*']:
        return True
    return username in userlist
37
40
def checkauthz(hgweb, req, op):
    '''Verify that this request may perform operation `op`.

    Permission is decided from request data (including authentication
    info).  Returns silently when the operation is allowed; raises
    ErrorResponse otherwise.'''

    user = req.env.get('REMOTE_USER')

    # read access: explicit deny list wins, then an allow list (if any)
    denied = hgweb.configlist('web', 'deny_read')
    if denied and (not user or ismember(hgweb.repo.ui, user, denied)):
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')

    allowed = hgweb.configlist('web', 'allow_read')
    if allowed and not ismember(hgweb.repo.ui, user, allowed):
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized')

    if op == 'pull':
        if not hgweb.allowpull:
            raise ErrorResponse(HTTP_UNAUTHORIZED, 'pull not authorized')
        return
    if op is None: # interface requests carry no operation
        return

    # enforce that you can only push using POST requests
    if req.env['REQUEST_METHOD'] != 'POST':
        raise ErrorResponse(HTTP_METHOD_NOT_ALLOWED,
                            'push requires POST request')

    # require ssl by default for pushing, auth info cannot be sniffed
    # and replayed
    if (hgweb.configbool('web', 'push_ssl', True)
            and req.env.get('wsgi.url_scheme') != 'https'):
        raise ErrorResponse(HTTP_FORBIDDEN, 'ssl required')

    denypush = hgweb.configlist('web', 'deny_push')
    if denypush and (not user or ismember(hgweb.repo.ui, user, denypush)):
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')

    allowpush = hgweb.configlist('web', 'allow_push')
    if not (allowpush and ismember(hgweb.repo.ui, user, allowpush)):
        raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
76
79
# Permission-check hooks for hgweb; extensions may append their own.
# Each hook is invoked as hook(hgweb, request, operation), where the
# operation is one of 'read', 'pull' or 'push'.  A hook either raises
# ErrorResponse to deny the request or simply returns to allow it.
#
# Both authentication and authorization can be implemented this way.
permhooks = [checkauthz]
85
88
86
89
class ErrorResponse(Exception):
    """HTTP error, carried as an exception until turned into a response.

    code is the numeric HTTP status; message defaults to the standard
    reason phrase for that status; headers is a list of extra response
    headers.
    """
    def __init__(self, code, message=None, headers=None):
        if message is None:
            message = _statusmessage(code)
        Exception.__init__(self, message)
        self.code = code
        # Use a per-instance list instead of a mutable default argument:
        # with `headers=[]` every instance shared (and could corrupt) one
        # single list object.
        self.headers = [] if headers is None else headers
94
97
class continuereader(object):
    """File-like wrapper that emits '100 Continue' before the first read.

    The status line is sent (through the supplied write callable) exactly
    once, when data is first requested; close/readline/readlines/iteration
    are delegated to the wrapped file.
    """
    def __init__(self, f, write):
        self.f = f
        self._write = write
        self.continued = False

    def read(self, amt=-1):
        if not self.continued:
            self.continued = True
            self._write('HTTP/1.1 100 Continue\r\n\r\n')
        return self.f.read(amt)

    def __getattr__(self, attr):
        if attr in ('close', 'readline', 'readlines', '__iter__'):
            return getattr(self.f, attr)
        raise AttributeError
111
114
def _statusmessage(code):
    """Return the standard HTTP reason phrase for a status code."""
    table = httpserver.basehttprequesthandler.responses
    return table.get(code, ('Error', 'Unknown error'))[0]
115
118
def statusmessage(code, message=None):
    """Format an HTTP status line, e.g. '404 Not Found'."""
    if not message:
        message = _statusmessage(code)
    return '%d %s' % (code, message)
118
121
def get_stat(spath, fn):
    """stat spath/fn if that file exists, otherwise stat spath itself"""
    target = os.path.join(spath, fn)
    if not os.path.exists(target):
        target = spath
    return os.stat(target)
126
129
def get_mtime(spath):
    """Modification time of the changelog index (or spath if it is absent)."""
    st = get_stat(spath, "00changelog.i")
    return st.st_mtime
129
132
def staticfile(directory, fname, req):
    """Serve a static file from one of the given directories, with a
    guessed Content-Type header.

    fname always uses '/' as its separator and is not allowed to contain
    unusual path components.  Content-Type is guessed via the mimetypes
    module.  Returns None (serving nothing) when fname is illegal or the
    file is not found; raises ErrorResponse on stat/type errors.
    """
    parts = fname.split('/')
    for part in parts:
        if (part in ('', os.curdir, os.pardir) or
            pycompat.ossep in part or
            os.altsep is not None and os.altsep in part):
            return
    fpath = os.path.join(*parts)
    if isinstance(directory, str):
        directory = [directory]
    # probe each candidate root; fall through with the last path if none
    # exists (the stat below will then raise and be reported)
    for d in directory:
        path = os.path.join(d, fpath)
        if os.path.exists(path):
            break
    try:
        os.stat(path)
        ct = mimetypes.guess_type(path)[0] or "text/plain"
        with open(path, 'rb') as fp:
            data = fp.read()
        req.respond(HTTP_OK, ct, body=data)
    except TypeError:
        raise ErrorResponse(HTTP_SERVER_ERROR, 'illegal filename')
    except OSError as err:
        if err.errno == errno.ENOENT:
            raise ErrorResponse(HTTP_NOT_FOUND)
        raise ErrorResponse(HTTP_SERVER_ERROR, err.strerror)
165
169
def paritygen(stripecount, offset=0):
    """Yield 0/1 parity values, flipping every `stripecount` items.

    Counts the parity of horizontal stripes for easier reading.  `offset`
    shifts the starting position, e.g. to account for building the row
    list in reverse.  A stripecount of 0 disables striping (parity stays
    0 forever).
    """
    if stripecount and offset:
        # account for offset, e.g. due to building the list in reverse
        count = (stripecount + offset) % stripecount
        # floor division is required here: under Python 3 the old '/'
        # produced a float, and `float & 1` raises TypeError
        parity = (stripecount + offset) // stripecount & 1
    else:
        count = 0
        parity = 0
    while True:
        yield parity
        count += 1
        if stripecount and count >= stripecount:
            parity = 1 - parity
            count = 0
181
185
def get_contact(config):
    """Return repo contact information or empty string.

    web.contact is consulted first; ui.username and the EMAIL environment
    variable serve as fallbacks so something useful is displayed.
    """
    for section, key in (("web", "contact"), ("ui", "username")):
        contact = config(section, key)
        if contact:
            return contact
    return os.environ.get("EMAIL") or ""
191
195
def caching(web, req):
    """Handle ETag-based HTTP caching for a hgweb request.

    A weak ETag derived from the repository mtime is compared against the
    If-None-Match request header; a match short-circuits with a 304
    response, otherwise the ETag header is appended to the reply.
    """
    etag = 'W/"%s"' % web.mtime
    if req.env.get('HTTP_IF_NONE_MATCH') == etag:
        raise ErrorResponse(HTTP_NOT_MODIFIED)
    req.headers.append(('ETag', etag))
@@ -1,1958 +1,1959 b''
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy
10 import copy
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import posixpath
14 import posixpath
15 import re
15 import re
16 import stat
16 import stat
17 import subprocess
17 import subprocess
18 import sys
18 import sys
19 import tarfile
19 import tarfile
20 import xml.dom.minidom
20 import xml.dom.minidom
21
21
22
22
23 from .i18n import _
23 from .i18n import _
24 from . import (
24 from . import (
25 cmdutil,
25 cmdutil,
26 config,
26 config,
27 error,
27 error,
28 exchange,
28 exchange,
29 filemerge,
29 filemerge,
30 match as matchmod,
30 match as matchmod,
31 node,
31 node,
32 pathutil,
32 pathutil,
33 phases,
33 phases,
34 pycompat,
34 scmutil,
35 scmutil,
35 util,
36 util,
36 )
37 )
37
38
38 hg = None
39 hg = None
39 propertycache = util.propertycache
40 propertycache = util.propertycache
40
41
41 nullstate = ('', '', 'empty')
42 nullstate = ('', '', 'empty')
42
43
def _expandedabspath(path):
    '''
    get a path or url and if it is a path expand it and return an absolute path
    '''
    u = util.url(util.urllocalpath(util.expandpath(path)))
    if not u.scheme:
        # plain local path: normalize to an absolute form
        return util.normpath(os.path.abspath(u.path))
    # URLs are returned unchanged
    return path
52
53
def _getstorehashcachename(remotepath):
    '''get a unique filename for the store hash cache of a remote repository'''
    digest = hashlib.sha1(_expandedabspath(remotepath)).hexdigest()
    return digest[:12]
56
57
class SubrepoAbort(error.Abort):
    """Exception class used to avoid handling a subrepo error more than once"""
    def __init__(self, *args, **kw):
        subrepo = kw.pop('subrepo', None)
        cause = kw.pop('cause', None)
        error.Abort.__init__(self, *args, **kw)
        # which subrepo the error originated in, and the original exc_info
        self.subrepo = subrepo
        self.cause = cause
63
64
def annotatesubrepoerror(func):
    """Decorator: annotate an error.Abort raised inside *func* with the
    subrepo path it originated in.

    A SubrepoAbort (already annotated) is re-raised untouched, so each
    error is only annotated/handled once even across nested subrepos.
    """
    def decoratedmethod(self, *args, **kargs):
        try:
            res = func(self, *args, **kargs)
        except SubrepoAbort as ex:
            # This exception has already been handled
            raise ex
        except error.Abort as ex:
            subrepo = subrelpath(self)
            errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
            # avoid handling this exception by raising a SubrepoAbort exception
            raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
                               cause=sys.exc_info())
        return res
    return decoratedmethod
79
80
def state(ctx, ui):
    """return a state dict, mapping subrepo paths configured in .hgsub
    to tuple: (source from .hgsub, revision from .hgsubstate, kind
    (key in types dict))
    """
    p = config.config()
    repo = ctx.repo()
    def read(f, sections=None, remap=None):
        # parse a spec file from the changectx; a missing-but-listed file
        # is treated as removed (warn), a missing top-level file aborts
        if f in ctx:
            try:
                data = ctx[f].data()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # handle missing subrepo spec files as removed
                ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
                        repo.pathto(f))
                return
            p.parse(f, data, sections, remap, read)
        else:
            raise error.Abort(_("subrepo spec file \'%s\' not found") %
                              repo.pathto(f))
    if '.hgsub' in ctx:
        read('.hgsub')

    for path, src in ui.configitems('subpaths'):
        p.set('subpaths', path, src, ui.configsource('subpaths', path))

    # collect pinned revisions from .hgsubstate ("<revision> <path>" lines)
    rev = {}
    if '.hgsubstate' in ctx:
        try:
            for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
                l = l.lstrip()
                if not l:
                    continue
                try:
                    revision, path = l.split(" ", 1)
                except ValueError:
                    raise error.Abort(_("invalid subrepository revision "
                                        "specifier in \'%s\' line %d")
                                      % (repo.pathto('.hgsubstate'), (i + 1)))
                rev[path] = revision
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise

    def remap(src):
        # apply [subpaths] pattern rewrites to a source, first match wins
        for pattern, repl in p.items('subpaths'):
            # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
            # does a string decode.
            repl = repl.encode('string-escape')
            # However, we still want to allow back references to go
            # through unharmed, so we turn r'\\1' into r'\1'. Again,
            # extra escapes are needed because re.sub string decodes.
            repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
            try:
                src = re.sub(pattern, repl, src, 1)
            except re.error as e:
                raise error.Abort(_("bad subrepository pattern in %s: %s")
                                  % (p.source('subpaths', pattern), e))
        return src

    state = {}
    for path, src in p[''].items():
        # sources may carry a '[kind]' prefix, e.g. '[git]url'
        kind = 'hg'
        if src.startswith('['):
            if ']' not in src:
                raise error.Abort(_('missing ] in subrepo source'))
            kind, src = src.split(']', 1)
            kind = kind[1:]
            src = src.lstrip() # strip any extra whitespace after ']'

        if not util.url(src).isabs():
            parent = _abssource(repo, abort=False)
            if parent:
                parent = util.url(parent)
                parent.path = posixpath.join(parent.path or '', src)
                parent.path = posixpath.normpath(parent.path)
                joined = str(parent)
                # Remap the full joined path and use it if it changes,
                # else remap the original source.
                remapped = remap(joined)
                if remapped == joined:
                    src = remap(src)
                else:
                    src = remapped

        src = remap(src)
        state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)

    return state
171
172
def writestate(repo, state):
    """rewrite .hgsubstate in (outer) repo with these subrepo states"""
    lines = []
    for s in sorted(state):
        revision = state[s][1]
        # entries at the null revision are omitted (removed subrepos)
        if revision != nullstate[1]:
            lines.append('%s %s\n' % (revision, s))
    repo.wwrite('.hgsubstate', ''.join(lines), '')
177
178
def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
    """delegated from merge.applyupdates: merging of .hgsubstate file
    in working context, merging context and ancestor context

    Compares the subrepo state of the three contexts entry by entry,
    updating subrepos and prompting the user where the sides diverge,
    then writes and returns the merged state mapping.
    """
    if mctx == actx: # backwards?
        actx = wctx.p1()
    s1 = wctx.substate
    s2 = mctx.substate
    sa = actx.substate
    sm = {}

    repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))

    def debug(s, msg, r=""):
        if r:
            r = "%s:%s:%s" % r
        repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))

    # first pass: every subrepo present locally
    for s, l in sorted(s1.iteritems()):
        a = sa.get(s, nullstate)
        ld = l # local state with possible dirty flag for compares
        if wctx.sub(s).dirty():
            ld = (l[0], l[1] + "+")
        if wctx == actx: # overwrite
            a = ld

        if s in s2:
            prompts = filemerge.partextras(labels)
            prompts['s'] = s
            r = s2[s]
            if ld == r or r == a: # no change or local is newer
                sm[s] = l
                continue
            elif ld == a: # other side changed
                debug(s, "other changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            elif ld[0] != r[0]: # sources differ
                prompts['lo'] = l[0]
                prompts['ro'] = r[0]
                if repo.ui.promptchoice(
                    _(' subrepository sources for %(s)s differ\n'
                      'use (l)ocal%(l)s source (%(lo)s)'
                      ' or (r)emote%(o)s source (%(ro)s)?'
                      '$$ &Local $$ &Remote') % prompts, 0):
                    debug(s, "prompt changed, get", r)
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
            elif ld[1] == a[1]: # local side is unchanged
                debug(s, "other side changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            else:
                # true divergence: offer merge / keep local / keep remote
                debug(s, "both sides changed")
                srepo = wctx.sub(s)
                prompts['sl'] = srepo.shortid(l[1])
                prompts['sr'] = srepo.shortid(r[1])
                option = repo.ui.promptchoice(
                    _(' subrepository %(s)s diverged (local revision: %(sl)s, '
                      'remote revision: %(sr)s)\n'
                      '(M)erge, keep (l)ocal%(l)s or keep (r)emote%(o)s?'
                      '$$ &Merge $$ &Local $$ &Remote')
                    % prompts, 0)
                if option == 0:
                    wctx.sub(s).merge(r)
                    sm[s] = l
                    debug(s, "merge with", r)
                elif option == 1:
                    sm[s] = l
                    debug(s, "keep local subrepo revision", l)
                else:
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
                    debug(s, "get remote subrepo revision", r)
        elif ld == a: # remote removed, local unchanged
            debug(s, "remote removed, remove")
            wctx.sub(s).remove()
        elif a == nullstate: # not present in remote or ancestor
            debug(s, "local added, keep")
            sm[s] = l
            continue
        else:
            # local changed a subrepo the remote removed: ask the user
            if repo.ui.promptchoice(
                _(' local%(l)s changed subrepository %(s)s'
                  ' which remote%(o)s removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % prompts, 0):
                debug(s, "prompt remove")
                wctx.sub(s).remove()

    # second pass: subrepos only present on the remote side
    for s, r in sorted(s2.items()):
        if s in s1:
            continue
        elif s not in sa:
            debug(s, "remote added, get", r)
            mctx.sub(s).get(r)
            sm[s] = r
        elif r != sa[s]:
            # remote changed a subrepo the local side removed: ask the user
            if repo.ui.promptchoice(
                _(' remote%(o)s changed subrepository %(s)s'
                  ' which local%(l)s removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % prompts, 0) == 0:
                debug(s, "prompt recreate", r)
                mctx.sub(s).get(r)
                sm[s] = r

    # record merged .hgsubstate
    writestate(repo, sm)
    return sm
287
288
def _updateprompt(ui, sub, dirty, local, remote):
    """Ask which subrepo source to use; returns 0 for local, 1 for remote."""
    if dirty:
        template = _(' subrepository sources for %s differ\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?'
                     '$$ &Local $$ &Remote')
    else:
        template = _(' subrepository sources for %s differ (in checked out '
                     'version)\n'
                     'use (l)ocal source (%s) or (r)emote source (%s)?'
                     '$$ &Local $$ &Remote')
    msg = template % (subrelpath(sub), local, remote)
    return ui.promptchoice(msg, 0)
301
302
def reporelpath(repo):
    """return path to this (sub)repo as seen from outermost repo"""
    outer = repo
    # walk up the _subparent chain to the outermost repository
    while util.safehasattr(outer, '_subparent'):
        outer = outer._subparent
    prefix = pathutil.normasprefix(outer.root)
    return repo.root[len(prefix):]
308
309
def subrelpath(sub):
    """Path of this subrepo as seen from the outermost repository."""
    return sub._relpath
312
313
def _abssource(repo, push=False, abort=True):
    """return pull/push path of repo - either based on parent repo .hgsub info
    or on the top repo config. Abort or return None if no source found."""
    if util.safehasattr(repo, '_subparent'):
        # repo is itself a subrepo: resolve its source, relative to the
        # parent's source when it is not absolute
        source = util.url(repo._subsource)
        if source.isabs():
            return str(source)
        source.path = posixpath.normpath(source.path)
        parent = _abssource(repo._subparent, push, abort=False)
        if parent:
            parent = util.url(util.pconvert(parent))
            parent.path = posixpath.join(parent.path or '', source.path)
            parent.path = posixpath.normpath(parent.path)
            return str(parent)
    else: # recursion reached top repo
        # fallback order: explicit _subtoppath, then paths config, then
        # the share source for shared repos
        if util.safehasattr(repo, '_subtoppath'):
            return repo._subtoppath
        if push and repo.ui.config('paths', 'default-push'):
            return repo.ui.config('paths', 'default-push')
        if repo.ui.config('paths', 'default'):
            return repo.ui.config('paths', 'default')
        if repo.shared():
            # chop off the .hg component to get the default path form
            return os.path.dirname(repo.sharedpath)
    if abort:
        raise error.Abort(_("default path for subrepository not found"))
339
340
340 def _sanitize(ui, vfs, ignore):
341 def _sanitize(ui, vfs, ignore):
341 for dirname, dirs, names in vfs.walk():
342 for dirname, dirs, names in vfs.walk():
342 for i, d in enumerate(dirs):
343 for i, d in enumerate(dirs):
343 if d.lower() == ignore:
344 if d.lower() == ignore:
344 del dirs[i]
345 del dirs[i]
345 break
346 break
346 if vfs.basename(dirname).lower() != '.hg':
347 if vfs.basename(dirname).lower() != '.hg':
347 continue
348 continue
348 for f in names:
349 for f in names:
349 if f.lower() == 'hgrc':
350 if f.lower() == 'hgrc':
350 ui.warn(_("warning: removing potentially hostile 'hgrc' "
351 ui.warn(_("warning: removing potentially hostile 'hgrc' "
351 "in '%s'\n") % vfs.join(dirname))
352 "in '%s'\n") % vfs.join(dirname))
352 vfs.unlink(vfs.reljoin(dirname, f))
353 vfs.unlink(vfs.reljoin(dirname, f))
353
354
354 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
355 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
355 """return instance of the right subrepo class for subrepo in path"""
356 """return instance of the right subrepo class for subrepo in path"""
356 # subrepo inherently violates our import layering rules
357 # subrepo inherently violates our import layering rules
357 # because it wants to make repo objects from deep inside the stack
358 # because it wants to make repo objects from deep inside the stack
358 # so we manually delay the circular imports to not break
359 # so we manually delay the circular imports to not break
359 # scripts that don't use our demand-loading
360 # scripts that don't use our demand-loading
360 global hg
361 global hg
361 from . import hg as h
362 from . import hg as h
362 hg = h
363 hg = h
363
364
364 pathutil.pathauditor(ctx.repo().root)(path)
365 pathutil.pathauditor(ctx.repo().root)(path)
365 state = ctx.substate[path]
366 state = ctx.substate[path]
366 if state[2] not in types:
367 if state[2] not in types:
367 raise error.Abort(_('unknown subrepo type %s') % state[2])
368 raise error.Abort(_('unknown subrepo type %s') % state[2])
368 if allowwdir:
369 if allowwdir:
369 state = (state[0], ctx.subrev(path), state[2])
370 state = (state[0], ctx.subrev(path), state[2])
370 return types[state[2]](ctx, path, state[:2], allowcreate)
371 return types[state[2]](ctx, path, state[:2], allowcreate)
371
372
372 def nullsubrepo(ctx, path, pctx):
373 def nullsubrepo(ctx, path, pctx):
373 """return an empty subrepo in pctx for the extant subrepo in ctx"""
374 """return an empty subrepo in pctx for the extant subrepo in ctx"""
374 # subrepo inherently violates our import layering rules
375 # subrepo inherently violates our import layering rules
375 # because it wants to make repo objects from deep inside the stack
376 # because it wants to make repo objects from deep inside the stack
376 # so we manually delay the circular imports to not break
377 # so we manually delay the circular imports to not break
377 # scripts that don't use our demand-loading
378 # scripts that don't use our demand-loading
378 global hg
379 global hg
379 from . import hg as h
380 from . import hg as h
380 hg = h
381 hg = h
381
382
382 pathutil.pathauditor(ctx.repo().root)(path)
383 pathutil.pathauditor(ctx.repo().root)(path)
383 state = ctx.substate[path]
384 state = ctx.substate[path]
384 if state[2] not in types:
385 if state[2] not in types:
385 raise error.Abort(_('unknown subrepo type %s') % state[2])
386 raise error.Abort(_('unknown subrepo type %s') % state[2])
386 subrev = ''
387 subrev = ''
387 if state[2] == 'hg':
388 if state[2] == 'hg':
388 subrev = "0" * 40
389 subrev = "0" * 40
389 return types[state[2]](pctx, path, (state[0], subrev), True)
390 return types[state[2]](pctx, path, (state[0], subrev), True)
390
391
391 def newcommitphase(ui, ctx):
392 def newcommitphase(ui, ctx):
392 commitphase = phases.newcommitphase(ui)
393 commitphase = phases.newcommitphase(ui)
393 substate = getattr(ctx, "substate", None)
394 substate = getattr(ctx, "substate", None)
394 if not substate:
395 if not substate:
395 return commitphase
396 return commitphase
396 check = ui.config('phases', 'checksubrepos', 'follow')
397 check = ui.config('phases', 'checksubrepos', 'follow')
397 if check not in ('ignore', 'follow', 'abort'):
398 if check not in ('ignore', 'follow', 'abort'):
398 raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
399 raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
399 % (check))
400 % (check))
400 if check == 'ignore':
401 if check == 'ignore':
401 return commitphase
402 return commitphase
402 maxphase = phases.public
403 maxphase = phases.public
403 maxsub = None
404 maxsub = None
404 for s in sorted(substate):
405 for s in sorted(substate):
405 sub = ctx.sub(s)
406 sub = ctx.sub(s)
406 subphase = sub.phase(substate[s][1])
407 subphase = sub.phase(substate[s][1])
407 if maxphase < subphase:
408 if maxphase < subphase:
408 maxphase = subphase
409 maxphase = subphase
409 maxsub = s
410 maxsub = s
410 if commitphase < maxphase:
411 if commitphase < maxphase:
411 if check == 'abort':
412 if check == 'abort':
412 raise error.Abort(_("can't commit in %s phase"
413 raise error.Abort(_("can't commit in %s phase"
413 " conflicting %s from subrepository %s") %
414 " conflicting %s from subrepository %s") %
414 (phases.phasenames[commitphase],
415 (phases.phasenames[commitphase],
415 phases.phasenames[maxphase], maxsub))
416 phases.phasenames[maxphase], maxsub))
416 ui.warn(_("warning: changes are committed in"
417 ui.warn(_("warning: changes are committed in"
417 " %s phase from subrepository %s\n") %
418 " %s phase from subrepository %s\n") %
418 (phases.phasenames[maxphase], maxsub))
419 (phases.phasenames[maxphase], maxsub))
419 return maxphase
420 return maxphase
420 return commitphase
421 return commitphase
421
422
422 # subrepo classes need to implement the following abstract class:
423 # subrepo classes need to implement the following abstract class:
423
424
424 class abstractsubrepo(object):
425 class abstractsubrepo(object):
425
426
426 def __init__(self, ctx, path):
427 def __init__(self, ctx, path):
427 """Initialize abstractsubrepo part
428 """Initialize abstractsubrepo part
428
429
429 ``ctx`` is the context referring this subrepository in the
430 ``ctx`` is the context referring this subrepository in the
430 parent repository.
431 parent repository.
431
432
432 ``path`` is the path to this subrepository as seen from
433 ``path`` is the path to this subrepository as seen from
433 innermost repository.
434 innermost repository.
434 """
435 """
435 self.ui = ctx.repo().ui
436 self.ui = ctx.repo().ui
436 self._ctx = ctx
437 self._ctx = ctx
437 self._path = path
438 self._path = path
438
439
439 def storeclean(self, path):
440 def storeclean(self, path):
440 """
441 """
441 returns true if the repository has not changed since it was last
442 returns true if the repository has not changed since it was last
442 cloned from or pushed to a given repository.
443 cloned from or pushed to a given repository.
443 """
444 """
444 return False
445 return False
445
446
446 def dirty(self, ignoreupdate=False):
447 def dirty(self, ignoreupdate=False):
447 """returns true if the dirstate of the subrepo is dirty or does not
448 """returns true if the dirstate of the subrepo is dirty or does not
448 match current stored state. If ignoreupdate is true, only check
449 match current stored state. If ignoreupdate is true, only check
449 whether the subrepo has uncommitted changes in its dirstate.
450 whether the subrepo has uncommitted changes in its dirstate.
450 """
451 """
451 raise NotImplementedError
452 raise NotImplementedError
452
453
453 def dirtyreason(self, ignoreupdate=False):
454 def dirtyreason(self, ignoreupdate=False):
454 """return reason string if it is ``dirty()``
455 """return reason string if it is ``dirty()``
455
456
456 Returned string should have enough information for the message
457 Returned string should have enough information for the message
457 of exception.
458 of exception.
458
459
459 This returns None, otherwise.
460 This returns None, otherwise.
460 """
461 """
461 if self.dirty(ignoreupdate=ignoreupdate):
462 if self.dirty(ignoreupdate=ignoreupdate):
462 return _("uncommitted changes in subrepository '%s'"
463 return _("uncommitted changes in subrepository '%s'"
463 ) % subrelpath(self)
464 ) % subrelpath(self)
464
465
465 def bailifchanged(self, ignoreupdate=False):
466 def bailifchanged(self, ignoreupdate=False):
466 """raise Abort if subrepository is ``dirty()``
467 """raise Abort if subrepository is ``dirty()``
467 """
468 """
468 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
469 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
469 if dirtyreason:
470 if dirtyreason:
470 raise error.Abort(dirtyreason)
471 raise error.Abort(dirtyreason)
471
472
472 def basestate(self):
473 def basestate(self):
473 """current working directory base state, disregarding .hgsubstate
474 """current working directory base state, disregarding .hgsubstate
474 state and working directory modifications"""
475 state and working directory modifications"""
475 raise NotImplementedError
476 raise NotImplementedError
476
477
477 def checknested(self, path):
478 def checknested(self, path):
478 """check if path is a subrepository within this repository"""
479 """check if path is a subrepository within this repository"""
479 return False
480 return False
480
481
481 def commit(self, text, user, date):
482 def commit(self, text, user, date):
482 """commit the current changes to the subrepo with the given
483 """commit the current changes to the subrepo with the given
483 log message. Use given user and date if possible. Return the
484 log message. Use given user and date if possible. Return the
484 new state of the subrepo.
485 new state of the subrepo.
485 """
486 """
486 raise NotImplementedError
487 raise NotImplementedError
487
488
488 def phase(self, state):
489 def phase(self, state):
489 """returns phase of specified state in the subrepository.
490 """returns phase of specified state in the subrepository.
490 """
491 """
491 return phases.public
492 return phases.public
492
493
493 def remove(self):
494 def remove(self):
494 """remove the subrepo
495 """remove the subrepo
495
496
496 (should verify the dirstate is not dirty first)
497 (should verify the dirstate is not dirty first)
497 """
498 """
498 raise NotImplementedError
499 raise NotImplementedError
499
500
500 def get(self, state, overwrite=False):
501 def get(self, state, overwrite=False):
501 """run whatever commands are needed to put the subrepo into
502 """run whatever commands are needed to put the subrepo into
502 this state
503 this state
503 """
504 """
504 raise NotImplementedError
505 raise NotImplementedError
505
506
506 def merge(self, state):
507 def merge(self, state):
507 """merge currently-saved state with the new state."""
508 """merge currently-saved state with the new state."""
508 raise NotImplementedError
509 raise NotImplementedError
509
510
510 def push(self, opts):
511 def push(self, opts):
511 """perform whatever action is analogous to 'hg push'
512 """perform whatever action is analogous to 'hg push'
512
513
513 This may be a no-op on some systems.
514 This may be a no-op on some systems.
514 """
515 """
515 raise NotImplementedError
516 raise NotImplementedError
516
517
517 def add(self, ui, match, prefix, explicitonly, **opts):
518 def add(self, ui, match, prefix, explicitonly, **opts):
518 return []
519 return []
519
520
520 def addremove(self, matcher, prefix, opts, dry_run, similarity):
521 def addremove(self, matcher, prefix, opts, dry_run, similarity):
521 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
522 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
522 return 1
523 return 1
523
524
524 def cat(self, match, prefix, **opts):
525 def cat(self, match, prefix, **opts):
525 return 1
526 return 1
526
527
527 def status(self, rev2, **opts):
528 def status(self, rev2, **opts):
528 return scmutil.status([], [], [], [], [], [], [])
529 return scmutil.status([], [], [], [], [], [], [])
529
530
530 def diff(self, ui, diffopts, node2, match, prefix, **opts):
531 def diff(self, ui, diffopts, node2, match, prefix, **opts):
531 pass
532 pass
532
533
533 def outgoing(self, ui, dest, opts):
534 def outgoing(self, ui, dest, opts):
534 return 1
535 return 1
535
536
536 def incoming(self, ui, source, opts):
537 def incoming(self, ui, source, opts):
537 return 1
538 return 1
538
539
539 def files(self):
540 def files(self):
540 """return filename iterator"""
541 """return filename iterator"""
541 raise NotImplementedError
542 raise NotImplementedError
542
543
543 def filedata(self, name):
544 def filedata(self, name):
544 """return file data"""
545 """return file data"""
545 raise NotImplementedError
546 raise NotImplementedError
546
547
547 def fileflags(self, name):
548 def fileflags(self, name):
548 """return file flags"""
549 """return file flags"""
549 return ''
550 return ''
550
551
551 def getfileset(self, expr):
552 def getfileset(self, expr):
552 """Resolve the fileset expression for this repo"""
553 """Resolve the fileset expression for this repo"""
553 return set()
554 return set()
554
555
555 def printfiles(self, ui, m, fm, fmt, subrepos):
556 def printfiles(self, ui, m, fm, fmt, subrepos):
556 """handle the files command for this subrepo"""
557 """handle the files command for this subrepo"""
557 return 1
558 return 1
558
559
559 def archive(self, archiver, prefix, match=None):
560 def archive(self, archiver, prefix, match=None):
560 if match is not None:
561 if match is not None:
561 files = [f for f in self.files() if match(f)]
562 files = [f for f in self.files() if match(f)]
562 else:
563 else:
563 files = self.files()
564 files = self.files()
564 total = len(files)
565 total = len(files)
565 relpath = subrelpath(self)
566 relpath = subrelpath(self)
566 self.ui.progress(_('archiving (%s)') % relpath, 0,
567 self.ui.progress(_('archiving (%s)') % relpath, 0,
567 unit=_('files'), total=total)
568 unit=_('files'), total=total)
568 for i, name in enumerate(files):
569 for i, name in enumerate(files):
569 flags = self.fileflags(name)
570 flags = self.fileflags(name)
570 mode = 'x' in flags and 0o755 or 0o644
571 mode = 'x' in flags and 0o755 or 0o644
571 symlink = 'l' in flags
572 symlink = 'l' in flags
572 archiver.addfile(prefix + self._path + '/' + name,
573 archiver.addfile(prefix + self._path + '/' + name,
573 mode, symlink, self.filedata(name))
574 mode, symlink, self.filedata(name))
574 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
575 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
575 unit=_('files'), total=total)
576 unit=_('files'), total=total)
576 self.ui.progress(_('archiving (%s)') % relpath, None)
577 self.ui.progress(_('archiving (%s)') % relpath, None)
577 return total
578 return total
578
579
579 def walk(self, match):
580 def walk(self, match):
580 '''
581 '''
581 walk recursively through the directory tree, finding all files
582 walk recursively through the directory tree, finding all files
582 matched by the match function
583 matched by the match function
583 '''
584 '''
584 pass
585 pass
585
586
586 def forget(self, match, prefix):
587 def forget(self, match, prefix):
587 return ([], [])
588 return ([], [])
588
589
589 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
590 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
590 """remove the matched files from the subrepository and the filesystem,
591 """remove the matched files from the subrepository and the filesystem,
591 possibly by force and/or after the file has been removed from the
592 possibly by force and/or after the file has been removed from the
592 filesystem. Return 0 on success, 1 on any warning.
593 filesystem. Return 0 on success, 1 on any warning.
593 """
594 """
594 warnings.append(_("warning: removefiles not implemented (%s)")
595 warnings.append(_("warning: removefiles not implemented (%s)")
595 % self._path)
596 % self._path)
596 return 1
597 return 1
597
598
598 def revert(self, substate, *pats, **opts):
599 def revert(self, substate, *pats, **opts):
599 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
600 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
600 % (substate[0], substate[2]))
601 % (substate[0], substate[2]))
601 return []
602 return []
602
603
603 def shortid(self, revid):
604 def shortid(self, revid):
604 return revid
605 return revid
605
606
606 def verify(self):
607 def verify(self):
607 '''verify the integrity of the repository. Return 0 on success or
608 '''verify the integrity of the repository. Return 0 on success or
608 warning, 1 on any error.
609 warning, 1 on any error.
609 '''
610 '''
610 return 0
611 return 0
611
612
612 @propertycache
613 @propertycache
613 def wvfs(self):
614 def wvfs(self):
614 """return vfs to access the working directory of this subrepository
615 """return vfs to access the working directory of this subrepository
615 """
616 """
616 return scmutil.vfs(self._ctx.repo().wvfs.join(self._path))
617 return scmutil.vfs(self._ctx.repo().wvfs.join(self._path))
617
618
618 @propertycache
619 @propertycache
619 def _relpath(self):
620 def _relpath(self):
620 """return path to this subrepository as seen from outermost repository
621 """return path to this subrepository as seen from outermost repository
621 """
622 """
622 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
623 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
623
624
624 class hgsubrepo(abstractsubrepo):
625 class hgsubrepo(abstractsubrepo):
625 def __init__(self, ctx, path, state, allowcreate):
626 def __init__(self, ctx, path, state, allowcreate):
626 super(hgsubrepo, self).__init__(ctx, path)
627 super(hgsubrepo, self).__init__(ctx, path)
627 self._state = state
628 self._state = state
628 r = ctx.repo()
629 r = ctx.repo()
629 root = r.wjoin(path)
630 root = r.wjoin(path)
630 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
631 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
631 self._repo = hg.repository(r.baseui, root, create=create)
632 self._repo = hg.repository(r.baseui, root, create=create)
632
633
633 # Propagate the parent's --hidden option
634 # Propagate the parent's --hidden option
634 if r is r.unfiltered():
635 if r is r.unfiltered():
635 self._repo = self._repo.unfiltered()
636 self._repo = self._repo.unfiltered()
636
637
637 self.ui = self._repo.ui
638 self.ui = self._repo.ui
638 for s, k in [('ui', 'commitsubrepos')]:
639 for s, k in [('ui', 'commitsubrepos')]:
639 v = r.ui.config(s, k)
640 v = r.ui.config(s, k)
640 if v:
641 if v:
641 self.ui.setconfig(s, k, v, 'subrepo')
642 self.ui.setconfig(s, k, v, 'subrepo')
642 # internal config: ui._usedassubrepo
643 # internal config: ui._usedassubrepo
643 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
644 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
644 self._initrepo(r, state[0], create)
645 self._initrepo(r, state[0], create)
645
646
646 def storeclean(self, path):
647 def storeclean(self, path):
647 with self._repo.lock():
648 with self._repo.lock():
648 return self._storeclean(path)
649 return self._storeclean(path)
649
650
650 def _storeclean(self, path):
651 def _storeclean(self, path):
651 clean = True
652 clean = True
652 itercache = self._calcstorehash(path)
653 itercache = self._calcstorehash(path)
653 for filehash in self._readstorehashcache(path):
654 for filehash in self._readstorehashcache(path):
654 if filehash != next(itercache, None):
655 if filehash != next(itercache, None):
655 clean = False
656 clean = False
656 break
657 break
657 if clean:
658 if clean:
658 # if not empty:
659 # if not empty:
659 # the cached and current pull states have a different size
660 # the cached and current pull states have a different size
660 clean = next(itercache, None) is None
661 clean = next(itercache, None) is None
661 return clean
662 return clean
662
663
663 def _calcstorehash(self, remotepath):
664 def _calcstorehash(self, remotepath):
664 '''calculate a unique "store hash"
665 '''calculate a unique "store hash"
665
666
666 This method is used to to detect when there are changes that may
667 This method is used to to detect when there are changes that may
667 require a push to a given remote path.'''
668 require a push to a given remote path.'''
668 # sort the files that will be hashed in increasing (likely) file size
669 # sort the files that will be hashed in increasing (likely) file size
669 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
670 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
670 yield '# %s\n' % _expandedabspath(remotepath)
671 yield '# %s\n' % _expandedabspath(remotepath)
671 vfs = self._repo.vfs
672 vfs = self._repo.vfs
672 for relname in filelist:
673 for relname in filelist:
673 filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
674 filehash = hashlib.sha1(vfs.tryread(relname)).hexdigest()
674 yield '%s = %s\n' % (relname, filehash)
675 yield '%s = %s\n' % (relname, filehash)
675
676
676 @propertycache
677 @propertycache
677 def _cachestorehashvfs(self):
678 def _cachestorehashvfs(self):
678 return scmutil.vfs(self._repo.join('cache/storehash'))
679 return scmutil.vfs(self._repo.join('cache/storehash'))
679
680
680 def _readstorehashcache(self, remotepath):
681 def _readstorehashcache(self, remotepath):
681 '''read the store hash cache for a given remote repository'''
682 '''read the store hash cache for a given remote repository'''
682 cachefile = _getstorehashcachename(remotepath)
683 cachefile = _getstorehashcachename(remotepath)
683 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
684 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
684
685
685 def _cachestorehash(self, remotepath):
686 def _cachestorehash(self, remotepath):
686 '''cache the current store hash
687 '''cache the current store hash
687
688
688 Each remote repo requires its own store hash cache, because a subrepo
689 Each remote repo requires its own store hash cache, because a subrepo
689 store may be "clean" versus a given remote repo, but not versus another
690 store may be "clean" versus a given remote repo, but not versus another
690 '''
691 '''
691 cachefile = _getstorehashcachename(remotepath)
692 cachefile = _getstorehashcachename(remotepath)
692 with self._repo.lock():
693 with self._repo.lock():
693 storehash = list(self._calcstorehash(remotepath))
694 storehash = list(self._calcstorehash(remotepath))
694 vfs = self._cachestorehashvfs
695 vfs = self._cachestorehashvfs
695 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
696 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
696
697
697 def _getctx(self):
698 def _getctx(self):
698 '''fetch the context for this subrepo revision, possibly a workingctx
699 '''fetch the context for this subrepo revision, possibly a workingctx
699 '''
700 '''
700 if self._ctx.rev() is None:
701 if self._ctx.rev() is None:
701 return self._repo[None] # workingctx if parent is workingctx
702 return self._repo[None] # workingctx if parent is workingctx
702 else:
703 else:
703 rev = self._state[1]
704 rev = self._state[1]
704 return self._repo[rev]
705 return self._repo[rev]
705
706
706 @annotatesubrepoerror
707 @annotatesubrepoerror
707 def _initrepo(self, parentrepo, source, create):
708 def _initrepo(self, parentrepo, source, create):
708 self._repo._subparent = parentrepo
709 self._repo._subparent = parentrepo
709 self._repo._subsource = source
710 self._repo._subsource = source
710
711
711 if create:
712 if create:
712 lines = ['[paths]\n']
713 lines = ['[paths]\n']
713
714
714 def addpathconfig(key, value):
715 def addpathconfig(key, value):
715 if value:
716 if value:
716 lines.append('%s = %s\n' % (key, value))
717 lines.append('%s = %s\n' % (key, value))
717 self.ui.setconfig('paths', key, value, 'subrepo')
718 self.ui.setconfig('paths', key, value, 'subrepo')
718
719
719 defpath = _abssource(self._repo, abort=False)
720 defpath = _abssource(self._repo, abort=False)
720 defpushpath = _abssource(self._repo, True, abort=False)
721 defpushpath = _abssource(self._repo, True, abort=False)
721 addpathconfig('default', defpath)
722 addpathconfig('default', defpath)
722 if defpath != defpushpath:
723 if defpath != defpushpath:
723 addpathconfig('default-push', defpushpath)
724 addpathconfig('default-push', defpushpath)
724
725
725 fp = self._repo.vfs("hgrc", "w", text=True)
726 fp = self._repo.vfs("hgrc", "w", text=True)
726 try:
727 try:
727 fp.write(''.join(lines))
728 fp.write(''.join(lines))
728 finally:
729 finally:
729 fp.close()
730 fp.close()
730
731
731 @annotatesubrepoerror
732 @annotatesubrepoerror
732 def add(self, ui, match, prefix, explicitonly, **opts):
733 def add(self, ui, match, prefix, explicitonly, **opts):
733 return cmdutil.add(ui, self._repo, match,
734 return cmdutil.add(ui, self._repo, match,
734 self.wvfs.reljoin(prefix, self._path),
735 self.wvfs.reljoin(prefix, self._path),
735 explicitonly, **opts)
736 explicitonly, **opts)
736
737
737 @annotatesubrepoerror
738 @annotatesubrepoerror
738 def addremove(self, m, prefix, opts, dry_run, similarity):
739 def addremove(self, m, prefix, opts, dry_run, similarity):
739 # In the same way as sub directories are processed, once in a subrepo,
740 # In the same way as sub directories are processed, once in a subrepo,
740 # always entry any of its subrepos. Don't corrupt the options that will
741 # always entry any of its subrepos. Don't corrupt the options that will
741 # be used to process sibling subrepos however.
742 # be used to process sibling subrepos however.
742 opts = copy.copy(opts)
743 opts = copy.copy(opts)
743 opts['subrepos'] = True
744 opts['subrepos'] = True
744 return scmutil.addremove(self._repo, m,
745 return scmutil.addremove(self._repo, m,
745 self.wvfs.reljoin(prefix, self._path), opts,
746 self.wvfs.reljoin(prefix, self._path), opts,
746 dry_run, similarity)
747 dry_run, similarity)
747
748
748 @annotatesubrepoerror
749 @annotatesubrepoerror
749 def cat(self, match, prefix, **opts):
750 def cat(self, match, prefix, **opts):
750 rev = self._state[1]
751 rev = self._state[1]
751 ctx = self._repo[rev]
752 ctx = self._repo[rev]
752 return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
753 return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
753
754
754 @annotatesubrepoerror
755 @annotatesubrepoerror
755 def status(self, rev2, **opts):
756 def status(self, rev2, **opts):
756 try:
757 try:
757 rev1 = self._state[1]
758 rev1 = self._state[1]
758 ctx1 = self._repo[rev1]
759 ctx1 = self._repo[rev1]
759 ctx2 = self._repo[rev2]
760 ctx2 = self._repo[rev2]
760 return self._repo.status(ctx1, ctx2, **opts)
761 return self._repo.status(ctx1, ctx2, **opts)
761 except error.RepoLookupError as inst:
762 except error.RepoLookupError as inst:
762 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
763 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
763 % (inst, subrelpath(self)))
764 % (inst, subrelpath(self)))
764 return scmutil.status([], [], [], [], [], [], [])
765 return scmutil.status([], [], [], [], [], [], [])
765
766
766 @annotatesubrepoerror
767 @annotatesubrepoerror
767 def diff(self, ui, diffopts, node2, match, prefix, **opts):
768 def diff(self, ui, diffopts, node2, match, prefix, **opts):
768 try:
769 try:
769 node1 = node.bin(self._state[1])
770 node1 = node.bin(self._state[1])
770 # We currently expect node2 to come from substate and be
771 # We currently expect node2 to come from substate and be
771 # in hex format
772 # in hex format
772 if node2 is not None:
773 if node2 is not None:
773 node2 = node.bin(node2)
774 node2 = node.bin(node2)
774 cmdutil.diffordiffstat(ui, self._repo, diffopts,
775 cmdutil.diffordiffstat(ui, self._repo, diffopts,
775 node1, node2, match,
776 node1, node2, match,
776 prefix=posixpath.join(prefix, self._path),
777 prefix=posixpath.join(prefix, self._path),
777 listsubrepos=True, **opts)
778 listsubrepos=True, **opts)
778 except error.RepoLookupError as inst:
779 except error.RepoLookupError as inst:
779 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
780 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
780 % (inst, subrelpath(self)))
781 % (inst, subrelpath(self)))
781
782
782 @annotatesubrepoerror
783 @annotatesubrepoerror
783 def archive(self, archiver, prefix, match=None):
784 def archive(self, archiver, prefix, match=None):
784 self._get(self._state + ('hg',))
785 self._get(self._state + ('hg',))
785 total = abstractsubrepo.archive(self, archiver, prefix, match)
786 total = abstractsubrepo.archive(self, archiver, prefix, match)
786 rev = self._state[1]
787 rev = self._state[1]
787 ctx = self._repo[rev]
788 ctx = self._repo[rev]
788 for subpath in ctx.substate:
789 for subpath in ctx.substate:
789 s = subrepo(ctx, subpath, True)
790 s = subrepo(ctx, subpath, True)
790 submatch = matchmod.subdirmatcher(subpath, match)
791 submatch = matchmod.subdirmatcher(subpath, match)
791 total += s.archive(archiver, prefix + self._path + '/', submatch)
792 total += s.archive(archiver, prefix + self._path + '/', submatch)
792 return total
793 return total
793
794
794 @annotatesubrepoerror
795 @annotatesubrepoerror
795 def dirty(self, ignoreupdate=False):
796 def dirty(self, ignoreupdate=False):
796 r = self._state[1]
797 r = self._state[1]
797 if r == '' and not ignoreupdate: # no state recorded
798 if r == '' and not ignoreupdate: # no state recorded
798 return True
799 return True
799 w = self._repo[None]
800 w = self._repo[None]
800 if r != w.p1().hex() and not ignoreupdate:
801 if r != w.p1().hex() and not ignoreupdate:
801 # different version checked out
802 # different version checked out
802 return True
803 return True
803 return w.dirty() # working directory changed
804 return w.dirty() # working directory changed
804
805
805 def basestate(self):
806 def basestate(self):
806 return self._repo['.'].hex()
807 return self._repo['.'].hex()
807
808
808 def checknested(self, path):
809 def checknested(self, path):
809 return self._repo._checknested(self._repo.wjoin(path))
810 return self._repo._checknested(self._repo.wjoin(path))
810
811
811 @annotatesubrepoerror
812 @annotatesubrepoerror
812 def commit(self, text, user, date):
813 def commit(self, text, user, date):
813 # don't bother committing in the subrepo if it's only been
814 # don't bother committing in the subrepo if it's only been
814 # updated
815 # updated
815 if not self.dirty(True):
816 if not self.dirty(True):
816 return self._repo['.'].hex()
817 return self._repo['.'].hex()
817 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
818 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
818 n = self._repo.commit(text, user, date)
819 n = self._repo.commit(text, user, date)
819 if not n:
820 if not n:
820 return self._repo['.'].hex() # different version checked out
821 return self._repo['.'].hex() # different version checked out
821 return node.hex(n)
822 return node.hex(n)
822
823
823 @annotatesubrepoerror
824 @annotatesubrepoerror
824 def phase(self, state):
825 def phase(self, state):
825 return self._repo[state].phase()
826 return self._repo[state].phase()
826
827
827 @annotatesubrepoerror
828 @annotatesubrepoerror
828 def remove(self):
829 def remove(self):
829 # we can't fully delete the repository as it may contain
830 # we can't fully delete the repository as it may contain
830 # local-only history
831 # local-only history
831 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
832 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
832 hg.clean(self._repo, node.nullid, False)
833 hg.clean(self._repo, node.nullid, False)
833
834
834 def _get(self, state):
835 def _get(self, state):
835 source, revision, kind = state
836 source, revision, kind = state
836 if revision in self._repo.unfiltered():
837 if revision in self._repo.unfiltered():
837 return True
838 return True
838 self._repo._subsource = source
839 self._repo._subsource = source
839 srcurl = _abssource(self._repo)
840 srcurl = _abssource(self._repo)
840 other = hg.peer(self._repo, {}, srcurl)
841 other = hg.peer(self._repo, {}, srcurl)
841 if len(self._repo) == 0:
842 if len(self._repo) == 0:
842 self.ui.status(_('cloning subrepo %s from %s\n')
843 self.ui.status(_('cloning subrepo %s from %s\n')
843 % (subrelpath(self), srcurl))
844 % (subrelpath(self), srcurl))
844 parentrepo = self._repo._subparent
845 parentrepo = self._repo._subparent
845 # use self._repo.vfs instead of self.wvfs to remove .hg only
846 # use self._repo.vfs instead of self.wvfs to remove .hg only
846 self._repo.vfs.rmtree()
847 self._repo.vfs.rmtree()
847 other, cloned = hg.clone(self._repo._subparent.baseui, {},
848 other, cloned = hg.clone(self._repo._subparent.baseui, {},
848 other, self._repo.root,
849 other, self._repo.root,
849 update=False)
850 update=False)
850 self._repo = cloned.local()
851 self._repo = cloned.local()
851 self._initrepo(parentrepo, source, create=True)
852 self._initrepo(parentrepo, source, create=True)
852 self._cachestorehash(srcurl)
853 self._cachestorehash(srcurl)
853 else:
854 else:
854 self.ui.status(_('pulling subrepo %s from %s\n')
855 self.ui.status(_('pulling subrepo %s from %s\n')
855 % (subrelpath(self), srcurl))
856 % (subrelpath(self), srcurl))
856 cleansub = self.storeclean(srcurl)
857 cleansub = self.storeclean(srcurl)
857 exchange.pull(self._repo, other)
858 exchange.pull(self._repo, other)
858 if cleansub:
859 if cleansub:
859 # keep the repo clean after pull
860 # keep the repo clean after pull
860 self._cachestorehash(srcurl)
861 self._cachestorehash(srcurl)
861 return False
862 return False
862
863
863 @annotatesubrepoerror
864 @annotatesubrepoerror
864 def get(self, state, overwrite=False):
865 def get(self, state, overwrite=False):
865 inrepo = self._get(state)
866 inrepo = self._get(state)
866 source, revision, kind = state
867 source, revision, kind = state
867 repo = self._repo
868 repo = self._repo
868 repo.ui.debug("getting subrepo %s\n" % self._path)
869 repo.ui.debug("getting subrepo %s\n" % self._path)
869 if inrepo:
870 if inrepo:
870 urepo = repo.unfiltered()
871 urepo = repo.unfiltered()
871 ctx = urepo[revision]
872 ctx = urepo[revision]
872 if ctx.hidden():
873 if ctx.hidden():
873 urepo.ui.warn(
874 urepo.ui.warn(
874 _('revision %s in subrepo %s is hidden\n') \
875 _('revision %s in subrepo %s is hidden\n') \
875 % (revision[0:12], self._path))
876 % (revision[0:12], self._path))
876 repo = urepo
877 repo = urepo
877 hg.updaterepo(repo, revision, overwrite)
878 hg.updaterepo(repo, revision, overwrite)
878
879
879 @annotatesubrepoerror
880 @annotatesubrepoerror
880 def merge(self, state):
881 def merge(self, state):
881 self._get(state)
882 self._get(state)
882 cur = self._repo['.']
883 cur = self._repo['.']
883 dst = self._repo[state[1]]
884 dst = self._repo[state[1]]
884 anc = dst.ancestor(cur)
885 anc = dst.ancestor(cur)
885
886
886 def mergefunc():
887 def mergefunc():
887 if anc == cur and dst.branch() == cur.branch():
888 if anc == cur and dst.branch() == cur.branch():
888 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
889 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
889 hg.update(self._repo, state[1])
890 hg.update(self._repo, state[1])
890 elif anc == dst:
891 elif anc == dst:
891 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
892 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
892 else:
893 else:
893 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
894 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
894 hg.merge(self._repo, state[1], remind=False)
895 hg.merge(self._repo, state[1], remind=False)
895
896
896 wctx = self._repo[None]
897 wctx = self._repo[None]
897 if self.dirty():
898 if self.dirty():
898 if anc != dst:
899 if anc != dst:
899 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
900 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
900 mergefunc()
901 mergefunc()
901 else:
902 else:
902 mergefunc()
903 mergefunc()
903 else:
904 else:
904 mergefunc()
905 mergefunc()
905
906
906 @annotatesubrepoerror
907 @annotatesubrepoerror
907 def push(self, opts):
908 def push(self, opts):
908 force = opts.get('force')
909 force = opts.get('force')
909 newbranch = opts.get('new_branch')
910 newbranch = opts.get('new_branch')
910 ssh = opts.get('ssh')
911 ssh = opts.get('ssh')
911
912
912 # push subrepos depth-first for coherent ordering
913 # push subrepos depth-first for coherent ordering
913 c = self._repo['']
914 c = self._repo['']
914 subs = c.substate # only repos that are committed
915 subs = c.substate # only repos that are committed
915 for s in sorted(subs):
916 for s in sorted(subs):
916 if c.sub(s).push(opts) == 0:
917 if c.sub(s).push(opts) == 0:
917 return False
918 return False
918
919
919 dsturl = _abssource(self._repo, True)
920 dsturl = _abssource(self._repo, True)
920 if not force:
921 if not force:
921 if self.storeclean(dsturl):
922 if self.storeclean(dsturl):
922 self.ui.status(
923 self.ui.status(
923 _('no changes made to subrepo %s since last push to %s\n')
924 _('no changes made to subrepo %s since last push to %s\n')
924 % (subrelpath(self), dsturl))
925 % (subrelpath(self), dsturl))
925 return None
926 return None
926 self.ui.status(_('pushing subrepo %s to %s\n') %
927 self.ui.status(_('pushing subrepo %s to %s\n') %
927 (subrelpath(self), dsturl))
928 (subrelpath(self), dsturl))
928 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
929 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
929 res = exchange.push(self._repo, other, force, newbranch=newbranch)
930 res = exchange.push(self._repo, other, force, newbranch=newbranch)
930
931
931 # the repo is now clean
932 # the repo is now clean
932 self._cachestorehash(dsturl)
933 self._cachestorehash(dsturl)
933 return res.cgresult
934 return res.cgresult
934
935
935 @annotatesubrepoerror
936 @annotatesubrepoerror
936 def outgoing(self, ui, dest, opts):
937 def outgoing(self, ui, dest, opts):
937 if 'rev' in opts or 'branch' in opts:
938 if 'rev' in opts or 'branch' in opts:
938 opts = copy.copy(opts)
939 opts = copy.copy(opts)
939 opts.pop('rev', None)
940 opts.pop('rev', None)
940 opts.pop('branch', None)
941 opts.pop('branch', None)
941 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
942 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
942
943
943 @annotatesubrepoerror
944 @annotatesubrepoerror
944 def incoming(self, ui, source, opts):
945 def incoming(self, ui, source, opts):
945 if 'rev' in opts or 'branch' in opts:
946 if 'rev' in opts or 'branch' in opts:
946 opts = copy.copy(opts)
947 opts = copy.copy(opts)
947 opts.pop('rev', None)
948 opts.pop('rev', None)
948 opts.pop('branch', None)
949 opts.pop('branch', None)
949 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
950 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
950
951
951 @annotatesubrepoerror
952 @annotatesubrepoerror
952 def files(self):
953 def files(self):
953 rev = self._state[1]
954 rev = self._state[1]
954 ctx = self._repo[rev]
955 ctx = self._repo[rev]
955 return ctx.manifest().keys()
956 return ctx.manifest().keys()
956
957
957 def filedata(self, name):
958 def filedata(self, name):
958 rev = self._state[1]
959 rev = self._state[1]
959 return self._repo[rev][name].data()
960 return self._repo[rev][name].data()
960
961
961 def fileflags(self, name):
962 def fileflags(self, name):
962 rev = self._state[1]
963 rev = self._state[1]
963 ctx = self._repo[rev]
964 ctx = self._repo[rev]
964 return ctx.flags(name)
965 return ctx.flags(name)
965
966
966 @annotatesubrepoerror
967 @annotatesubrepoerror
967 def printfiles(self, ui, m, fm, fmt, subrepos):
968 def printfiles(self, ui, m, fm, fmt, subrepos):
968 # If the parent context is a workingctx, use the workingctx here for
969 # If the parent context is a workingctx, use the workingctx here for
969 # consistency.
970 # consistency.
970 if self._ctx.rev() is None:
971 if self._ctx.rev() is None:
971 ctx = self._repo[None]
972 ctx = self._repo[None]
972 else:
973 else:
973 rev = self._state[1]
974 rev = self._state[1]
974 ctx = self._repo[rev]
975 ctx = self._repo[rev]
975 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
976 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
976
977
977 @annotatesubrepoerror
978 @annotatesubrepoerror
978 def getfileset(self, expr):
979 def getfileset(self, expr):
979 if self._ctx.rev() is None:
980 if self._ctx.rev() is None:
980 ctx = self._repo[None]
981 ctx = self._repo[None]
981 else:
982 else:
982 rev = self._state[1]
983 rev = self._state[1]
983 ctx = self._repo[rev]
984 ctx = self._repo[rev]
984
985
985 files = ctx.getfileset(expr)
986 files = ctx.getfileset(expr)
986
987
987 for subpath in ctx.substate:
988 for subpath in ctx.substate:
988 sub = ctx.sub(subpath)
989 sub = ctx.sub(subpath)
989
990
990 try:
991 try:
991 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
992 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
992 except error.LookupError:
993 except error.LookupError:
993 self.ui.status(_("skipping missing subrepository: %s\n")
994 self.ui.status(_("skipping missing subrepository: %s\n")
994 % self.wvfs.reljoin(reporelpath(self), subpath))
995 % self.wvfs.reljoin(reporelpath(self), subpath))
995 return files
996 return files
996
997
997 def walk(self, match):
998 def walk(self, match):
998 ctx = self._repo[None]
999 ctx = self._repo[None]
999 return ctx.walk(match)
1000 return ctx.walk(match)
1000
1001
1001 @annotatesubrepoerror
1002 @annotatesubrepoerror
1002 def forget(self, match, prefix):
1003 def forget(self, match, prefix):
1003 return cmdutil.forget(self.ui, self._repo, match,
1004 return cmdutil.forget(self.ui, self._repo, match,
1004 self.wvfs.reljoin(prefix, self._path), True)
1005 self.wvfs.reljoin(prefix, self._path), True)
1005
1006
1006 @annotatesubrepoerror
1007 @annotatesubrepoerror
1007 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1008 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
1008 return cmdutil.remove(self.ui, self._repo, matcher,
1009 return cmdutil.remove(self.ui, self._repo, matcher,
1009 self.wvfs.reljoin(prefix, self._path),
1010 self.wvfs.reljoin(prefix, self._path),
1010 after, force, subrepos)
1011 after, force, subrepos)
1011
1012
1012 @annotatesubrepoerror
1013 @annotatesubrepoerror
1013 def revert(self, substate, *pats, **opts):
1014 def revert(self, substate, *pats, **opts):
1014 # reverting a subrepo is a 2 step process:
1015 # reverting a subrepo is a 2 step process:
1015 # 1. if the no_backup is not set, revert all modified
1016 # 1. if the no_backup is not set, revert all modified
1016 # files inside the subrepo
1017 # files inside the subrepo
1017 # 2. update the subrepo to the revision specified in
1018 # 2. update the subrepo to the revision specified in
1018 # the corresponding substate dictionary
1019 # the corresponding substate dictionary
1019 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1020 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1020 if not opts.get('no_backup'):
1021 if not opts.get('no_backup'):
1021 # Revert all files on the subrepo, creating backups
1022 # Revert all files on the subrepo, creating backups
1022 # Note that this will not recursively revert subrepos
1023 # Note that this will not recursively revert subrepos
1023 # We could do it if there was a set:subrepos() predicate
1024 # We could do it if there was a set:subrepos() predicate
1024 opts = opts.copy()
1025 opts = opts.copy()
1025 opts['date'] = None
1026 opts['date'] = None
1026 opts['rev'] = substate[1]
1027 opts['rev'] = substate[1]
1027
1028
1028 self.filerevert(*pats, **opts)
1029 self.filerevert(*pats, **opts)
1029
1030
1030 # Update the repo to the revision specified in the given substate
1031 # Update the repo to the revision specified in the given substate
1031 if not opts.get('dry_run'):
1032 if not opts.get('dry_run'):
1032 self.get(substate, overwrite=True)
1033 self.get(substate, overwrite=True)
1033
1034
1034 def filerevert(self, *pats, **opts):
1035 def filerevert(self, *pats, **opts):
1035 ctx = self._repo[opts['rev']]
1036 ctx = self._repo[opts['rev']]
1036 parents = self._repo.dirstate.parents()
1037 parents = self._repo.dirstate.parents()
1037 if opts.get('all'):
1038 if opts.get('all'):
1038 pats = ['set:modified()']
1039 pats = ['set:modified()']
1039 else:
1040 else:
1040 pats = []
1041 pats = []
1041 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1042 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1042
1043
1043 def shortid(self, revid):
1044 def shortid(self, revid):
1044 return revid[:12]
1045 return revid[:12]
1045
1046
1046 def verify(self):
1047 def verify(self):
1047 try:
1048 try:
1048 rev = self._state[1]
1049 rev = self._state[1]
1049 ctx = self._repo.unfiltered()[rev]
1050 ctx = self._repo.unfiltered()[rev]
1050 if ctx.hidden():
1051 if ctx.hidden():
1051 # Since hidden revisions aren't pushed/pulled, it seems worth an
1052 # Since hidden revisions aren't pushed/pulled, it seems worth an
1052 # explicit warning.
1053 # explicit warning.
1053 ui = self._repo.ui
1054 ui = self._repo.ui
1054 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1055 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1055 (self._relpath, node.short(self._ctx.node())))
1056 (self._relpath, node.short(self._ctx.node())))
1056 return 0
1057 return 0
1057 except error.RepoLookupError:
1058 except error.RepoLookupError:
1058 # A missing subrepo revision may be a case of needing to pull it, so
1059 # A missing subrepo revision may be a case of needing to pull it, so
1059 # don't treat this as an error.
1060 # don't treat this as an error.
1060 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1061 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1061 (self._relpath, node.short(self._ctx.node())))
1062 (self._relpath, node.short(self._ctx.node())))
1062 return 0
1063 return 0
1063
1064
1064 @propertycache
1065 @propertycache
1065 def wvfs(self):
1066 def wvfs(self):
1066 """return own wvfs for efficiency and consistency
1067 """return own wvfs for efficiency and consistency
1067 """
1068 """
1068 return self._repo.wvfs
1069 return self._repo.wvfs
1069
1070
1070 @propertycache
1071 @propertycache
1071 def _relpath(self):
1072 def _relpath(self):
1072 """return path to this subrepository as seen from outermost repository
1073 """return path to this subrepository as seen from outermost repository
1073 """
1074 """
1074 # Keep consistent dir separators by avoiding vfs.join(self._path)
1075 # Keep consistent dir separators by avoiding vfs.join(self._path)
1075 return reporelpath(self._repo)
1076 return reporelpath(self._repo)
1076
1077
1077 class svnsubrepo(abstractsubrepo):
1078 class svnsubrepo(abstractsubrepo):
1078 def __init__(self, ctx, path, state, allowcreate):
1079 def __init__(self, ctx, path, state, allowcreate):
1079 super(svnsubrepo, self).__init__(ctx, path)
1080 super(svnsubrepo, self).__init__(ctx, path)
1080 self._state = state
1081 self._state = state
1081 self._exe = util.findexe('svn')
1082 self._exe = util.findexe('svn')
1082 if not self._exe:
1083 if not self._exe:
1083 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
1084 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
1084 % self._path)
1085 % self._path)
1085
1086
1086 def _svncommand(self, commands, filename='', failok=False):
1087 def _svncommand(self, commands, filename='', failok=False):
1087 cmd = [self._exe]
1088 cmd = [self._exe]
1088 extrakw = {}
1089 extrakw = {}
1089 if not self.ui.interactive():
1090 if not self.ui.interactive():
1090 # Making stdin be a pipe should prevent svn from behaving
1091 # Making stdin be a pipe should prevent svn from behaving
1091 # interactively even if we can't pass --non-interactive.
1092 # interactively even if we can't pass --non-interactive.
1092 extrakw['stdin'] = subprocess.PIPE
1093 extrakw['stdin'] = subprocess.PIPE
1093 # Starting in svn 1.5 --non-interactive is a global flag
1094 # Starting in svn 1.5 --non-interactive is a global flag
1094 # instead of being per-command, but we need to support 1.4 so
1095 # instead of being per-command, but we need to support 1.4 so
1095 # we have to be intelligent about what commands take
1096 # we have to be intelligent about what commands take
1096 # --non-interactive.
1097 # --non-interactive.
1097 if commands[0] in ('update', 'checkout', 'commit'):
1098 if commands[0] in ('update', 'checkout', 'commit'):
1098 cmd.append('--non-interactive')
1099 cmd.append('--non-interactive')
1099 cmd.extend(commands)
1100 cmd.extend(commands)
1100 if filename is not None:
1101 if filename is not None:
1101 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1102 path = self.wvfs.reljoin(self._ctx.repo().origroot,
1102 self._path, filename)
1103 self._path, filename)
1103 cmd.append(path)
1104 cmd.append(path)
1104 env = dict(os.environ)
1105 env = dict(os.environ)
1105 # Avoid localized output, preserve current locale for everything else.
1106 # Avoid localized output, preserve current locale for everything else.
1106 lc_all = env.get('LC_ALL')
1107 lc_all = env.get('LC_ALL')
1107 if lc_all:
1108 if lc_all:
1108 env['LANG'] = lc_all
1109 env['LANG'] = lc_all
1109 del env['LC_ALL']
1110 del env['LC_ALL']
1110 env['LC_MESSAGES'] = 'C'
1111 env['LC_MESSAGES'] = 'C'
1111 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1112 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
1112 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1113 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
1113 universal_newlines=True, env=env, **extrakw)
1114 universal_newlines=True, env=env, **extrakw)
1114 stdout, stderr = p.communicate()
1115 stdout, stderr = p.communicate()
1115 stderr = stderr.strip()
1116 stderr = stderr.strip()
1116 if not failok:
1117 if not failok:
1117 if p.returncode:
1118 if p.returncode:
1118 raise error.Abort(stderr or 'exited with code %d'
1119 raise error.Abort(stderr or 'exited with code %d'
1119 % p.returncode)
1120 % p.returncode)
1120 if stderr:
1121 if stderr:
1121 self.ui.warn(stderr + '\n')
1122 self.ui.warn(stderr + '\n')
1122 return stdout, stderr
1123 return stdout, stderr
1123
1124
1124 @propertycache
1125 @propertycache
1125 def _svnversion(self):
1126 def _svnversion(self):
1126 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1127 output, err = self._svncommand(['--version', '--quiet'], filename=None)
1127 m = re.search(r'^(\d+)\.(\d+)', output)
1128 m = re.search(r'^(\d+)\.(\d+)', output)
1128 if not m:
1129 if not m:
1129 raise error.Abort(_('cannot retrieve svn tool version'))
1130 raise error.Abort(_('cannot retrieve svn tool version'))
1130 return (int(m.group(1)), int(m.group(2)))
1131 return (int(m.group(1)), int(m.group(2)))
1131
1132
1132 def _wcrevs(self):
1133 def _wcrevs(self):
1133 # Get the working directory revision as well as the last
1134 # Get the working directory revision as well as the last
1134 # commit revision so we can compare the subrepo state with
1135 # commit revision so we can compare the subrepo state with
1135 # both. We used to store the working directory one.
1136 # both. We used to store the working directory one.
1136 output, err = self._svncommand(['info', '--xml'])
1137 output, err = self._svncommand(['info', '--xml'])
1137 doc = xml.dom.minidom.parseString(output)
1138 doc = xml.dom.minidom.parseString(output)
1138 entries = doc.getElementsByTagName('entry')
1139 entries = doc.getElementsByTagName('entry')
1139 lastrev, rev = '0', '0'
1140 lastrev, rev = '0', '0'
1140 if entries:
1141 if entries:
1141 rev = str(entries[0].getAttribute('revision')) or '0'
1142 rev = str(entries[0].getAttribute('revision')) or '0'
1142 commits = entries[0].getElementsByTagName('commit')
1143 commits = entries[0].getElementsByTagName('commit')
1143 if commits:
1144 if commits:
1144 lastrev = str(commits[0].getAttribute('revision')) or '0'
1145 lastrev = str(commits[0].getAttribute('revision')) or '0'
1145 return (lastrev, rev)
1146 return (lastrev, rev)
1146
1147
1147 def _wcrev(self):
1148 def _wcrev(self):
1148 return self._wcrevs()[0]
1149 return self._wcrevs()[0]
1149
1150
1150 def _wcchanged(self):
1151 def _wcchanged(self):
1151 """Return (changes, extchanges, missing) where changes is True
1152 """Return (changes, extchanges, missing) where changes is True
1152 if the working directory was changed, extchanges is
1153 if the working directory was changed, extchanges is
1153 True if any of these changes concern an external entry and missing
1154 True if any of these changes concern an external entry and missing
1154 is True if any change is a missing entry.
1155 is True if any change is a missing entry.
1155 """
1156 """
1156 output, err = self._svncommand(['status', '--xml'])
1157 output, err = self._svncommand(['status', '--xml'])
1157 externals, changes, missing = [], [], []
1158 externals, changes, missing = [], [], []
1158 doc = xml.dom.minidom.parseString(output)
1159 doc = xml.dom.minidom.parseString(output)
1159 for e in doc.getElementsByTagName('entry'):
1160 for e in doc.getElementsByTagName('entry'):
1160 s = e.getElementsByTagName('wc-status')
1161 s = e.getElementsByTagName('wc-status')
1161 if not s:
1162 if not s:
1162 continue
1163 continue
1163 item = s[0].getAttribute('item')
1164 item = s[0].getAttribute('item')
1164 props = s[0].getAttribute('props')
1165 props = s[0].getAttribute('props')
1165 path = e.getAttribute('path')
1166 path = e.getAttribute('path')
1166 if item == 'external':
1167 if item == 'external':
1167 externals.append(path)
1168 externals.append(path)
1168 elif item == 'missing':
1169 elif item == 'missing':
1169 missing.append(path)
1170 missing.append(path)
1170 if (item not in ('', 'normal', 'unversioned', 'external')
1171 if (item not in ('', 'normal', 'unversioned', 'external')
1171 or props not in ('', 'none', 'normal')):
1172 or props not in ('', 'none', 'normal')):
1172 changes.append(path)
1173 changes.append(path)
1173 for path in changes:
1174 for path in changes:
1174 for ext in externals:
1175 for ext in externals:
1175 if path == ext or path.startswith(ext + os.sep):
1176 if path == ext or path.startswith(ext + pycompat.ossep):
1176 return True, True, bool(missing)
1177 return True, True, bool(missing)
1177 return bool(changes), False, bool(missing)
1178 return bool(changes), False, bool(missing)
1178
1179
1179 def dirty(self, ignoreupdate=False):
1180 def dirty(self, ignoreupdate=False):
1180 if not self._wcchanged()[0]:
1181 if not self._wcchanged()[0]:
1181 if self._state[1] in self._wcrevs() or ignoreupdate:
1182 if self._state[1] in self._wcrevs() or ignoreupdate:
1182 return False
1183 return False
1183 return True
1184 return True
1184
1185
1185 def basestate(self):
1186 def basestate(self):
1186 lastrev, rev = self._wcrevs()
1187 lastrev, rev = self._wcrevs()
1187 if lastrev != rev:
1188 if lastrev != rev:
1188 # Last committed rev is not the same than rev. We would
1189 # Last committed rev is not the same than rev. We would
1189 # like to take lastrev but we do not know if the subrepo
1190 # like to take lastrev but we do not know if the subrepo
1190 # URL exists at lastrev. Test it and fallback to rev it
1191 # URL exists at lastrev. Test it and fallback to rev it
1191 # is not there.
1192 # is not there.
1192 try:
1193 try:
1193 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1194 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1194 return lastrev
1195 return lastrev
1195 except error.Abort:
1196 except error.Abort:
1196 pass
1197 pass
1197 return rev
1198 return rev
1198
1199
1199 @annotatesubrepoerror
1200 @annotatesubrepoerror
1200 def commit(self, text, user, date):
1201 def commit(self, text, user, date):
1201 # user and date are out of our hands since svn is centralized
1202 # user and date are out of our hands since svn is centralized
1202 changed, extchanged, missing = self._wcchanged()
1203 changed, extchanged, missing = self._wcchanged()
1203 if not changed:
1204 if not changed:
1204 return self.basestate()
1205 return self.basestate()
1205 if extchanged:
1206 if extchanged:
1206 # Do not try to commit externals
1207 # Do not try to commit externals
1207 raise error.Abort(_('cannot commit svn externals'))
1208 raise error.Abort(_('cannot commit svn externals'))
1208 if missing:
1209 if missing:
1209 # svn can commit with missing entries but aborting like hg
1210 # svn can commit with missing entries but aborting like hg
1210 # seems a better approach.
1211 # seems a better approach.
1211 raise error.Abort(_('cannot commit missing svn entries'))
1212 raise error.Abort(_('cannot commit missing svn entries'))
1212 commitinfo, err = self._svncommand(['commit', '-m', text])
1213 commitinfo, err = self._svncommand(['commit', '-m', text])
1213 self.ui.status(commitinfo)
1214 self.ui.status(commitinfo)
1214 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1215 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1215 if not newrev:
1216 if not newrev:
1216 if not commitinfo.strip():
1217 if not commitinfo.strip():
1217 # Sometimes, our definition of "changed" differs from
1218 # Sometimes, our definition of "changed" differs from
1218 # svn one. For instance, svn ignores missing files
1219 # svn one. For instance, svn ignores missing files
1219 # when committing. If there are only missing files, no
1220 # when committing. If there are only missing files, no
1220 # commit is made, no output and no error code.
1221 # commit is made, no output and no error code.
1221 raise error.Abort(_('failed to commit svn changes'))
1222 raise error.Abort(_('failed to commit svn changes'))
1222 raise error.Abort(commitinfo.splitlines()[-1])
1223 raise error.Abort(commitinfo.splitlines()[-1])
1223 newrev = newrev.groups()[0]
1224 newrev = newrev.groups()[0]
1224 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1225 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1225 return newrev
1226 return newrev
1226
1227
1227 @annotatesubrepoerror
1228 @annotatesubrepoerror
1228 def remove(self):
1229 def remove(self):
1229 if self.dirty():
1230 if self.dirty():
1230 self.ui.warn(_('not removing repo %s because '
1231 self.ui.warn(_('not removing repo %s because '
1231 'it has changes.\n') % self._path)
1232 'it has changes.\n') % self._path)
1232 return
1233 return
1233 self.ui.note(_('removing subrepo %s\n') % self._path)
1234 self.ui.note(_('removing subrepo %s\n') % self._path)
1234
1235
1235 self.wvfs.rmtree(forcibly=True)
1236 self.wvfs.rmtree(forcibly=True)
1236 try:
1237 try:
1237 pwvfs = self._ctx.repo().wvfs
1238 pwvfs = self._ctx.repo().wvfs
1238 pwvfs.removedirs(pwvfs.dirname(self._path))
1239 pwvfs.removedirs(pwvfs.dirname(self._path))
1239 except OSError:
1240 except OSError:
1240 pass
1241 pass
1241
1242
1242 @annotatesubrepoerror
1243 @annotatesubrepoerror
1243 def get(self, state, overwrite=False):
1244 def get(self, state, overwrite=False):
1244 if overwrite:
1245 if overwrite:
1245 self._svncommand(['revert', '--recursive'])
1246 self._svncommand(['revert', '--recursive'])
1246 args = ['checkout']
1247 args = ['checkout']
1247 if self._svnversion >= (1, 5):
1248 if self._svnversion >= (1, 5):
1248 args.append('--force')
1249 args.append('--force')
1249 # The revision must be specified at the end of the URL to properly
1250 # The revision must be specified at the end of the URL to properly
1250 # update to a directory which has since been deleted and recreated.
1251 # update to a directory which has since been deleted and recreated.
1251 args.append('%s@%s' % (state[0], state[1]))
1252 args.append('%s@%s' % (state[0], state[1]))
1252 status, err = self._svncommand(args, failok=True)
1253 status, err = self._svncommand(args, failok=True)
1253 _sanitize(self.ui, self.wvfs, '.svn')
1254 _sanitize(self.ui, self.wvfs, '.svn')
1254 if not re.search('Checked out revision [0-9]+.', status):
1255 if not re.search('Checked out revision [0-9]+.', status):
1255 if ('is already a working copy for a different URL' in err
1256 if ('is already a working copy for a different URL' in err
1256 and (self._wcchanged()[:2] == (False, False))):
1257 and (self._wcchanged()[:2] == (False, False))):
1257 # obstructed but clean working copy, so just blow it away.
1258 # obstructed but clean working copy, so just blow it away.
1258 self.remove()
1259 self.remove()
1259 self.get(state, overwrite=False)
1260 self.get(state, overwrite=False)
1260 return
1261 return
1261 raise error.Abort((status or err).splitlines()[-1])
1262 raise error.Abort((status or err).splitlines()[-1])
1262 self.ui.status(status)
1263 self.ui.status(status)
1263
1264
1264 @annotatesubrepoerror
1265 @annotatesubrepoerror
1265 def merge(self, state):
1266 def merge(self, state):
1266 old = self._state[1]
1267 old = self._state[1]
1267 new = state[1]
1268 new = state[1]
1268 wcrev = self._wcrev()
1269 wcrev = self._wcrev()
1269 if new != wcrev:
1270 if new != wcrev:
1270 dirty = old == wcrev or self._wcchanged()[0]
1271 dirty = old == wcrev or self._wcchanged()[0]
1271 if _updateprompt(self.ui, self, dirty, wcrev, new):
1272 if _updateprompt(self.ui, self, dirty, wcrev, new):
1272 self.get(state, False)
1273 self.get(state, False)
1273
1274
1274 def push(self, opts):
1275 def push(self, opts):
1275 # push is a no-op for SVN
1276 # push is a no-op for SVN
1276 return True
1277 return True
1277
1278
1278 @annotatesubrepoerror
1279 @annotatesubrepoerror
1279 def files(self):
1280 def files(self):
1280 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1281 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1281 doc = xml.dom.minidom.parseString(output)
1282 doc = xml.dom.minidom.parseString(output)
1282 paths = []
1283 paths = []
1283 for e in doc.getElementsByTagName('entry'):
1284 for e in doc.getElementsByTagName('entry'):
1284 kind = str(e.getAttribute('kind'))
1285 kind = str(e.getAttribute('kind'))
1285 if kind != 'file':
1286 if kind != 'file':
1286 continue
1287 continue
1287 name = ''.join(c.data for c
1288 name = ''.join(c.data for c
1288 in e.getElementsByTagName('name')[0].childNodes
1289 in e.getElementsByTagName('name')[0].childNodes
1289 if c.nodeType == c.TEXT_NODE)
1290 if c.nodeType == c.TEXT_NODE)
1290 paths.append(name.encode('utf-8'))
1291 paths.append(name.encode('utf-8'))
1291 return paths
1292 return paths
1292
1293
1293 def filedata(self, name):
1294 def filedata(self, name):
1294 return self._svncommand(['cat'], name)[0]
1295 return self._svncommand(['cat'], name)[0]
1295
1296
1296
1297
1297 class gitsubrepo(abstractsubrepo):
1298 class gitsubrepo(abstractsubrepo):
1298 def __init__(self, ctx, path, state, allowcreate):
1299 def __init__(self, ctx, path, state, allowcreate):
1299 super(gitsubrepo, self).__init__(ctx, path)
1300 super(gitsubrepo, self).__init__(ctx, path)
1300 self._state = state
1301 self._state = state
1301 self._abspath = ctx.repo().wjoin(path)
1302 self._abspath = ctx.repo().wjoin(path)
1302 self._subparent = ctx.repo()
1303 self._subparent = ctx.repo()
1303 self._ensuregit()
1304 self._ensuregit()
1304
1305
1305 def _ensuregit(self):
1306 def _ensuregit(self):
1306 try:
1307 try:
1307 self._gitexecutable = 'git'
1308 self._gitexecutable = 'git'
1308 out, err = self._gitnodir(['--version'])
1309 out, err = self._gitnodir(['--version'])
1309 except OSError as e:
1310 except OSError as e:
1310 genericerror = _("error executing git for subrepo '%s': %s")
1311 genericerror = _("error executing git for subrepo '%s': %s")
1311 notfoundhint = _("check git is installed and in your PATH")
1312 notfoundhint = _("check git is installed and in your PATH")
1312 if e.errno != errno.ENOENT:
1313 if e.errno != errno.ENOENT:
1313 raise error.Abort(genericerror % (self._path, e.strerror))
1314 raise error.Abort(genericerror % (self._path, e.strerror))
1314 elif os.name == 'nt':
1315 elif os.name == 'nt':
1315 try:
1316 try:
1316 self._gitexecutable = 'git.cmd'
1317 self._gitexecutable = 'git.cmd'
1317 out, err = self._gitnodir(['--version'])
1318 out, err = self._gitnodir(['--version'])
1318 except OSError as e2:
1319 except OSError as e2:
1319 if e2.errno == errno.ENOENT:
1320 if e2.errno == errno.ENOENT:
1320 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1321 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1321 " for subrepo '%s'") % self._path,
1322 " for subrepo '%s'") % self._path,
1322 hint=notfoundhint)
1323 hint=notfoundhint)
1323 else:
1324 else:
1324 raise error.Abort(genericerror % (self._path,
1325 raise error.Abort(genericerror % (self._path,
1325 e2.strerror))
1326 e2.strerror))
1326 else:
1327 else:
1327 raise error.Abort(_("couldn't find git for subrepo '%s'")
1328 raise error.Abort(_("couldn't find git for subrepo '%s'")
1328 % self._path, hint=notfoundhint)
1329 % self._path, hint=notfoundhint)
1329 versionstatus = self._checkversion(out)
1330 versionstatus = self._checkversion(out)
1330 if versionstatus == 'unknown':
1331 if versionstatus == 'unknown':
1331 self.ui.warn(_('cannot retrieve git version\n'))
1332 self.ui.warn(_('cannot retrieve git version\n'))
1332 elif versionstatus == 'abort':
1333 elif versionstatus == 'abort':
1333 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1334 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1334 elif versionstatus == 'warning':
1335 elif versionstatus == 'warning':
1335 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1336 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1336
1337
1337 @staticmethod
1338 @staticmethod
1338 def _gitversion(out):
1339 def _gitversion(out):
1339 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1340 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1340 if m:
1341 if m:
1341 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1342 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1342
1343
1343 m = re.search(r'^git version (\d+)\.(\d+)', out)
1344 m = re.search(r'^git version (\d+)\.(\d+)', out)
1344 if m:
1345 if m:
1345 return (int(m.group(1)), int(m.group(2)), 0)
1346 return (int(m.group(1)), int(m.group(2)), 0)
1346
1347
1347 return -1
1348 return -1
1348
1349
1349 @staticmethod
1350 @staticmethod
1350 def _checkversion(out):
1351 def _checkversion(out):
1351 '''ensure git version is new enough
1352 '''ensure git version is new enough
1352
1353
1353 >>> _checkversion = gitsubrepo._checkversion
1354 >>> _checkversion = gitsubrepo._checkversion
1354 >>> _checkversion('git version 1.6.0')
1355 >>> _checkversion('git version 1.6.0')
1355 'ok'
1356 'ok'
1356 >>> _checkversion('git version 1.8.5')
1357 >>> _checkversion('git version 1.8.5')
1357 'ok'
1358 'ok'
1358 >>> _checkversion('git version 1.4.0')
1359 >>> _checkversion('git version 1.4.0')
1359 'abort'
1360 'abort'
1360 >>> _checkversion('git version 1.5.0')
1361 >>> _checkversion('git version 1.5.0')
1361 'warning'
1362 'warning'
1362 >>> _checkversion('git version 1.9-rc0')
1363 >>> _checkversion('git version 1.9-rc0')
1363 'ok'
1364 'ok'
1364 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1365 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1365 'ok'
1366 'ok'
1366 >>> _checkversion('git version 1.9.0.GIT')
1367 >>> _checkversion('git version 1.9.0.GIT')
1367 'ok'
1368 'ok'
1368 >>> _checkversion('git version 12345')
1369 >>> _checkversion('git version 12345')
1369 'unknown'
1370 'unknown'
1370 >>> _checkversion('no')
1371 >>> _checkversion('no')
1371 'unknown'
1372 'unknown'
1372 '''
1373 '''
1373 version = gitsubrepo._gitversion(out)
1374 version = gitsubrepo._gitversion(out)
1374 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1375 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1375 # despite the docstring comment. For now, error on 1.4.0, warn on
1376 # despite the docstring comment. For now, error on 1.4.0, warn on
1376 # 1.5.0 but attempt to continue.
1377 # 1.5.0 but attempt to continue.
1377 if version == -1:
1378 if version == -1:
1378 return 'unknown'
1379 return 'unknown'
1379 if version < (1, 5, 0):
1380 if version < (1, 5, 0):
1380 return 'abort'
1381 return 'abort'
1381 elif version < (1, 6, 0):
1382 elif version < (1, 6, 0):
1382 return 'warning'
1383 return 'warning'
1383 return 'ok'
1384 return 'ok'
1384
1385
1385 def _gitcommand(self, commands, env=None, stream=False):
1386 def _gitcommand(self, commands, env=None, stream=False):
1386 return self._gitdir(commands, env=env, stream=stream)[0]
1387 return self._gitdir(commands, env=env, stream=stream)[0]
1387
1388
1388 def _gitdir(self, commands, env=None, stream=False):
1389 def _gitdir(self, commands, env=None, stream=False):
1389 return self._gitnodir(commands, env=env, stream=stream,
1390 return self._gitnodir(commands, env=env, stream=stream,
1390 cwd=self._abspath)
1391 cwd=self._abspath)
1391
1392
1392 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1393 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1393 """Calls the git command
1394 """Calls the git command
1394
1395
1395 The methods tries to call the git command. versions prior to 1.6.0
1396 The methods tries to call the git command. versions prior to 1.6.0
1396 are not supported and very probably fail.
1397 are not supported and very probably fail.
1397 """
1398 """
1398 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1399 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1399 if env is None:
1400 if env is None:
1400 env = os.environ.copy()
1401 env = os.environ.copy()
1401 # disable localization for Git output (issue5176)
1402 # disable localization for Git output (issue5176)
1402 env['LC_ALL'] = 'C'
1403 env['LC_ALL'] = 'C'
1403 # fix for Git CVE-2015-7545
1404 # fix for Git CVE-2015-7545
1404 if 'GIT_ALLOW_PROTOCOL' not in env:
1405 if 'GIT_ALLOW_PROTOCOL' not in env:
1405 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1406 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1406 # unless ui.quiet is set, print git's stderr,
1407 # unless ui.quiet is set, print git's stderr,
1407 # which is mostly progress and useful info
1408 # which is mostly progress and useful info
1408 errpipe = None
1409 errpipe = None
1409 if self.ui.quiet:
1410 if self.ui.quiet:
1410 errpipe = open(os.devnull, 'w')
1411 errpipe = open(os.devnull, 'w')
1411 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1412 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1412 cwd=cwd, env=env, close_fds=util.closefds,
1413 cwd=cwd, env=env, close_fds=util.closefds,
1413 stdout=subprocess.PIPE, stderr=errpipe)
1414 stdout=subprocess.PIPE, stderr=errpipe)
1414 if stream:
1415 if stream:
1415 return p.stdout, None
1416 return p.stdout, None
1416
1417
1417 retdata = p.stdout.read().strip()
1418 retdata = p.stdout.read().strip()
1418 # wait for the child to exit to avoid race condition.
1419 # wait for the child to exit to avoid race condition.
1419 p.wait()
1420 p.wait()
1420
1421
1421 if p.returncode != 0 and p.returncode != 1:
1422 if p.returncode != 0 and p.returncode != 1:
1422 # there are certain error codes that are ok
1423 # there are certain error codes that are ok
1423 command = commands[0]
1424 command = commands[0]
1424 if command in ('cat-file', 'symbolic-ref'):
1425 if command in ('cat-file', 'symbolic-ref'):
1425 return retdata, p.returncode
1426 return retdata, p.returncode
1426 # for all others, abort
1427 # for all others, abort
1427 raise error.Abort(_('git %s error %d in %s') %
1428 raise error.Abort(_('git %s error %d in %s') %
1428 (command, p.returncode, self._relpath))
1429 (command, p.returncode, self._relpath))
1429
1430
1430 return retdata, p.returncode
1431 return retdata, p.returncode
1431
1432
1432 def _gitmissing(self):
1433 def _gitmissing(self):
1433 return not self.wvfs.exists('.git')
1434 return not self.wvfs.exists('.git')
1434
1435
1435 def _gitstate(self):
1436 def _gitstate(self):
1436 return self._gitcommand(['rev-parse', 'HEAD'])
1437 return self._gitcommand(['rev-parse', 'HEAD'])
1437
1438
1438 def _gitcurrentbranch(self):
1439 def _gitcurrentbranch(self):
1439 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1440 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1440 if err:
1441 if err:
1441 current = None
1442 current = None
1442 return current
1443 return current
1443
1444
1444 def _gitremote(self, remote):
1445 def _gitremote(self, remote):
1445 out = self._gitcommand(['remote', 'show', '-n', remote])
1446 out = self._gitcommand(['remote', 'show', '-n', remote])
1446 line = out.split('\n')[1]
1447 line = out.split('\n')[1]
1447 i = line.index('URL: ') + len('URL: ')
1448 i = line.index('URL: ') + len('URL: ')
1448 return line[i:]
1449 return line[i:]
1449
1450
1450 def _githavelocally(self, revision):
1451 def _githavelocally(self, revision):
1451 out, code = self._gitdir(['cat-file', '-e', revision])
1452 out, code = self._gitdir(['cat-file', '-e', revision])
1452 return code == 0
1453 return code == 0
1453
1454
1454 def _gitisancestor(self, r1, r2):
1455 def _gitisancestor(self, r1, r2):
1455 base = self._gitcommand(['merge-base', r1, r2])
1456 base = self._gitcommand(['merge-base', r1, r2])
1456 return base == r1
1457 return base == r1
1457
1458
1458 def _gitisbare(self):
1459 def _gitisbare(self):
1459 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1460 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1460
1461
1461 def _gitupdatestat(self):
1462 def _gitupdatestat(self):
1462 """This must be run before git diff-index.
1463 """This must be run before git diff-index.
1463 diff-index only looks at changes to file stat;
1464 diff-index only looks at changes to file stat;
1464 this command looks at file contents and updates the stat."""
1465 this command looks at file contents and updates the stat."""
1465 self._gitcommand(['update-index', '-q', '--refresh'])
1466 self._gitcommand(['update-index', '-q', '--refresh'])
1466
1467
1467 def _gitbranchmap(self):
1468 def _gitbranchmap(self):
1468 '''returns 2 things:
1469 '''returns 2 things:
1469 a map from git branch to revision
1470 a map from git branch to revision
1470 a map from revision to branches'''
1471 a map from revision to branches'''
1471 branch2rev = {}
1472 branch2rev = {}
1472 rev2branch = {}
1473 rev2branch = {}
1473
1474
1474 out = self._gitcommand(['for-each-ref', '--format',
1475 out = self._gitcommand(['for-each-ref', '--format',
1475 '%(objectname) %(refname)'])
1476 '%(objectname) %(refname)'])
1476 for line in out.split('\n'):
1477 for line in out.split('\n'):
1477 revision, ref = line.split(' ')
1478 revision, ref = line.split(' ')
1478 if (not ref.startswith('refs/heads/') and
1479 if (not ref.startswith('refs/heads/') and
1479 not ref.startswith('refs/remotes/')):
1480 not ref.startswith('refs/remotes/')):
1480 continue
1481 continue
1481 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1482 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1482 continue # ignore remote/HEAD redirects
1483 continue # ignore remote/HEAD redirects
1483 branch2rev[ref] = revision
1484 branch2rev[ref] = revision
1484 rev2branch.setdefault(revision, []).append(ref)
1485 rev2branch.setdefault(revision, []).append(ref)
1485 return branch2rev, rev2branch
1486 return branch2rev, rev2branch
1486
1487
1487 def _gittracking(self, branches):
1488 def _gittracking(self, branches):
1488 'return map of remote branch to local tracking branch'
1489 'return map of remote branch to local tracking branch'
1489 # assumes no more than one local tracking branch for each remote
1490 # assumes no more than one local tracking branch for each remote
1490 tracking = {}
1491 tracking = {}
1491 for b in branches:
1492 for b in branches:
1492 if b.startswith('refs/remotes/'):
1493 if b.startswith('refs/remotes/'):
1493 continue
1494 continue
1494 bname = b.split('/', 2)[2]
1495 bname = b.split('/', 2)[2]
1495 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1496 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1496 if remote:
1497 if remote:
1497 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1498 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1498 tracking['refs/remotes/%s/%s' %
1499 tracking['refs/remotes/%s/%s' %
1499 (remote, ref.split('/', 2)[2])] = b
1500 (remote, ref.split('/', 2)[2])] = b
1500 return tracking
1501 return tracking
1501
1502
1502 def _abssource(self, source):
1503 def _abssource(self, source):
1503 if '://' not in source:
1504 if '://' not in source:
1504 # recognize the scp syntax as an absolute source
1505 # recognize the scp syntax as an absolute source
1505 colon = source.find(':')
1506 colon = source.find(':')
1506 if colon != -1 and '/' not in source[:colon]:
1507 if colon != -1 and '/' not in source[:colon]:
1507 return source
1508 return source
1508 self._subsource = source
1509 self._subsource = source
1509 return _abssource(self)
1510 return _abssource(self)
1510
1511
1511 def _fetch(self, source, revision):
1512 def _fetch(self, source, revision):
1512 if self._gitmissing():
1513 if self._gitmissing():
1513 source = self._abssource(source)
1514 source = self._abssource(source)
1514 self.ui.status(_('cloning subrepo %s from %s\n') %
1515 self.ui.status(_('cloning subrepo %s from %s\n') %
1515 (self._relpath, source))
1516 (self._relpath, source))
1516 self._gitnodir(['clone', source, self._abspath])
1517 self._gitnodir(['clone', source, self._abspath])
1517 if self._githavelocally(revision):
1518 if self._githavelocally(revision):
1518 return
1519 return
1519 self.ui.status(_('pulling subrepo %s from %s\n') %
1520 self.ui.status(_('pulling subrepo %s from %s\n') %
1520 (self._relpath, self._gitremote('origin')))
1521 (self._relpath, self._gitremote('origin')))
1521 # try only origin: the originally cloned repo
1522 # try only origin: the originally cloned repo
1522 self._gitcommand(['fetch'])
1523 self._gitcommand(['fetch'])
1523 if not self._githavelocally(revision):
1524 if not self._githavelocally(revision):
1524 raise error.Abort(_("revision %s does not exist in subrepo %s\n") %
1525 raise error.Abort(_("revision %s does not exist in subrepo %s\n") %
1525 (revision, self._relpath))
1526 (revision, self._relpath))
1526
1527
1527 @annotatesubrepoerror
1528 @annotatesubrepoerror
1528 def dirty(self, ignoreupdate=False):
1529 def dirty(self, ignoreupdate=False):
1529 if self._gitmissing():
1530 if self._gitmissing():
1530 return self._state[1] != ''
1531 return self._state[1] != ''
1531 if self._gitisbare():
1532 if self._gitisbare():
1532 return True
1533 return True
1533 if not ignoreupdate and self._state[1] != self._gitstate():
1534 if not ignoreupdate and self._state[1] != self._gitstate():
1534 # different version checked out
1535 # different version checked out
1535 return True
1536 return True
1536 # check for staged changes or modified files; ignore untracked files
1537 # check for staged changes or modified files; ignore untracked files
1537 self._gitupdatestat()
1538 self._gitupdatestat()
1538 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1539 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1539 return code == 1
1540 return code == 1
1540
1541
1541 def basestate(self):
1542 def basestate(self):
1542 return self._gitstate()
1543 return self._gitstate()
1543
1544
1544 @annotatesubrepoerror
1545 @annotatesubrepoerror
1545 def get(self, state, overwrite=False):
1546 def get(self, state, overwrite=False):
1546 source, revision, kind = state
1547 source, revision, kind = state
1547 if not revision:
1548 if not revision:
1548 self.remove()
1549 self.remove()
1549 return
1550 return
1550 self._fetch(source, revision)
1551 self._fetch(source, revision)
1551 # if the repo was set to be bare, unbare it
1552 # if the repo was set to be bare, unbare it
1552 if self._gitisbare():
1553 if self._gitisbare():
1553 self._gitcommand(['config', 'core.bare', 'false'])
1554 self._gitcommand(['config', 'core.bare', 'false'])
1554 if self._gitstate() == revision:
1555 if self._gitstate() == revision:
1555 self._gitcommand(['reset', '--hard', 'HEAD'])
1556 self._gitcommand(['reset', '--hard', 'HEAD'])
1556 return
1557 return
1557 elif self._gitstate() == revision:
1558 elif self._gitstate() == revision:
1558 if overwrite:
1559 if overwrite:
1559 # first reset the index to unmark new files for commit, because
1560 # first reset the index to unmark new files for commit, because
1560 # reset --hard will otherwise throw away files added for commit,
1561 # reset --hard will otherwise throw away files added for commit,
1561 # not just unmark them.
1562 # not just unmark them.
1562 self._gitcommand(['reset', 'HEAD'])
1563 self._gitcommand(['reset', 'HEAD'])
1563 self._gitcommand(['reset', '--hard', 'HEAD'])
1564 self._gitcommand(['reset', '--hard', 'HEAD'])
1564 return
1565 return
1565 branch2rev, rev2branch = self._gitbranchmap()
1566 branch2rev, rev2branch = self._gitbranchmap()
1566
1567
1567 def checkout(args):
1568 def checkout(args):
1568 cmd = ['checkout']
1569 cmd = ['checkout']
1569 if overwrite:
1570 if overwrite:
1570 # first reset the index to unmark new files for commit, because
1571 # first reset the index to unmark new files for commit, because
1571 # the -f option will otherwise throw away files added for
1572 # the -f option will otherwise throw away files added for
1572 # commit, not just unmark them.
1573 # commit, not just unmark them.
1573 self._gitcommand(['reset', 'HEAD'])
1574 self._gitcommand(['reset', 'HEAD'])
1574 cmd.append('-f')
1575 cmd.append('-f')
1575 self._gitcommand(cmd + args)
1576 self._gitcommand(cmd + args)
1576 _sanitize(self.ui, self.wvfs, '.git')
1577 _sanitize(self.ui, self.wvfs, '.git')
1577
1578
1578 def rawcheckout():
1579 def rawcheckout():
1579 # no branch to checkout, check it out with no branch
1580 # no branch to checkout, check it out with no branch
1580 self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1581 self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
1581 self._relpath)
1582 self._relpath)
1582 self.ui.warn(_('check out a git branch if you intend '
1583 self.ui.warn(_('check out a git branch if you intend '
1583 'to make changes\n'))
1584 'to make changes\n'))
1584 checkout(['-q', revision])
1585 checkout(['-q', revision])
1585
1586
1586 if revision not in rev2branch:
1587 if revision not in rev2branch:
1587 rawcheckout()
1588 rawcheckout()
1588 return
1589 return
1589 branches = rev2branch[revision]
1590 branches = rev2branch[revision]
1590 firstlocalbranch = None
1591 firstlocalbranch = None
1591 for b in branches:
1592 for b in branches:
1592 if b == 'refs/heads/master':
1593 if b == 'refs/heads/master':
1593 # master trumps all other branches
1594 # master trumps all other branches
1594 checkout(['refs/heads/master'])
1595 checkout(['refs/heads/master'])
1595 return
1596 return
1596 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1597 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1597 firstlocalbranch = b
1598 firstlocalbranch = b
1598 if firstlocalbranch:
1599 if firstlocalbranch:
1599 checkout([firstlocalbranch])
1600 checkout([firstlocalbranch])
1600 return
1601 return
1601
1602
1602 tracking = self._gittracking(branch2rev.keys())
1603 tracking = self._gittracking(branch2rev.keys())
1603 # choose a remote branch already tracked if possible
1604 # choose a remote branch already tracked if possible
1604 remote = branches[0]
1605 remote = branches[0]
1605 if remote not in tracking:
1606 if remote not in tracking:
1606 for b in branches:
1607 for b in branches:
1607 if b in tracking:
1608 if b in tracking:
1608 remote = b
1609 remote = b
1609 break
1610 break
1610
1611
1611 if remote not in tracking:
1612 if remote not in tracking:
1612 # create a new local tracking branch
1613 # create a new local tracking branch
1613 local = remote.split('/', 3)[3]
1614 local = remote.split('/', 3)[3]
1614 checkout(['-b', local, remote])
1615 checkout(['-b', local, remote])
1615 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1616 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1616 # When updating to a tracked remote branch,
1617 # When updating to a tracked remote branch,
1617 # if the local tracking branch is downstream of it,
1618 # if the local tracking branch is downstream of it,
1618 # a normal `git pull` would have performed a "fast-forward merge"
1619 # a normal `git pull` would have performed a "fast-forward merge"
1619 # which is equivalent to updating the local branch to the remote.
1620 # which is equivalent to updating the local branch to the remote.
1620 # Since we are only looking at branching at update, we need to
1621 # Since we are only looking at branching at update, we need to
1621 # detect this situation and perform this action lazily.
1622 # detect this situation and perform this action lazily.
1622 if tracking[remote] != self._gitcurrentbranch():
1623 if tracking[remote] != self._gitcurrentbranch():
1623 checkout([tracking[remote]])
1624 checkout([tracking[remote]])
1624 self._gitcommand(['merge', '--ff', remote])
1625 self._gitcommand(['merge', '--ff', remote])
1625 _sanitize(self.ui, self.wvfs, '.git')
1626 _sanitize(self.ui, self.wvfs, '.git')
1626 else:
1627 else:
1627 # a real merge would be required, just checkout the revision
1628 # a real merge would be required, just checkout the revision
1628 rawcheckout()
1629 rawcheckout()
1629
1630
1630 @annotatesubrepoerror
1631 @annotatesubrepoerror
1631 def commit(self, text, user, date):
1632 def commit(self, text, user, date):
1632 if self._gitmissing():
1633 if self._gitmissing():
1633 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1634 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1634 cmd = ['commit', '-a', '-m', text]
1635 cmd = ['commit', '-a', '-m', text]
1635 env = os.environ.copy()
1636 env = os.environ.copy()
1636 if user:
1637 if user:
1637 cmd += ['--author', user]
1638 cmd += ['--author', user]
1638 if date:
1639 if date:
1639 # git's date parser silently ignores when seconds < 1e9
1640 # git's date parser silently ignores when seconds < 1e9
1640 # convert to ISO8601
1641 # convert to ISO8601
1641 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1642 env['GIT_AUTHOR_DATE'] = util.datestr(date,
1642 '%Y-%m-%dT%H:%M:%S %1%2')
1643 '%Y-%m-%dT%H:%M:%S %1%2')
1643 self._gitcommand(cmd, env=env)
1644 self._gitcommand(cmd, env=env)
1644 # make sure commit works otherwise HEAD might not exist under certain
1645 # make sure commit works otherwise HEAD might not exist under certain
1645 # circumstances
1646 # circumstances
1646 return self._gitstate()
1647 return self._gitstate()
1647
1648
1648 @annotatesubrepoerror
1649 @annotatesubrepoerror
1649 def merge(self, state):
1650 def merge(self, state):
1650 source, revision, kind = state
1651 source, revision, kind = state
1651 self._fetch(source, revision)
1652 self._fetch(source, revision)
1652 base = self._gitcommand(['merge-base', revision, self._state[1]])
1653 base = self._gitcommand(['merge-base', revision, self._state[1]])
1653 self._gitupdatestat()
1654 self._gitupdatestat()
1654 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1655 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1655
1656
1656 def mergefunc():
1657 def mergefunc():
1657 if base == revision:
1658 if base == revision:
1658 self.get(state) # fast forward merge
1659 self.get(state) # fast forward merge
1659 elif base != self._state[1]:
1660 elif base != self._state[1]:
1660 self._gitcommand(['merge', '--no-commit', revision])
1661 self._gitcommand(['merge', '--no-commit', revision])
1661 _sanitize(self.ui, self.wvfs, '.git')
1662 _sanitize(self.ui, self.wvfs, '.git')
1662
1663
1663 if self.dirty():
1664 if self.dirty():
1664 if self._gitstate() != revision:
1665 if self._gitstate() != revision:
1665 dirty = self._gitstate() == self._state[1] or code != 0
1666 dirty = self._gitstate() == self._state[1] or code != 0
1666 if _updateprompt(self.ui, self, dirty,
1667 if _updateprompt(self.ui, self, dirty,
1667 self._state[1][:7], revision[:7]):
1668 self._state[1][:7], revision[:7]):
1668 mergefunc()
1669 mergefunc()
1669 else:
1670 else:
1670 mergefunc()
1671 mergefunc()
1671
1672
1672 @annotatesubrepoerror
1673 @annotatesubrepoerror
1673 def push(self, opts):
1674 def push(self, opts):
1674 force = opts.get('force')
1675 force = opts.get('force')
1675
1676
1676 if not self._state[1]:
1677 if not self._state[1]:
1677 return True
1678 return True
1678 if self._gitmissing():
1679 if self._gitmissing():
1679 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1680 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1680 # if a branch in origin contains the revision, nothing to do
1681 # if a branch in origin contains the revision, nothing to do
1681 branch2rev, rev2branch = self._gitbranchmap()
1682 branch2rev, rev2branch = self._gitbranchmap()
1682 if self._state[1] in rev2branch:
1683 if self._state[1] in rev2branch:
1683 for b in rev2branch[self._state[1]]:
1684 for b in rev2branch[self._state[1]]:
1684 if b.startswith('refs/remotes/origin/'):
1685 if b.startswith('refs/remotes/origin/'):
1685 return True
1686 return True
1686 for b, revision in branch2rev.iteritems():
1687 for b, revision in branch2rev.iteritems():
1687 if b.startswith('refs/remotes/origin/'):
1688 if b.startswith('refs/remotes/origin/'):
1688 if self._gitisancestor(self._state[1], revision):
1689 if self._gitisancestor(self._state[1], revision):
1689 return True
1690 return True
1690 # otherwise, try to push the currently checked out branch
1691 # otherwise, try to push the currently checked out branch
1691 cmd = ['push']
1692 cmd = ['push']
1692 if force:
1693 if force:
1693 cmd.append('--force')
1694 cmd.append('--force')
1694
1695
1695 current = self._gitcurrentbranch()
1696 current = self._gitcurrentbranch()
1696 if current:
1697 if current:
1697 # determine if the current branch is even useful
1698 # determine if the current branch is even useful
1698 if not self._gitisancestor(self._state[1], current):
1699 if not self._gitisancestor(self._state[1], current):
1699 self.ui.warn(_('unrelated git branch checked out '
1700 self.ui.warn(_('unrelated git branch checked out '
1700 'in subrepo %s\n') % self._relpath)
1701 'in subrepo %s\n') % self._relpath)
1701 return False
1702 return False
1702 self.ui.status(_('pushing branch %s of subrepo %s\n') %
1703 self.ui.status(_('pushing branch %s of subrepo %s\n') %
1703 (current.split('/', 2)[2], self._relpath))
1704 (current.split('/', 2)[2], self._relpath))
1704 ret = self._gitdir(cmd + ['origin', current])
1705 ret = self._gitdir(cmd + ['origin', current])
1705 return ret[1] == 0
1706 return ret[1] == 0
1706 else:
1707 else:
1707 self.ui.warn(_('no branch checked out in subrepo %s\n'
1708 self.ui.warn(_('no branch checked out in subrepo %s\n'
1708 'cannot push revision %s\n') %
1709 'cannot push revision %s\n') %
1709 (self._relpath, self._state[1]))
1710 (self._relpath, self._state[1]))
1710 return False
1711 return False
1711
1712
1712 @annotatesubrepoerror
1713 @annotatesubrepoerror
1713 def add(self, ui, match, prefix, explicitonly, **opts):
1714 def add(self, ui, match, prefix, explicitonly, **opts):
1714 if self._gitmissing():
1715 if self._gitmissing():
1715 return []
1716 return []
1716
1717
1717 (modified, added, removed,
1718 (modified, added, removed,
1718 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1719 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1719 clean=True)
1720 clean=True)
1720
1721
1721 tracked = set()
1722 tracked = set()
1722 # dirstates 'amn' warn, 'r' is added again
1723 # dirstates 'amn' warn, 'r' is added again
1723 for l in (modified, added, deleted, clean):
1724 for l in (modified, added, deleted, clean):
1724 tracked.update(l)
1725 tracked.update(l)
1725
1726
1726 # Unknown files not of interest will be rejected by the matcher
1727 # Unknown files not of interest will be rejected by the matcher
1727 files = unknown
1728 files = unknown
1728 files.extend(match.files())
1729 files.extend(match.files())
1729
1730
1730 rejected = []
1731 rejected = []
1731
1732
1732 files = [f for f in sorted(set(files)) if match(f)]
1733 files = [f for f in sorted(set(files)) if match(f)]
1733 for f in files:
1734 for f in files:
1734 exact = match.exact(f)
1735 exact = match.exact(f)
1735 command = ["add"]
1736 command = ["add"]
1736 if exact:
1737 if exact:
1737 command.append("-f") #should be added, even if ignored
1738 command.append("-f") #should be added, even if ignored
1738 if ui.verbose or not exact:
1739 if ui.verbose or not exact:
1739 ui.status(_('adding %s\n') % match.rel(f))
1740 ui.status(_('adding %s\n') % match.rel(f))
1740
1741
1741 if f in tracked: # hg prints 'adding' even if already tracked
1742 if f in tracked: # hg prints 'adding' even if already tracked
1742 if exact:
1743 if exact:
1743 rejected.append(f)
1744 rejected.append(f)
1744 continue
1745 continue
1745 if not opts.get('dry_run'):
1746 if not opts.get('dry_run'):
1746 self._gitcommand(command + [f])
1747 self._gitcommand(command + [f])
1747
1748
1748 for f in rejected:
1749 for f in rejected:
1749 ui.warn(_("%s already tracked!\n") % match.abs(f))
1750 ui.warn(_("%s already tracked!\n") % match.abs(f))
1750
1751
1751 return rejected
1752 return rejected
1752
1753
1753 @annotatesubrepoerror
1754 @annotatesubrepoerror
1754 def remove(self):
1755 def remove(self):
1755 if self._gitmissing():
1756 if self._gitmissing():
1756 return
1757 return
1757 if self.dirty():
1758 if self.dirty():
1758 self.ui.warn(_('not removing repo %s because '
1759 self.ui.warn(_('not removing repo %s because '
1759 'it has changes.\n') % self._relpath)
1760 'it has changes.\n') % self._relpath)
1760 return
1761 return
1761 # we can't fully delete the repository as it may contain
1762 # we can't fully delete the repository as it may contain
1762 # local-only history
1763 # local-only history
1763 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1764 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1764 self._gitcommand(['config', 'core.bare', 'true'])
1765 self._gitcommand(['config', 'core.bare', 'true'])
1765 for f, kind in self.wvfs.readdir():
1766 for f, kind in self.wvfs.readdir():
1766 if f == '.git':
1767 if f == '.git':
1767 continue
1768 continue
1768 if kind == stat.S_IFDIR:
1769 if kind == stat.S_IFDIR:
1769 self.wvfs.rmtree(f)
1770 self.wvfs.rmtree(f)
1770 else:
1771 else:
1771 self.wvfs.unlink(f)
1772 self.wvfs.unlink(f)
1772
1773
1773 def archive(self, archiver, prefix, match=None):
1774 def archive(self, archiver, prefix, match=None):
1774 total = 0
1775 total = 0
1775 source, revision = self._state
1776 source, revision = self._state
1776 if not revision:
1777 if not revision:
1777 return total
1778 return total
1778 self._fetch(source, revision)
1779 self._fetch(source, revision)
1779
1780
1780 # Parse git's native archive command.
1781 # Parse git's native archive command.
1781 # This should be much faster than manually traversing the trees
1782 # This should be much faster than manually traversing the trees
1782 # and objects with many subprocess calls.
1783 # and objects with many subprocess calls.
1783 tarstream = self._gitcommand(['archive', revision], stream=True)
1784 tarstream = self._gitcommand(['archive', revision], stream=True)
1784 tar = tarfile.open(fileobj=tarstream, mode='r|')
1785 tar = tarfile.open(fileobj=tarstream, mode='r|')
1785 relpath = subrelpath(self)
1786 relpath = subrelpath(self)
1786 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1787 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1787 for i, info in enumerate(tar):
1788 for i, info in enumerate(tar):
1788 if info.isdir():
1789 if info.isdir():
1789 continue
1790 continue
1790 if match and not match(info.name):
1791 if match and not match(info.name):
1791 continue
1792 continue
1792 if info.issym():
1793 if info.issym():
1793 data = info.linkname
1794 data = info.linkname
1794 else:
1795 else:
1795 data = tar.extractfile(info).read()
1796 data = tar.extractfile(info).read()
1796 archiver.addfile(prefix + self._path + '/' + info.name,
1797 archiver.addfile(prefix + self._path + '/' + info.name,
1797 info.mode, info.issym(), data)
1798 info.mode, info.issym(), data)
1798 total += 1
1799 total += 1
1799 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1800 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1800 unit=_('files'))
1801 unit=_('files'))
1801 self.ui.progress(_('archiving (%s)') % relpath, None)
1802 self.ui.progress(_('archiving (%s)') % relpath, None)
1802 return total
1803 return total
1803
1804
1804
1805
1805 @annotatesubrepoerror
1806 @annotatesubrepoerror
1806 def cat(self, match, prefix, **opts):
1807 def cat(self, match, prefix, **opts):
1807 rev = self._state[1]
1808 rev = self._state[1]
1808 if match.anypats():
1809 if match.anypats():
1809 return 1 #No support for include/exclude yet
1810 return 1 #No support for include/exclude yet
1810
1811
1811 if not match.files():
1812 if not match.files():
1812 return 1
1813 return 1
1813
1814
1814 for f in match.files():
1815 for f in match.files():
1815 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1816 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1816 fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
1817 fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
1817 self._ctx.node(),
1818 self._ctx.node(),
1818 pathname=self.wvfs.reljoin(prefix, f))
1819 pathname=self.wvfs.reljoin(prefix, f))
1819 fp.write(output)
1820 fp.write(output)
1820 fp.close()
1821 fp.close()
1821 return 0
1822 return 0
1822
1823
1823
1824
1824 @annotatesubrepoerror
1825 @annotatesubrepoerror
1825 def status(self, rev2, **opts):
1826 def status(self, rev2, **opts):
1826 rev1 = self._state[1]
1827 rev1 = self._state[1]
1827 if self._gitmissing() or not rev1:
1828 if self._gitmissing() or not rev1:
1828 # if the repo is missing, return no results
1829 # if the repo is missing, return no results
1829 return scmutil.status([], [], [], [], [], [], [])
1830 return scmutil.status([], [], [], [], [], [], [])
1830 modified, added, removed = [], [], []
1831 modified, added, removed = [], [], []
1831 self._gitupdatestat()
1832 self._gitupdatestat()
1832 if rev2:
1833 if rev2:
1833 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1834 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1834 else:
1835 else:
1835 command = ['diff-index', '--no-renames', rev1]
1836 command = ['diff-index', '--no-renames', rev1]
1836 out = self._gitcommand(command)
1837 out = self._gitcommand(command)
1837 for line in out.split('\n'):
1838 for line in out.split('\n'):
1838 tab = line.find('\t')
1839 tab = line.find('\t')
1839 if tab == -1:
1840 if tab == -1:
1840 continue
1841 continue
1841 status, f = line[tab - 1], line[tab + 1:]
1842 status, f = line[tab - 1], line[tab + 1:]
1842 if status == 'M':
1843 if status == 'M':
1843 modified.append(f)
1844 modified.append(f)
1844 elif status == 'A':
1845 elif status == 'A':
1845 added.append(f)
1846 added.append(f)
1846 elif status == 'D':
1847 elif status == 'D':
1847 removed.append(f)
1848 removed.append(f)
1848
1849
1849 deleted, unknown, ignored, clean = [], [], [], []
1850 deleted, unknown, ignored, clean = [], [], [], []
1850
1851
1851 command = ['status', '--porcelain', '-z']
1852 command = ['status', '--porcelain', '-z']
1852 if opts.get('unknown'):
1853 if opts.get('unknown'):
1853 command += ['--untracked-files=all']
1854 command += ['--untracked-files=all']
1854 if opts.get('ignored'):
1855 if opts.get('ignored'):
1855 command += ['--ignored']
1856 command += ['--ignored']
1856 out = self._gitcommand(command)
1857 out = self._gitcommand(command)
1857
1858
1858 changedfiles = set()
1859 changedfiles = set()
1859 changedfiles.update(modified)
1860 changedfiles.update(modified)
1860 changedfiles.update(added)
1861 changedfiles.update(added)
1861 changedfiles.update(removed)
1862 changedfiles.update(removed)
1862 for line in out.split('\0'):
1863 for line in out.split('\0'):
1863 if not line:
1864 if not line:
1864 continue
1865 continue
1865 st = line[0:2]
1866 st = line[0:2]
1866 #moves and copies show 2 files on one line
1867 #moves and copies show 2 files on one line
1867 if line.find('\0') >= 0:
1868 if line.find('\0') >= 0:
1868 filename1, filename2 = line[3:].split('\0')
1869 filename1, filename2 = line[3:].split('\0')
1869 else:
1870 else:
1870 filename1 = line[3:]
1871 filename1 = line[3:]
1871 filename2 = None
1872 filename2 = None
1872
1873
1873 changedfiles.add(filename1)
1874 changedfiles.add(filename1)
1874 if filename2:
1875 if filename2:
1875 changedfiles.add(filename2)
1876 changedfiles.add(filename2)
1876
1877
1877 if st == '??':
1878 if st == '??':
1878 unknown.append(filename1)
1879 unknown.append(filename1)
1879 elif st == '!!':
1880 elif st == '!!':
1880 ignored.append(filename1)
1881 ignored.append(filename1)
1881
1882
1882 if opts.get('clean'):
1883 if opts.get('clean'):
1883 out = self._gitcommand(['ls-files'])
1884 out = self._gitcommand(['ls-files'])
1884 for f in out.split('\n'):
1885 for f in out.split('\n'):
1885 if not f in changedfiles:
1886 if not f in changedfiles:
1886 clean.append(f)
1887 clean.append(f)
1887
1888
1888 return scmutil.status(modified, added, removed, deleted,
1889 return scmutil.status(modified, added, removed, deleted,
1889 unknown, ignored, clean)
1890 unknown, ignored, clean)
1890
1891
1891 @annotatesubrepoerror
1892 @annotatesubrepoerror
1892 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1893 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1893 node1 = self._state[1]
1894 node1 = self._state[1]
1894 cmd = ['diff', '--no-renames']
1895 cmd = ['diff', '--no-renames']
1895 if opts['stat']:
1896 if opts['stat']:
1896 cmd.append('--stat')
1897 cmd.append('--stat')
1897 else:
1898 else:
1898 # for Git, this also implies '-p'
1899 # for Git, this also implies '-p'
1899 cmd.append('-U%d' % diffopts.context)
1900 cmd.append('-U%d' % diffopts.context)
1900
1901
1901 gitprefix = self.wvfs.reljoin(prefix, self._path)
1902 gitprefix = self.wvfs.reljoin(prefix, self._path)
1902
1903
1903 if diffopts.noprefix:
1904 if diffopts.noprefix:
1904 cmd.extend(['--src-prefix=%s/' % gitprefix,
1905 cmd.extend(['--src-prefix=%s/' % gitprefix,
1905 '--dst-prefix=%s/' % gitprefix])
1906 '--dst-prefix=%s/' % gitprefix])
1906 else:
1907 else:
1907 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1908 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1908 '--dst-prefix=b/%s/' % gitprefix])
1909 '--dst-prefix=b/%s/' % gitprefix])
1909
1910
1910 if diffopts.ignorews:
1911 if diffopts.ignorews:
1911 cmd.append('--ignore-all-space')
1912 cmd.append('--ignore-all-space')
1912 if diffopts.ignorewsamount:
1913 if diffopts.ignorewsamount:
1913 cmd.append('--ignore-space-change')
1914 cmd.append('--ignore-space-change')
1914 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1915 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1915 and diffopts.ignoreblanklines:
1916 and diffopts.ignoreblanklines:
1916 cmd.append('--ignore-blank-lines')
1917 cmd.append('--ignore-blank-lines')
1917
1918
1918 cmd.append(node1)
1919 cmd.append(node1)
1919 if node2:
1920 if node2:
1920 cmd.append(node2)
1921 cmd.append(node2)
1921
1922
1922 output = ""
1923 output = ""
1923 if match.always():
1924 if match.always():
1924 output += self._gitcommand(cmd) + '\n'
1925 output += self._gitcommand(cmd) + '\n'
1925 else:
1926 else:
1926 st = self.status(node2)[:3]
1927 st = self.status(node2)[:3]
1927 files = [f for sublist in st for f in sublist]
1928 files = [f for sublist in st for f in sublist]
1928 for f in files:
1929 for f in files:
1929 if match(f):
1930 if match(f):
1930 output += self._gitcommand(cmd + ['--', f]) + '\n'
1931 output += self._gitcommand(cmd + ['--', f]) + '\n'
1931
1932
1932 if output.strip():
1933 if output.strip():
1933 ui.write(output)
1934 ui.write(output)
1934
1935
1935 @annotatesubrepoerror
1936 @annotatesubrepoerror
1936 def revert(self, substate, *pats, **opts):
1937 def revert(self, substate, *pats, **opts):
1937 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1938 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1938 if not opts.get('no_backup'):
1939 if not opts.get('no_backup'):
1939 status = self.status(None)
1940 status = self.status(None)
1940 names = status.modified
1941 names = status.modified
1941 for name in names:
1942 for name in names:
1942 bakname = scmutil.origpath(self.ui, self._subparent, name)
1943 bakname = scmutil.origpath(self.ui, self._subparent, name)
1943 self.ui.note(_('saving current version of %s as %s\n') %
1944 self.ui.note(_('saving current version of %s as %s\n') %
1944 (name, bakname))
1945 (name, bakname))
1945 self.wvfs.rename(name, bakname)
1946 self.wvfs.rename(name, bakname)
1946
1947
1947 if not opts.get('dry_run'):
1948 if not opts.get('dry_run'):
1948 self.get(substate, overwrite=True)
1949 self.get(substate, overwrite=True)
1949 return []
1950 return []
1950
1951
1951 def shortid(self, revid):
1952 def shortid(self, revid):
1952 return revid[:7]
1953 return revid[:7]
1953
1954
1954 types = {
1955 types = {
1955 'hg': hgsubrepo,
1956 'hg': hgsubrepo,
1956 'svn': svnsubrepo,
1957 'svn': svnsubrepo,
1957 'git': gitsubrepo,
1958 'git': gitsubrepo,
1958 }
1959 }
@@ -1,1267 +1,1268 b''
1 # templater.py - template expansion for output
1 # templater.py - template expansion for output
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11 import re
11 import re
12 import types
12 import types
13
13
14 from .i18n import _
14 from .i18n import _
15 from . import (
15 from . import (
16 config,
16 config,
17 error,
17 error,
18 minirst,
18 minirst,
19 parser,
19 parser,
20 pycompat,
20 registrar,
21 registrar,
21 revset as revsetmod,
22 revset as revsetmod,
22 templatefilters,
23 templatefilters,
23 templatekw,
24 templatekw,
24 util,
25 util,
25 )
26 )
26
27
27 # template parsing
28 # template parsing
28
29
29 elements = {
30 elements = {
30 # token-type: binding-strength, primary, prefix, infix, suffix
31 # token-type: binding-strength, primary, prefix, infix, suffix
31 "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
32 "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
32 ",": (2, None, None, ("list", 2), None),
33 ",": (2, None, None, ("list", 2), None),
33 "|": (5, None, None, ("|", 5), None),
34 "|": (5, None, None, ("|", 5), None),
34 "%": (6, None, None, ("%", 6), None),
35 "%": (6, None, None, ("%", 6), None),
35 ")": (0, None, None, None, None),
36 ")": (0, None, None, None, None),
36 "+": (3, None, None, ("+", 3), None),
37 "+": (3, None, None, ("+", 3), None),
37 "-": (3, None, ("negate", 10), ("-", 3), None),
38 "-": (3, None, ("negate", 10), ("-", 3), None),
38 "*": (4, None, None, ("*", 4), None),
39 "*": (4, None, None, ("*", 4), None),
39 "/": (4, None, None, ("/", 4), None),
40 "/": (4, None, None, ("/", 4), None),
40 "integer": (0, "integer", None, None, None),
41 "integer": (0, "integer", None, None, None),
41 "symbol": (0, "symbol", None, None, None),
42 "symbol": (0, "symbol", None, None, None),
42 "string": (0, "string", None, None, None),
43 "string": (0, "string", None, None, None),
43 "template": (0, "template", None, None, None),
44 "template": (0, "template", None, None, None),
44 "end": (0, None, None, None, None),
45 "end": (0, None, None, None, None),
45 }
46 }
46
47
47 def tokenize(program, start, end, term=None):
48 def tokenize(program, start, end, term=None):
48 """Parse a template expression into a stream of tokens, which must end
49 """Parse a template expression into a stream of tokens, which must end
49 with term if specified"""
50 with term if specified"""
50 pos = start
51 pos = start
51 while pos < end:
52 while pos < end:
52 c = program[pos]
53 c = program[pos]
53 if c.isspace(): # skip inter-token whitespace
54 if c.isspace(): # skip inter-token whitespace
54 pass
55 pass
55 elif c in "(,)%|+-*/": # handle simple operators
56 elif c in "(,)%|+-*/": # handle simple operators
56 yield (c, None, pos)
57 yield (c, None, pos)
57 elif c in '"\'': # handle quoted templates
58 elif c in '"\'': # handle quoted templates
58 s = pos + 1
59 s = pos + 1
59 data, pos = _parsetemplate(program, s, end, c)
60 data, pos = _parsetemplate(program, s, end, c)
60 yield ('template', data, s)
61 yield ('template', data, s)
61 pos -= 1
62 pos -= 1
62 elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'):
63 elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'):
63 # handle quoted strings
64 # handle quoted strings
64 c = program[pos + 1]
65 c = program[pos + 1]
65 s = pos = pos + 2
66 s = pos = pos + 2
66 while pos < end: # find closing quote
67 while pos < end: # find closing quote
67 d = program[pos]
68 d = program[pos]
68 if d == '\\': # skip over escaped characters
69 if d == '\\': # skip over escaped characters
69 pos += 2
70 pos += 2
70 continue
71 continue
71 if d == c:
72 if d == c:
72 yield ('string', program[s:pos], s)
73 yield ('string', program[s:pos], s)
73 break
74 break
74 pos += 1
75 pos += 1
75 else:
76 else:
76 raise error.ParseError(_("unterminated string"), s)
77 raise error.ParseError(_("unterminated string"), s)
77 elif c.isdigit():
78 elif c.isdigit():
78 s = pos
79 s = pos
79 while pos < end:
80 while pos < end:
80 d = program[pos]
81 d = program[pos]
81 if not d.isdigit():
82 if not d.isdigit():
82 break
83 break
83 pos += 1
84 pos += 1
84 yield ('integer', program[s:pos], s)
85 yield ('integer', program[s:pos], s)
85 pos -= 1
86 pos -= 1
86 elif (c == '\\' and program[pos:pos + 2] in (r"\'", r'\"')
87 elif (c == '\\' and program[pos:pos + 2] in (r"\'", r'\"')
87 or c == 'r' and program[pos:pos + 3] in (r"r\'", r'r\"')):
88 or c == 'r' and program[pos:pos + 3] in (r"r\'", r'r\"')):
88 # handle escaped quoted strings for compatibility with 2.9.2-3.4,
89 # handle escaped quoted strings for compatibility with 2.9.2-3.4,
89 # where some of nested templates were preprocessed as strings and
90 # where some of nested templates were preprocessed as strings and
90 # then compiled. therefore, \"...\" was allowed. (issue4733)
91 # then compiled. therefore, \"...\" was allowed. (issue4733)
91 #
92 #
92 # processing flow of _evalifliteral() at 5ab28a2e9962:
93 # processing flow of _evalifliteral() at 5ab28a2e9962:
93 # outer template string -> stringify() -> compiletemplate()
94 # outer template string -> stringify() -> compiletemplate()
94 # ------------------------ ------------ ------------------
95 # ------------------------ ------------ ------------------
95 # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}]
96 # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}]
96 # ~~~~~~~~
97 # ~~~~~~~~
97 # escaped quoted string
98 # escaped quoted string
98 if c == 'r':
99 if c == 'r':
99 pos += 1
100 pos += 1
100 token = 'string'
101 token = 'string'
101 else:
102 else:
102 token = 'template'
103 token = 'template'
103 quote = program[pos:pos + 2]
104 quote = program[pos:pos + 2]
104 s = pos = pos + 2
105 s = pos = pos + 2
105 while pos < end: # find closing escaped quote
106 while pos < end: # find closing escaped quote
106 if program.startswith('\\\\\\', pos, end):
107 if program.startswith('\\\\\\', pos, end):
107 pos += 4 # skip over double escaped characters
108 pos += 4 # skip over double escaped characters
108 continue
109 continue
109 if program.startswith(quote, pos, end):
110 if program.startswith(quote, pos, end):
110 # interpret as if it were a part of an outer string
111 # interpret as if it were a part of an outer string
111 data = parser.unescapestr(program[s:pos])
112 data = parser.unescapestr(program[s:pos])
112 if token == 'template':
113 if token == 'template':
113 data = _parsetemplate(data, 0, len(data))[0]
114 data = _parsetemplate(data, 0, len(data))[0]
114 yield (token, data, s)
115 yield (token, data, s)
115 pos += 1
116 pos += 1
116 break
117 break
117 pos += 1
118 pos += 1
118 else:
119 else:
119 raise error.ParseError(_("unterminated string"), s)
120 raise error.ParseError(_("unterminated string"), s)
120 elif c.isalnum() or c in '_':
121 elif c.isalnum() or c in '_':
121 s = pos
122 s = pos
122 pos += 1
123 pos += 1
123 while pos < end: # find end of symbol
124 while pos < end: # find end of symbol
124 d = program[pos]
125 d = program[pos]
125 if not (d.isalnum() or d == "_"):
126 if not (d.isalnum() or d == "_"):
126 break
127 break
127 pos += 1
128 pos += 1
128 sym = program[s:pos]
129 sym = program[s:pos]
129 yield ('symbol', sym, s)
130 yield ('symbol', sym, s)
130 pos -= 1
131 pos -= 1
131 elif c == term:
132 elif c == term:
132 yield ('end', None, pos + 1)
133 yield ('end', None, pos + 1)
133 return
134 return
134 else:
135 else:
135 raise error.ParseError(_("syntax error"), pos)
136 raise error.ParseError(_("syntax error"), pos)
136 pos += 1
137 pos += 1
137 if term:
138 if term:
138 raise error.ParseError(_("unterminated template expansion"), start)
139 raise error.ParseError(_("unterminated template expansion"), start)
139 yield ('end', None, pos)
140 yield ('end', None, pos)
140
141
141 def _parsetemplate(tmpl, start, stop, quote=''):
142 def _parsetemplate(tmpl, start, stop, quote=''):
142 r"""
143 r"""
143 >>> _parsetemplate('foo{bar}"baz', 0, 12)
144 >>> _parsetemplate('foo{bar}"baz', 0, 12)
144 ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
145 ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12)
145 >>> _parsetemplate('foo{bar}"baz', 0, 12, quote='"')
146 >>> _parsetemplate('foo{bar}"baz', 0, 12, quote='"')
146 ([('string', 'foo'), ('symbol', 'bar')], 9)
147 ([('string', 'foo'), ('symbol', 'bar')], 9)
147 >>> _parsetemplate('foo"{bar}', 0, 9, quote='"')
148 >>> _parsetemplate('foo"{bar}', 0, 9, quote='"')
148 ([('string', 'foo')], 4)
149 ([('string', 'foo')], 4)
149 >>> _parsetemplate(r'foo\"bar"baz', 0, 12, quote='"')
150 >>> _parsetemplate(r'foo\"bar"baz', 0, 12, quote='"')
150 ([('string', 'foo"'), ('string', 'bar')], 9)
151 ([('string', 'foo"'), ('string', 'bar')], 9)
151 >>> _parsetemplate(r'foo\\"bar', 0, 10, quote='"')
152 >>> _parsetemplate(r'foo\\"bar', 0, 10, quote='"')
152 ([('string', 'foo\\')], 6)
153 ([('string', 'foo\\')], 6)
153 """
154 """
154 parsed = []
155 parsed = []
155 sepchars = '{' + quote
156 sepchars = '{' + quote
156 pos = start
157 pos = start
157 p = parser.parser(elements)
158 p = parser.parser(elements)
158 while pos < stop:
159 while pos < stop:
159 n = min((tmpl.find(c, pos, stop) for c in sepchars),
160 n = min((tmpl.find(c, pos, stop) for c in sepchars),
160 key=lambda n: (n < 0, n))
161 key=lambda n: (n < 0, n))
161 if n < 0:
162 if n < 0:
162 parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
163 parsed.append(('string', parser.unescapestr(tmpl[pos:stop])))
163 pos = stop
164 pos = stop
164 break
165 break
165 c = tmpl[n]
166 c = tmpl[n]
166 bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
167 bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
167 if bs % 2 == 1:
168 if bs % 2 == 1:
168 # escaped (e.g. '\{', '\\\{', but not '\\{')
169 # escaped (e.g. '\{', '\\\{', but not '\\{')
169 parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c))
170 parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c))
170 pos = n + 1
171 pos = n + 1
171 continue
172 continue
172 if n > pos:
173 if n > pos:
173 parsed.append(('string', parser.unescapestr(tmpl[pos:n])))
174 parsed.append(('string', parser.unescapestr(tmpl[pos:n])))
174 if c == quote:
175 if c == quote:
175 return parsed, n + 1
176 return parsed, n + 1
176
177
177 parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
178 parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
178 parsed.append(parseres)
179 parsed.append(parseres)
179
180
180 if quote:
181 if quote:
181 raise error.ParseError(_("unterminated string"), start)
182 raise error.ParseError(_("unterminated string"), start)
182 return parsed, pos
183 return parsed, pos
183
184
184 def _unnesttemplatelist(tree):
185 def _unnesttemplatelist(tree):
185 """Expand list of templates to node tuple
186 """Expand list of templates to node tuple
186
187
187 >>> def f(tree):
188 >>> def f(tree):
188 ... print prettyformat(_unnesttemplatelist(tree))
189 ... print prettyformat(_unnesttemplatelist(tree))
189 >>> f(('template', []))
190 >>> f(('template', []))
190 ('string', '')
191 ('string', '')
191 >>> f(('template', [('string', 'foo')]))
192 >>> f(('template', [('string', 'foo')]))
192 ('string', 'foo')
193 ('string', 'foo')
193 >>> f(('template', [('string', 'foo'), ('symbol', 'rev')]))
194 >>> f(('template', [('string', 'foo'), ('symbol', 'rev')]))
194 (template
195 (template
195 ('string', 'foo')
196 ('string', 'foo')
196 ('symbol', 'rev'))
197 ('symbol', 'rev'))
197 >>> f(('template', [('symbol', 'rev')])) # template(rev) -> str
198 >>> f(('template', [('symbol', 'rev')])) # template(rev) -> str
198 (template
199 (template
199 ('symbol', 'rev'))
200 ('symbol', 'rev'))
200 >>> f(('template', [('template', [('string', 'foo')])]))
201 >>> f(('template', [('template', [('string', 'foo')])]))
201 ('string', 'foo')
202 ('string', 'foo')
202 """
203 """
203 if not isinstance(tree, tuple):
204 if not isinstance(tree, tuple):
204 return tree
205 return tree
205 op = tree[0]
206 op = tree[0]
206 if op != 'template':
207 if op != 'template':
207 return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])
208 return (op,) + tuple(_unnesttemplatelist(x) for x in tree[1:])
208
209
209 assert len(tree) == 2
210 assert len(tree) == 2
210 xs = tuple(_unnesttemplatelist(x) for x in tree[1])
211 xs = tuple(_unnesttemplatelist(x) for x in tree[1])
211 if not xs:
212 if not xs:
212 return ('string', '') # empty template ""
213 return ('string', '') # empty template ""
213 elif len(xs) == 1 and xs[0][0] == 'string':
214 elif len(xs) == 1 and xs[0][0] == 'string':
214 return xs[0] # fast path for string with no template fragment "x"
215 return xs[0] # fast path for string with no template fragment "x"
215 else:
216 else:
216 return (op,) + xs
217 return (op,) + xs
217
218
218 def parse(tmpl):
219 def parse(tmpl):
219 """Parse template string into tree"""
220 """Parse template string into tree"""
220 parsed, pos = _parsetemplate(tmpl, 0, len(tmpl))
221 parsed, pos = _parsetemplate(tmpl, 0, len(tmpl))
221 assert pos == len(tmpl), 'unquoted template should be consumed'
222 assert pos == len(tmpl), 'unquoted template should be consumed'
222 return _unnesttemplatelist(('template', parsed))
223 return _unnesttemplatelist(('template', parsed))
223
224
224 def _parseexpr(expr):
225 def _parseexpr(expr):
225 """Parse a template expression into tree
226 """Parse a template expression into tree
226
227
227 >>> _parseexpr('"foo"')
228 >>> _parseexpr('"foo"')
228 ('string', 'foo')
229 ('string', 'foo')
229 >>> _parseexpr('foo(bar)')
230 >>> _parseexpr('foo(bar)')
230 ('func', ('symbol', 'foo'), ('symbol', 'bar'))
231 ('func', ('symbol', 'foo'), ('symbol', 'bar'))
231 >>> _parseexpr('foo(')
232 >>> _parseexpr('foo(')
232 Traceback (most recent call last):
233 Traceback (most recent call last):
233 ...
234 ...
234 ParseError: ('not a prefix: end', 4)
235 ParseError: ('not a prefix: end', 4)
235 >>> _parseexpr('"foo" "bar"')
236 >>> _parseexpr('"foo" "bar"')
236 Traceback (most recent call last):
237 Traceback (most recent call last):
237 ...
238 ...
238 ParseError: ('invalid token', 7)
239 ParseError: ('invalid token', 7)
239 """
240 """
240 p = parser.parser(elements)
241 p = parser.parser(elements)
241 tree, pos = p.parse(tokenize(expr, 0, len(expr)))
242 tree, pos = p.parse(tokenize(expr, 0, len(expr)))
242 if pos != len(expr):
243 if pos != len(expr):
243 raise error.ParseError(_('invalid token'), pos)
244 raise error.ParseError(_('invalid token'), pos)
244 return _unnesttemplatelist(tree)
245 return _unnesttemplatelist(tree)
245
246
246 def prettyformat(tree):
247 def prettyformat(tree):
247 return parser.prettyformat(tree, ('integer', 'string', 'symbol'))
248 return parser.prettyformat(tree, ('integer', 'string', 'symbol'))
248
249
249 def compileexp(exp, context, curmethods):
250 def compileexp(exp, context, curmethods):
250 """Compile parsed template tree to (func, data) pair"""
251 """Compile parsed template tree to (func, data) pair"""
251 t = exp[0]
252 t = exp[0]
252 if t in curmethods:
253 if t in curmethods:
253 return curmethods[t](exp, context)
254 return curmethods[t](exp, context)
254 raise error.ParseError(_("unknown method '%s'") % t)
255 raise error.ParseError(_("unknown method '%s'") % t)
255
256
256 # template evaluation
257 # template evaluation
257
258
258 def getsymbol(exp):
259 def getsymbol(exp):
259 if exp[0] == 'symbol':
260 if exp[0] == 'symbol':
260 return exp[1]
261 return exp[1]
261 raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
262 raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
262
263
263 def getlist(x):
264 def getlist(x):
264 if not x:
265 if not x:
265 return []
266 return []
266 if x[0] == 'list':
267 if x[0] == 'list':
267 return getlist(x[1]) + [x[2]]
268 return getlist(x[1]) + [x[2]]
268 return [x]
269 return [x]
269
270
270 def gettemplate(exp, context):
271 def gettemplate(exp, context):
271 """Compile given template tree or load named template from map file;
272 """Compile given template tree or load named template from map file;
272 returns (func, data) pair"""
273 returns (func, data) pair"""
273 if exp[0] in ('template', 'string'):
274 if exp[0] in ('template', 'string'):
274 return compileexp(exp, context, methods)
275 return compileexp(exp, context, methods)
275 if exp[0] == 'symbol':
276 if exp[0] == 'symbol':
276 # unlike runsymbol(), here 'symbol' is always taken as template name
277 # unlike runsymbol(), here 'symbol' is always taken as template name
277 # even if it exists in mapping. this allows us to override mapping
278 # even if it exists in mapping. this allows us to override mapping
278 # by web templates, e.g. 'changelogtag' is redefined in map file.
279 # by web templates, e.g. 'changelogtag' is redefined in map file.
279 return context._load(exp[1])
280 return context._load(exp[1])
280 raise error.ParseError(_("expected template specifier"))
281 raise error.ParseError(_("expected template specifier"))
281
282
282 def evalfuncarg(context, mapping, arg):
283 def evalfuncarg(context, mapping, arg):
283 func, data = arg
284 func, data = arg
284 # func() may return string, generator of strings or arbitrary object such
285 # func() may return string, generator of strings or arbitrary object such
285 # as date tuple, but filter does not want generator.
286 # as date tuple, but filter does not want generator.
286 thing = func(context, mapping, data)
287 thing = func(context, mapping, data)
287 if isinstance(thing, types.GeneratorType):
288 if isinstance(thing, types.GeneratorType):
288 thing = stringify(thing)
289 thing = stringify(thing)
289 return thing
290 return thing
290
291
291 def evalboolean(context, mapping, arg):
292 def evalboolean(context, mapping, arg):
292 """Evaluate given argument as boolean, but also takes boolean literals"""
293 """Evaluate given argument as boolean, but also takes boolean literals"""
293 func, data = arg
294 func, data = arg
294 if func is runsymbol:
295 if func is runsymbol:
295 thing = func(context, mapping, data, default=None)
296 thing = func(context, mapping, data, default=None)
296 if thing is None:
297 if thing is None:
297 # not a template keyword, takes as a boolean literal
298 # not a template keyword, takes as a boolean literal
298 thing = util.parsebool(data)
299 thing = util.parsebool(data)
299 else:
300 else:
300 thing = func(context, mapping, data)
301 thing = func(context, mapping, data)
301 if isinstance(thing, bool):
302 if isinstance(thing, bool):
302 return thing
303 return thing
303 # other objects are evaluated as strings, which means 0 is True, but
304 # other objects are evaluated as strings, which means 0 is True, but
304 # empty dict/list should be False as they are expected to be ''
305 # empty dict/list should be False as they are expected to be ''
305 return bool(stringify(thing))
306 return bool(stringify(thing))
306
307
307 def evalinteger(context, mapping, arg, err):
308 def evalinteger(context, mapping, arg, err):
308 v = evalfuncarg(context, mapping, arg)
309 v = evalfuncarg(context, mapping, arg)
309 try:
310 try:
310 return int(v)
311 return int(v)
311 except (TypeError, ValueError):
312 except (TypeError, ValueError):
312 raise error.ParseError(err)
313 raise error.ParseError(err)
313
314
314 def evalstring(context, mapping, arg):
315 def evalstring(context, mapping, arg):
315 func, data = arg
316 func, data = arg
316 return stringify(func(context, mapping, data))
317 return stringify(func(context, mapping, data))
317
318
318 def evalstringliteral(context, mapping, arg):
319 def evalstringliteral(context, mapping, arg):
319 """Evaluate given argument as string template, but returns symbol name
320 """Evaluate given argument as string template, but returns symbol name
320 if it is unknown"""
321 if it is unknown"""
321 func, data = arg
322 func, data = arg
322 if func is runsymbol:
323 if func is runsymbol:
323 thing = func(context, mapping, data, default=data)
324 thing = func(context, mapping, data, default=data)
324 else:
325 else:
325 thing = func(context, mapping, data)
326 thing = func(context, mapping, data)
326 return stringify(thing)
327 return stringify(thing)
327
328
328 def runinteger(context, mapping, data):
329 def runinteger(context, mapping, data):
329 return int(data)
330 return int(data)
330
331
331 def runstring(context, mapping, data):
332 def runstring(context, mapping, data):
332 return data
333 return data
333
334
334 def _recursivesymbolblocker(key):
335 def _recursivesymbolblocker(key):
335 def showrecursion(**args):
336 def showrecursion(**args):
336 raise error.Abort(_("recursive reference '%s' in template") % key)
337 raise error.Abort(_("recursive reference '%s' in template") % key)
337 return showrecursion
338 return showrecursion
338
339
339 def _runrecursivesymbol(context, mapping, key):
340 def _runrecursivesymbol(context, mapping, key):
340 raise error.Abort(_("recursive reference '%s' in template") % key)
341 raise error.Abort(_("recursive reference '%s' in template") % key)
341
342
342 def runsymbol(context, mapping, key, default=''):
343 def runsymbol(context, mapping, key, default=''):
343 v = mapping.get(key)
344 v = mapping.get(key)
344 if v is None:
345 if v is None:
345 v = context._defaults.get(key)
346 v = context._defaults.get(key)
346 if v is None:
347 if v is None:
347 # put poison to cut recursion. we can't move this to parsing phase
348 # put poison to cut recursion. we can't move this to parsing phase
348 # because "x = {x}" is allowed if "x" is a keyword. (issue4758)
349 # because "x = {x}" is allowed if "x" is a keyword. (issue4758)
349 safemapping = mapping.copy()
350 safemapping = mapping.copy()
350 safemapping[key] = _recursivesymbolblocker(key)
351 safemapping[key] = _recursivesymbolblocker(key)
351 try:
352 try:
352 v = context.process(key, safemapping)
353 v = context.process(key, safemapping)
353 except TemplateNotFound:
354 except TemplateNotFound:
354 v = default
355 v = default
355 if callable(v):
356 if callable(v):
356 return v(**mapping)
357 return v(**mapping)
357 return v
358 return v
358
359
359 def buildtemplate(exp, context):
360 def buildtemplate(exp, context):
360 ctmpl = [compileexp(e, context, methods) for e in exp[1:]]
361 ctmpl = [compileexp(e, context, methods) for e in exp[1:]]
361 return (runtemplate, ctmpl)
362 return (runtemplate, ctmpl)
362
363
363 def runtemplate(context, mapping, template):
364 def runtemplate(context, mapping, template):
364 for func, data in template:
365 for func, data in template:
365 yield func(context, mapping, data)
366 yield func(context, mapping, data)
366
367
367 def buildfilter(exp, context):
368 def buildfilter(exp, context):
368 arg = compileexp(exp[1], context, methods)
369 arg = compileexp(exp[1], context, methods)
369 n = getsymbol(exp[2])
370 n = getsymbol(exp[2])
370 if n in context._filters:
371 if n in context._filters:
371 filt = context._filters[n]
372 filt = context._filters[n]
372 return (runfilter, (arg, filt))
373 return (runfilter, (arg, filt))
373 if n in funcs:
374 if n in funcs:
374 f = funcs[n]
375 f = funcs[n]
375 return (f, [arg])
376 return (f, [arg])
376 raise error.ParseError(_("unknown function '%s'") % n)
377 raise error.ParseError(_("unknown function '%s'") % n)
377
378
378 def runfilter(context, mapping, data):
379 def runfilter(context, mapping, data):
379 arg, filt = data
380 arg, filt = data
380 thing = evalfuncarg(context, mapping, arg)
381 thing = evalfuncarg(context, mapping, arg)
381 try:
382 try:
382 return filt(thing)
383 return filt(thing)
383 except (ValueError, AttributeError, TypeError):
384 except (ValueError, AttributeError, TypeError):
384 if isinstance(arg[1], tuple):
385 if isinstance(arg[1], tuple):
385 dt = arg[1][1]
386 dt = arg[1][1]
386 else:
387 else:
387 dt = arg[1]
388 dt = arg[1]
388 raise error.Abort(_("template filter '%s' is not compatible with "
389 raise error.Abort(_("template filter '%s' is not compatible with "
389 "keyword '%s'") % (filt.func_name, dt))
390 "keyword '%s'") % (filt.func_name, dt))
390
391
391 def buildmap(exp, context):
392 def buildmap(exp, context):
392 func, data = compileexp(exp[1], context, methods)
393 func, data = compileexp(exp[1], context, methods)
393 tfunc, tdata = gettemplate(exp[2], context)
394 tfunc, tdata = gettemplate(exp[2], context)
394 return (runmap, (func, data, tfunc, tdata))
395 return (runmap, (func, data, tfunc, tdata))
395
396
396 def runmap(context, mapping, data):
397 def runmap(context, mapping, data):
397 func, data, tfunc, tdata = data
398 func, data, tfunc, tdata = data
398 d = func(context, mapping, data)
399 d = func(context, mapping, data)
399 if util.safehasattr(d, 'itermaps'):
400 if util.safehasattr(d, 'itermaps'):
400 diter = d.itermaps()
401 diter = d.itermaps()
401 else:
402 else:
402 try:
403 try:
403 diter = iter(d)
404 diter = iter(d)
404 except TypeError:
405 except TypeError:
405 if func is runsymbol:
406 if func is runsymbol:
406 raise error.ParseError(_("keyword '%s' is not iterable") % data)
407 raise error.ParseError(_("keyword '%s' is not iterable") % data)
407 else:
408 else:
408 raise error.ParseError(_("%r is not iterable") % d)
409 raise error.ParseError(_("%r is not iterable") % d)
409
410
410 for i in diter:
411 for i in diter:
411 lm = mapping.copy()
412 lm = mapping.copy()
412 if isinstance(i, dict):
413 if isinstance(i, dict):
413 lm.update(i)
414 lm.update(i)
414 lm['originalnode'] = mapping.get('node')
415 lm['originalnode'] = mapping.get('node')
415 yield tfunc(context, lm, tdata)
416 yield tfunc(context, lm, tdata)
416 else:
417 else:
417 # v is not an iterable of dicts, this happen when 'key'
418 # v is not an iterable of dicts, this happen when 'key'
418 # has been fully expanded already and format is useless.
419 # has been fully expanded already and format is useless.
419 # If so, return the expanded value.
420 # If so, return the expanded value.
420 yield i
421 yield i
421
422
422 def buildnegate(exp, context):
423 def buildnegate(exp, context):
423 arg = compileexp(exp[1], context, exprmethods)
424 arg = compileexp(exp[1], context, exprmethods)
424 return (runnegate, arg)
425 return (runnegate, arg)
425
426
426 def runnegate(context, mapping, data):
427 def runnegate(context, mapping, data):
427 data = evalinteger(context, mapping, data,
428 data = evalinteger(context, mapping, data,
428 _('negation needs an integer argument'))
429 _('negation needs an integer argument'))
429 return -data
430 return -data
430
431
431 def buildarithmetic(exp, context, func):
432 def buildarithmetic(exp, context, func):
432 left = compileexp(exp[1], context, exprmethods)
433 left = compileexp(exp[1], context, exprmethods)
433 right = compileexp(exp[2], context, exprmethods)
434 right = compileexp(exp[2], context, exprmethods)
434 return (runarithmetic, (func, left, right))
435 return (runarithmetic, (func, left, right))
435
436
436 def runarithmetic(context, mapping, data):
437 def runarithmetic(context, mapping, data):
437 func, left, right = data
438 func, left, right = data
438 left = evalinteger(context, mapping, left,
439 left = evalinteger(context, mapping, left,
439 _('arithmetic only defined on integers'))
440 _('arithmetic only defined on integers'))
440 right = evalinteger(context, mapping, right,
441 right = evalinteger(context, mapping, right,
441 _('arithmetic only defined on integers'))
442 _('arithmetic only defined on integers'))
442 try:
443 try:
443 return func(left, right)
444 return func(left, right)
444 except ZeroDivisionError:
445 except ZeroDivisionError:
445 raise error.Abort(_('division by zero is not defined'))
446 raise error.Abort(_('division by zero is not defined'))
446
447
447 def buildfunc(exp, context):
448 def buildfunc(exp, context):
448 n = getsymbol(exp[1])
449 n = getsymbol(exp[1])
449 args = [compileexp(x, context, exprmethods) for x in getlist(exp[2])]
450 args = [compileexp(x, context, exprmethods) for x in getlist(exp[2])]
450 if n in funcs:
451 if n in funcs:
451 f = funcs[n]
452 f = funcs[n]
452 return (f, args)
453 return (f, args)
453 if n in context._filters:
454 if n in context._filters:
454 if len(args) != 1:
455 if len(args) != 1:
455 raise error.ParseError(_("filter %s expects one argument") % n)
456 raise error.ParseError(_("filter %s expects one argument") % n)
456 f = context._filters[n]
457 f = context._filters[n]
457 return (runfilter, (args[0], f))
458 return (runfilter, (args[0], f))
458 raise error.ParseError(_("unknown function '%s'") % n)
459 raise error.ParseError(_("unknown function '%s'") % n)
459
460
460 # dict of template built-in functions
461 # dict of template built-in functions
461 funcs = {}
462 funcs = {}
462
463
463 templatefunc = registrar.templatefunc(funcs)
464 templatefunc = registrar.templatefunc(funcs)
464
465
465 @templatefunc('date(date[, fmt])')
466 @templatefunc('date(date[, fmt])')
466 def date(context, mapping, args):
467 def date(context, mapping, args):
467 """Format a date. See :hg:`help dates` for formatting
468 """Format a date. See :hg:`help dates` for formatting
468 strings. The default is a Unix date format, including the timezone:
469 strings. The default is a Unix date format, including the timezone:
469 "Mon Sep 04 15:13:13 2006 0700"."""
470 "Mon Sep 04 15:13:13 2006 0700"."""
470 if not (1 <= len(args) <= 2):
471 if not (1 <= len(args) <= 2):
471 # i18n: "date" is a keyword
472 # i18n: "date" is a keyword
472 raise error.ParseError(_("date expects one or two arguments"))
473 raise error.ParseError(_("date expects one or two arguments"))
473
474
474 date = evalfuncarg(context, mapping, args[0])
475 date = evalfuncarg(context, mapping, args[0])
475 fmt = None
476 fmt = None
476 if len(args) == 2:
477 if len(args) == 2:
477 fmt = evalstring(context, mapping, args[1])
478 fmt = evalstring(context, mapping, args[1])
478 try:
479 try:
479 if fmt is None:
480 if fmt is None:
480 return util.datestr(date)
481 return util.datestr(date)
481 else:
482 else:
482 return util.datestr(date, fmt)
483 return util.datestr(date, fmt)
483 except (TypeError, ValueError):
484 except (TypeError, ValueError):
484 # i18n: "date" is a keyword
485 # i18n: "date" is a keyword
485 raise error.ParseError(_("date expects a date information"))
486 raise error.ParseError(_("date expects a date information"))
486
487
487 @templatefunc('diff([includepattern [, excludepattern]])')
488 @templatefunc('diff([includepattern [, excludepattern]])')
488 def diff(context, mapping, args):
489 def diff(context, mapping, args):
489 """Show a diff, optionally
490 """Show a diff, optionally
490 specifying files to include or exclude."""
491 specifying files to include or exclude."""
491 if len(args) > 2:
492 if len(args) > 2:
492 # i18n: "diff" is a keyword
493 # i18n: "diff" is a keyword
493 raise error.ParseError(_("diff expects zero, one, or two arguments"))
494 raise error.ParseError(_("diff expects zero, one, or two arguments"))
494
495
495 def getpatterns(i):
496 def getpatterns(i):
496 if i < len(args):
497 if i < len(args):
497 s = evalstring(context, mapping, args[i]).strip()
498 s = evalstring(context, mapping, args[i]).strip()
498 if s:
499 if s:
499 return [s]
500 return [s]
500 return []
501 return []
501
502
502 ctx = mapping['ctx']
503 ctx = mapping['ctx']
503 chunks = ctx.diff(match=ctx.match([], getpatterns(0), getpatterns(1)))
504 chunks = ctx.diff(match=ctx.match([], getpatterns(0), getpatterns(1)))
504
505
505 return ''.join(chunks)
506 return ''.join(chunks)
506
507
507 @templatefunc('files(pattern)')
508 @templatefunc('files(pattern)')
508 def files(context, mapping, args):
509 def files(context, mapping, args):
509 """All files of the current changeset matching the pattern. See
510 """All files of the current changeset matching the pattern. See
510 :hg:`help patterns`."""
511 :hg:`help patterns`."""
511 if not len(args) == 1:
512 if not len(args) == 1:
512 # i18n: "files" is a keyword
513 # i18n: "files" is a keyword
513 raise error.ParseError(_("files expects one argument"))
514 raise error.ParseError(_("files expects one argument"))
514
515
515 raw = evalstring(context, mapping, args[0])
516 raw = evalstring(context, mapping, args[0])
516 ctx = mapping['ctx']
517 ctx = mapping['ctx']
517 m = ctx.match([raw])
518 m = ctx.match([raw])
518 files = list(ctx.matches(m))
519 files = list(ctx.matches(m))
519 return templatekw.showlist("file", files, **mapping)
520 return templatekw.showlist("file", files, **mapping)
520
521
521 @templatefunc('fill(text[, width[, initialident[, hangindent]]])')
522 @templatefunc('fill(text[, width[, initialident[, hangindent]]])')
522 def fill(context, mapping, args):
523 def fill(context, mapping, args):
523 """Fill many
524 """Fill many
524 paragraphs with optional indentation. See the "fill" filter."""
525 paragraphs with optional indentation. See the "fill" filter."""
525 if not (1 <= len(args) <= 4):
526 if not (1 <= len(args) <= 4):
526 # i18n: "fill" is a keyword
527 # i18n: "fill" is a keyword
527 raise error.ParseError(_("fill expects one to four arguments"))
528 raise error.ParseError(_("fill expects one to four arguments"))
528
529
529 text = evalstring(context, mapping, args[0])
530 text = evalstring(context, mapping, args[0])
530 width = 76
531 width = 76
531 initindent = ''
532 initindent = ''
532 hangindent = ''
533 hangindent = ''
533 if 2 <= len(args) <= 4:
534 if 2 <= len(args) <= 4:
534 width = evalinteger(context, mapping, args[1],
535 width = evalinteger(context, mapping, args[1],
535 # i18n: "fill" is a keyword
536 # i18n: "fill" is a keyword
536 _("fill expects an integer width"))
537 _("fill expects an integer width"))
537 try:
538 try:
538 initindent = evalstring(context, mapping, args[2])
539 initindent = evalstring(context, mapping, args[2])
539 hangindent = evalstring(context, mapping, args[3])
540 hangindent = evalstring(context, mapping, args[3])
540 except IndexError:
541 except IndexError:
541 pass
542 pass
542
543
543 return templatefilters.fill(text, width, initindent, hangindent)
544 return templatefilters.fill(text, width, initindent, hangindent)
544
545
545 @templatefunc('pad(text, width[, fillchar=\' \'[, left=False]])')
546 @templatefunc('pad(text, width[, fillchar=\' \'[, left=False]])')
546 def pad(context, mapping, args):
547 def pad(context, mapping, args):
547 """Pad text with a
548 """Pad text with a
548 fill character."""
549 fill character."""
549 if not (2 <= len(args) <= 4):
550 if not (2 <= len(args) <= 4):
550 # i18n: "pad" is a keyword
551 # i18n: "pad" is a keyword
551 raise error.ParseError(_("pad() expects two to four arguments"))
552 raise error.ParseError(_("pad() expects two to four arguments"))
552
553
553 width = evalinteger(context, mapping, args[1],
554 width = evalinteger(context, mapping, args[1],
554 # i18n: "pad" is a keyword
555 # i18n: "pad" is a keyword
555 _("pad() expects an integer width"))
556 _("pad() expects an integer width"))
556
557
557 text = evalstring(context, mapping, args[0])
558 text = evalstring(context, mapping, args[0])
558
559
559 left = False
560 left = False
560 fillchar = ' '
561 fillchar = ' '
561 if len(args) > 2:
562 if len(args) > 2:
562 fillchar = evalstring(context, mapping, args[2])
563 fillchar = evalstring(context, mapping, args[2])
563 if len(args) > 3:
564 if len(args) > 3:
564 left = evalboolean(context, mapping, args[3])
565 left = evalboolean(context, mapping, args[3])
565
566
566 if left:
567 if left:
567 return text.rjust(width, fillchar)
568 return text.rjust(width, fillchar)
568 else:
569 else:
569 return text.ljust(width, fillchar)
570 return text.ljust(width, fillchar)
570
571
571 @templatefunc('indent(text, indentchars[, firstline])')
572 @templatefunc('indent(text, indentchars[, firstline])')
572 def indent(context, mapping, args):
573 def indent(context, mapping, args):
573 """Indents all non-empty lines
574 """Indents all non-empty lines
574 with the characters given in the indentchars string. An optional
575 with the characters given in the indentchars string. An optional
575 third parameter will override the indent for the first line only
576 third parameter will override the indent for the first line only
576 if present."""
577 if present."""
577 if not (2 <= len(args) <= 3):
578 if not (2 <= len(args) <= 3):
578 # i18n: "indent" is a keyword
579 # i18n: "indent" is a keyword
579 raise error.ParseError(_("indent() expects two or three arguments"))
580 raise error.ParseError(_("indent() expects two or three arguments"))
580
581
581 text = evalstring(context, mapping, args[0])
582 text = evalstring(context, mapping, args[0])
582 indent = evalstring(context, mapping, args[1])
583 indent = evalstring(context, mapping, args[1])
583
584
584 if len(args) == 3:
585 if len(args) == 3:
585 firstline = evalstring(context, mapping, args[2])
586 firstline = evalstring(context, mapping, args[2])
586 else:
587 else:
587 firstline = indent
588 firstline = indent
588
589
589 # the indent function doesn't indent the first line, so we do it here
590 # the indent function doesn't indent the first line, so we do it here
590 return templatefilters.indent(firstline + text, indent)
591 return templatefilters.indent(firstline + text, indent)
591
592
592 @templatefunc('get(dict, key)')
593 @templatefunc('get(dict, key)')
593 def get(context, mapping, args):
594 def get(context, mapping, args):
594 """Get an attribute/key from an object. Some keywords
595 """Get an attribute/key from an object. Some keywords
595 are complex types. This function allows you to obtain the value of an
596 are complex types. This function allows you to obtain the value of an
596 attribute on these types."""
597 attribute on these types."""
597 if len(args) != 2:
598 if len(args) != 2:
598 # i18n: "get" is a keyword
599 # i18n: "get" is a keyword
599 raise error.ParseError(_("get() expects two arguments"))
600 raise error.ParseError(_("get() expects two arguments"))
600
601
601 dictarg = evalfuncarg(context, mapping, args[0])
602 dictarg = evalfuncarg(context, mapping, args[0])
602 if not util.safehasattr(dictarg, 'get'):
603 if not util.safehasattr(dictarg, 'get'):
603 # i18n: "get" is a keyword
604 # i18n: "get" is a keyword
604 raise error.ParseError(_("get() expects a dict as first argument"))
605 raise error.ParseError(_("get() expects a dict as first argument"))
605
606
606 key = evalfuncarg(context, mapping, args[1])
607 key = evalfuncarg(context, mapping, args[1])
607 return dictarg.get(key)
608 return dictarg.get(key)
608
609
609 @templatefunc('if(expr, then[, else])')
610 @templatefunc('if(expr, then[, else])')
610 def if_(context, mapping, args):
611 def if_(context, mapping, args):
611 """Conditionally execute based on the result of
612 """Conditionally execute based on the result of
612 an expression."""
613 an expression."""
613 if not (2 <= len(args) <= 3):
614 if not (2 <= len(args) <= 3):
614 # i18n: "if" is a keyword
615 # i18n: "if" is a keyword
615 raise error.ParseError(_("if expects two or three arguments"))
616 raise error.ParseError(_("if expects two or three arguments"))
616
617
617 test = evalboolean(context, mapping, args[0])
618 test = evalboolean(context, mapping, args[0])
618 if test:
619 if test:
619 yield args[1][0](context, mapping, args[1][1])
620 yield args[1][0](context, mapping, args[1][1])
620 elif len(args) == 3:
621 elif len(args) == 3:
621 yield args[2][0](context, mapping, args[2][1])
622 yield args[2][0](context, mapping, args[2][1])
622
623
623 @templatefunc('ifcontains(needle, haystack, then[, else])')
624 @templatefunc('ifcontains(needle, haystack, then[, else])')
624 def ifcontains(context, mapping, args):
625 def ifcontains(context, mapping, args):
625 """Conditionally execute based
626 """Conditionally execute based
626 on whether the item "needle" is in "haystack"."""
627 on whether the item "needle" is in "haystack"."""
627 if not (3 <= len(args) <= 4):
628 if not (3 <= len(args) <= 4):
628 # i18n: "ifcontains" is a keyword
629 # i18n: "ifcontains" is a keyword
629 raise error.ParseError(_("ifcontains expects three or four arguments"))
630 raise error.ParseError(_("ifcontains expects three or four arguments"))
630
631
631 needle = evalstring(context, mapping, args[0])
632 needle = evalstring(context, mapping, args[0])
632 haystack = evalfuncarg(context, mapping, args[1])
633 haystack = evalfuncarg(context, mapping, args[1])
633
634
634 if needle in haystack:
635 if needle in haystack:
635 yield args[2][0](context, mapping, args[2][1])
636 yield args[2][0](context, mapping, args[2][1])
636 elif len(args) == 4:
637 elif len(args) == 4:
637 yield args[3][0](context, mapping, args[3][1])
638 yield args[3][0](context, mapping, args[3][1])
638
639
639 @templatefunc('ifeq(expr1, expr2, then[, else])')
640 @templatefunc('ifeq(expr1, expr2, then[, else])')
640 def ifeq(context, mapping, args):
641 def ifeq(context, mapping, args):
641 """Conditionally execute based on
642 """Conditionally execute based on
642 whether 2 items are equivalent."""
643 whether 2 items are equivalent."""
643 if not (3 <= len(args) <= 4):
644 if not (3 <= len(args) <= 4):
644 # i18n: "ifeq" is a keyword
645 # i18n: "ifeq" is a keyword
645 raise error.ParseError(_("ifeq expects three or four arguments"))
646 raise error.ParseError(_("ifeq expects three or four arguments"))
646
647
647 test = evalstring(context, mapping, args[0])
648 test = evalstring(context, mapping, args[0])
648 match = evalstring(context, mapping, args[1])
649 match = evalstring(context, mapping, args[1])
649 if test == match:
650 if test == match:
650 yield args[2][0](context, mapping, args[2][1])
651 yield args[2][0](context, mapping, args[2][1])
651 elif len(args) == 4:
652 elif len(args) == 4:
652 yield args[3][0](context, mapping, args[3][1])
653 yield args[3][0](context, mapping, args[3][1])
653
654
654 @templatefunc('join(list, sep)')
655 @templatefunc('join(list, sep)')
655 def join(context, mapping, args):
656 def join(context, mapping, args):
656 """Join items in a list with a delimiter."""
657 """Join items in a list with a delimiter."""
657 if not (1 <= len(args) <= 2):
658 if not (1 <= len(args) <= 2):
658 # i18n: "join" is a keyword
659 # i18n: "join" is a keyword
659 raise error.ParseError(_("join expects one or two arguments"))
660 raise error.ParseError(_("join expects one or two arguments"))
660
661
661 joinset = args[0][0](context, mapping, args[0][1])
662 joinset = args[0][0](context, mapping, args[0][1])
662 if util.safehasattr(joinset, 'itermaps'):
663 if util.safehasattr(joinset, 'itermaps'):
663 jf = joinset.joinfmt
664 jf = joinset.joinfmt
664 joinset = [jf(x) for x in joinset.itermaps()]
665 joinset = [jf(x) for x in joinset.itermaps()]
665
666
666 joiner = " "
667 joiner = " "
667 if len(args) > 1:
668 if len(args) > 1:
668 joiner = evalstring(context, mapping, args[1])
669 joiner = evalstring(context, mapping, args[1])
669
670
670 first = True
671 first = True
671 for x in joinset:
672 for x in joinset:
672 if first:
673 if first:
673 first = False
674 first = False
674 else:
675 else:
675 yield joiner
676 yield joiner
676 yield x
677 yield x
677
678
678 @templatefunc('label(label, expr)')
679 @templatefunc('label(label, expr)')
679 def label(context, mapping, args):
680 def label(context, mapping, args):
680 """Apply a label to generated content. Content with
681 """Apply a label to generated content. Content with
681 a label applied can result in additional post-processing, such as
682 a label applied can result in additional post-processing, such as
682 automatic colorization."""
683 automatic colorization."""
683 if len(args) != 2:
684 if len(args) != 2:
684 # i18n: "label" is a keyword
685 # i18n: "label" is a keyword
685 raise error.ParseError(_("label expects two arguments"))
686 raise error.ParseError(_("label expects two arguments"))
686
687
687 ui = mapping['ui']
688 ui = mapping['ui']
688 thing = evalstring(context, mapping, args[1])
689 thing = evalstring(context, mapping, args[1])
689 # preserve unknown symbol as literal so effects like 'red', 'bold',
690 # preserve unknown symbol as literal so effects like 'red', 'bold',
690 # etc. don't need to be quoted
691 # etc. don't need to be quoted
691 label = evalstringliteral(context, mapping, args[0])
692 label = evalstringliteral(context, mapping, args[0])
692
693
693 return ui.label(thing, label)
694 return ui.label(thing, label)
694
695
695 @templatefunc('latesttag([pattern])')
696 @templatefunc('latesttag([pattern])')
696 def latesttag(context, mapping, args):
697 def latesttag(context, mapping, args):
697 """The global tags matching the given pattern on the
698 """The global tags matching the given pattern on the
698 most recent globally tagged ancestor of this changeset."""
699 most recent globally tagged ancestor of this changeset."""
699 if len(args) > 1:
700 if len(args) > 1:
700 # i18n: "latesttag" is a keyword
701 # i18n: "latesttag" is a keyword
701 raise error.ParseError(_("latesttag expects at most one argument"))
702 raise error.ParseError(_("latesttag expects at most one argument"))
702
703
703 pattern = None
704 pattern = None
704 if len(args) == 1:
705 if len(args) == 1:
705 pattern = evalstring(context, mapping, args[0])
706 pattern = evalstring(context, mapping, args[0])
706
707
707 return templatekw.showlatesttags(pattern, **mapping)
708 return templatekw.showlatesttags(pattern, **mapping)
708
709
709 @templatefunc('localdate(date[, tz])')
710 @templatefunc('localdate(date[, tz])')
710 def localdate(context, mapping, args):
711 def localdate(context, mapping, args):
711 """Converts a date to the specified timezone.
712 """Converts a date to the specified timezone.
712 The default is local date."""
713 The default is local date."""
713 if not (1 <= len(args) <= 2):
714 if not (1 <= len(args) <= 2):
714 # i18n: "localdate" is a keyword
715 # i18n: "localdate" is a keyword
715 raise error.ParseError(_("localdate expects one or two arguments"))
716 raise error.ParseError(_("localdate expects one or two arguments"))
716
717
717 date = evalfuncarg(context, mapping, args[0])
718 date = evalfuncarg(context, mapping, args[0])
718 try:
719 try:
719 date = util.parsedate(date)
720 date = util.parsedate(date)
720 except AttributeError: # not str nor date tuple
721 except AttributeError: # not str nor date tuple
721 # i18n: "localdate" is a keyword
722 # i18n: "localdate" is a keyword
722 raise error.ParseError(_("localdate expects a date information"))
723 raise error.ParseError(_("localdate expects a date information"))
723 if len(args) >= 2:
724 if len(args) >= 2:
724 tzoffset = None
725 tzoffset = None
725 tz = evalfuncarg(context, mapping, args[1])
726 tz = evalfuncarg(context, mapping, args[1])
726 if isinstance(tz, str):
727 if isinstance(tz, str):
727 tzoffset, remainder = util.parsetimezone(tz)
728 tzoffset, remainder = util.parsetimezone(tz)
728 if remainder:
729 if remainder:
729 tzoffset = None
730 tzoffset = None
730 if tzoffset is None:
731 if tzoffset is None:
731 try:
732 try:
732 tzoffset = int(tz)
733 tzoffset = int(tz)
733 except (TypeError, ValueError):
734 except (TypeError, ValueError):
734 # i18n: "localdate" is a keyword
735 # i18n: "localdate" is a keyword
735 raise error.ParseError(_("localdate expects a timezone"))
736 raise error.ParseError(_("localdate expects a timezone"))
736 else:
737 else:
737 tzoffset = util.makedate()[1]
738 tzoffset = util.makedate()[1]
738 return (date[0], tzoffset)
739 return (date[0], tzoffset)
739
740
740 @templatefunc('mod(a, b)')
741 @templatefunc('mod(a, b)')
741 def mod(context, mapping, args):
742 def mod(context, mapping, args):
742 """Calculate a mod b such that a / b + a mod b == a"""
743 """Calculate a mod b such that a / b + a mod b == a"""
743 if not len(args) == 2:
744 if not len(args) == 2:
744 # i18n: "mod" is a keyword
745 # i18n: "mod" is a keyword
745 raise error.ParseError(_("mod expects two arguments"))
746 raise error.ParseError(_("mod expects two arguments"))
746
747
747 func = lambda a, b: a % b
748 func = lambda a, b: a % b
748 return runarithmetic(context, mapping, (func, args[0], args[1]))
749 return runarithmetic(context, mapping, (func, args[0], args[1]))
749
750
750 @templatefunc('relpath(path)')
751 @templatefunc('relpath(path)')
751 def relpath(context, mapping, args):
752 def relpath(context, mapping, args):
752 """Convert a repository-absolute path into a filesystem path relative to
753 """Convert a repository-absolute path into a filesystem path relative to
753 the current working directory."""
754 the current working directory."""
754 if len(args) != 1:
755 if len(args) != 1:
755 # i18n: "relpath" is a keyword
756 # i18n: "relpath" is a keyword
756 raise error.ParseError(_("relpath expects one argument"))
757 raise error.ParseError(_("relpath expects one argument"))
757
758
758 repo = mapping['ctx'].repo()
759 repo = mapping['ctx'].repo()
759 path = evalstring(context, mapping, args[0])
760 path = evalstring(context, mapping, args[0])
760 return repo.pathto(path)
761 return repo.pathto(path)
761
762
762 @templatefunc('revset(query[, formatargs...])')
763 @templatefunc('revset(query[, formatargs...])')
763 def revset(context, mapping, args):
764 def revset(context, mapping, args):
764 """Execute a revision set query. See
765 """Execute a revision set query. See
765 :hg:`help revset`."""
766 :hg:`help revset`."""
766 if not len(args) > 0:
767 if not len(args) > 0:
767 # i18n: "revset" is a keyword
768 # i18n: "revset" is a keyword
768 raise error.ParseError(_("revset expects one or more arguments"))
769 raise error.ParseError(_("revset expects one or more arguments"))
769
770
770 raw = evalstring(context, mapping, args[0])
771 raw = evalstring(context, mapping, args[0])
771 ctx = mapping['ctx']
772 ctx = mapping['ctx']
772 repo = ctx.repo()
773 repo = ctx.repo()
773
774
774 def query(expr):
775 def query(expr):
775 m = revsetmod.match(repo.ui, expr)
776 m = revsetmod.match(repo.ui, expr)
776 return m(repo)
777 return m(repo)
777
778
778 if len(args) > 1:
779 if len(args) > 1:
779 formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]]
780 formatargs = [evalfuncarg(context, mapping, a) for a in args[1:]]
780 revs = query(revsetmod.formatspec(raw, *formatargs))
781 revs = query(revsetmod.formatspec(raw, *formatargs))
781 revs = list(revs)
782 revs = list(revs)
782 else:
783 else:
783 revsetcache = mapping['cache'].setdefault("revsetcache", {})
784 revsetcache = mapping['cache'].setdefault("revsetcache", {})
784 if raw in revsetcache:
785 if raw in revsetcache:
785 revs = revsetcache[raw]
786 revs = revsetcache[raw]
786 else:
787 else:
787 revs = query(raw)
788 revs = query(raw)
788 revs = list(revs)
789 revs = list(revs)
789 revsetcache[raw] = revs
790 revsetcache[raw] = revs
790
791
791 return templatekw.showrevslist("revision", revs, **mapping)
792 return templatekw.showrevslist("revision", revs, **mapping)
792
793
793 @templatefunc('rstdoc(text, style)')
794 @templatefunc('rstdoc(text, style)')
794 def rstdoc(context, mapping, args):
795 def rstdoc(context, mapping, args):
795 """Format reStructuredText."""
796 """Format reStructuredText."""
796 if len(args) != 2:
797 if len(args) != 2:
797 # i18n: "rstdoc" is a keyword
798 # i18n: "rstdoc" is a keyword
798 raise error.ParseError(_("rstdoc expects two arguments"))
799 raise error.ParseError(_("rstdoc expects two arguments"))
799
800
800 text = evalstring(context, mapping, args[0])
801 text = evalstring(context, mapping, args[0])
801 style = evalstring(context, mapping, args[1])
802 style = evalstring(context, mapping, args[1])
802
803
803 return minirst.format(text, style=style, keep=['verbose'])
804 return minirst.format(text, style=style, keep=['verbose'])
804
805
805 @templatefunc('separate(sep, args)')
806 @templatefunc('separate(sep, args)')
806 def separate(context, mapping, args):
807 def separate(context, mapping, args):
807 """Add a separator between non-empty arguments."""
808 """Add a separator between non-empty arguments."""
808 if not args:
809 if not args:
809 # i18n: "separate" is a keyword
810 # i18n: "separate" is a keyword
810 raise error.ParseError(_("separate expects at least one argument"))
811 raise error.ParseError(_("separate expects at least one argument"))
811
812
812 sep = evalstring(context, mapping, args[0])
813 sep = evalstring(context, mapping, args[0])
813 first = True
814 first = True
814 for arg in args[1:]:
815 for arg in args[1:]:
815 argstr = evalstring(context, mapping, arg)
816 argstr = evalstring(context, mapping, arg)
816 if not argstr:
817 if not argstr:
817 continue
818 continue
818 if first:
819 if first:
819 first = False
820 first = False
820 else:
821 else:
821 yield sep
822 yield sep
822 yield argstr
823 yield argstr
823
824
824 @templatefunc('shortest(node, minlength=4)')
825 @templatefunc('shortest(node, minlength=4)')
825 def shortest(context, mapping, args):
826 def shortest(context, mapping, args):
826 """Obtain the shortest representation of
827 """Obtain the shortest representation of
827 a node."""
828 a node."""
828 if not (1 <= len(args) <= 2):
829 if not (1 <= len(args) <= 2):
829 # i18n: "shortest" is a keyword
830 # i18n: "shortest" is a keyword
830 raise error.ParseError(_("shortest() expects one or two arguments"))
831 raise error.ParseError(_("shortest() expects one or two arguments"))
831
832
832 node = evalstring(context, mapping, args[0])
833 node = evalstring(context, mapping, args[0])
833
834
834 minlength = 4
835 minlength = 4
835 if len(args) > 1:
836 if len(args) > 1:
836 minlength = evalinteger(context, mapping, args[1],
837 minlength = evalinteger(context, mapping, args[1],
837 # i18n: "shortest" is a keyword
838 # i18n: "shortest" is a keyword
838 _("shortest() expects an integer minlength"))
839 _("shortest() expects an integer minlength"))
839
840
840 # _partialmatch() of filtered changelog could take O(len(repo)) time,
841 # _partialmatch() of filtered changelog could take O(len(repo)) time,
841 # which would be unacceptably slow. so we look for hash collision in
842 # which would be unacceptably slow. so we look for hash collision in
842 # unfiltered space, which means some hashes may be slightly longer.
843 # unfiltered space, which means some hashes may be slightly longer.
843 cl = mapping['ctx']._repo.unfiltered().changelog
844 cl = mapping['ctx']._repo.unfiltered().changelog
844 def isvalid(test):
845 def isvalid(test):
845 try:
846 try:
846 if cl._partialmatch(test) is None:
847 if cl._partialmatch(test) is None:
847 return False
848 return False
848
849
849 try:
850 try:
850 i = int(test)
851 i = int(test)
851 # if we are a pure int, then starting with zero will not be
852 # if we are a pure int, then starting with zero will not be
852 # confused as a rev; or, obviously, if the int is larger than
853 # confused as a rev; or, obviously, if the int is larger than
853 # the value of the tip rev
854 # the value of the tip rev
854 if test[0] == '0' or i > len(cl):
855 if test[0] == '0' or i > len(cl):
855 return True
856 return True
856 return False
857 return False
857 except ValueError:
858 except ValueError:
858 return True
859 return True
859 except error.RevlogError:
860 except error.RevlogError:
860 return False
861 return False
861
862
862 shortest = node
863 shortest = node
863 startlength = max(6, minlength)
864 startlength = max(6, minlength)
864 length = startlength
865 length = startlength
865 while True:
866 while True:
866 test = node[:length]
867 test = node[:length]
867 if isvalid(test):
868 if isvalid(test):
868 shortest = test
869 shortest = test
869 if length == minlength or length > startlength:
870 if length == minlength or length > startlength:
870 return shortest
871 return shortest
871 length -= 1
872 length -= 1
872 else:
873 else:
873 length += 1
874 length += 1
874 if len(shortest) <= length:
875 if len(shortest) <= length:
875 return shortest
876 return shortest
876
877
877 @templatefunc('strip(text[, chars])')
878 @templatefunc('strip(text[, chars])')
878 def strip(context, mapping, args):
879 def strip(context, mapping, args):
879 """Strip characters from a string. By default,
880 """Strip characters from a string. By default,
880 strips all leading and trailing whitespace."""
881 strips all leading and trailing whitespace."""
881 if not (1 <= len(args) <= 2):
882 if not (1 <= len(args) <= 2):
882 # i18n: "strip" is a keyword
883 # i18n: "strip" is a keyword
883 raise error.ParseError(_("strip expects one or two arguments"))
884 raise error.ParseError(_("strip expects one or two arguments"))
884
885
885 text = evalstring(context, mapping, args[0])
886 text = evalstring(context, mapping, args[0])
886 if len(args) == 2:
887 if len(args) == 2:
887 chars = evalstring(context, mapping, args[1])
888 chars = evalstring(context, mapping, args[1])
888 return text.strip(chars)
889 return text.strip(chars)
889 return text.strip()
890 return text.strip()
890
891
891 @templatefunc('sub(pattern, replacement, expression)')
892 @templatefunc('sub(pattern, replacement, expression)')
892 def sub(context, mapping, args):
893 def sub(context, mapping, args):
893 """Perform text substitution
894 """Perform text substitution
894 using regular expressions."""
895 using regular expressions."""
895 if len(args) != 3:
896 if len(args) != 3:
896 # i18n: "sub" is a keyword
897 # i18n: "sub" is a keyword
897 raise error.ParseError(_("sub expects three arguments"))
898 raise error.ParseError(_("sub expects three arguments"))
898
899
899 pat = evalstring(context, mapping, args[0])
900 pat = evalstring(context, mapping, args[0])
900 rpl = evalstring(context, mapping, args[1])
901 rpl = evalstring(context, mapping, args[1])
901 src = evalstring(context, mapping, args[2])
902 src = evalstring(context, mapping, args[2])
902 try:
903 try:
903 patre = re.compile(pat)
904 patre = re.compile(pat)
904 except re.error:
905 except re.error:
905 # i18n: "sub" is a keyword
906 # i18n: "sub" is a keyword
906 raise error.ParseError(_("sub got an invalid pattern: %s") % pat)
907 raise error.ParseError(_("sub got an invalid pattern: %s") % pat)
907 try:
908 try:
908 yield patre.sub(rpl, src)
909 yield patre.sub(rpl, src)
909 except re.error:
910 except re.error:
910 # i18n: "sub" is a keyword
911 # i18n: "sub" is a keyword
911 raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)
912 raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)
912
913
913 @templatefunc('startswith(pattern, text)')
914 @templatefunc('startswith(pattern, text)')
914 def startswith(context, mapping, args):
915 def startswith(context, mapping, args):
915 """Returns the value from the "text" argument
916 """Returns the value from the "text" argument
916 if it begins with the content from the "pattern" argument."""
917 if it begins with the content from the "pattern" argument."""
917 if len(args) != 2:
918 if len(args) != 2:
918 # i18n: "startswith" is a keyword
919 # i18n: "startswith" is a keyword
919 raise error.ParseError(_("startswith expects two arguments"))
920 raise error.ParseError(_("startswith expects two arguments"))
920
921
921 patn = evalstring(context, mapping, args[0])
922 patn = evalstring(context, mapping, args[0])
922 text = evalstring(context, mapping, args[1])
923 text = evalstring(context, mapping, args[1])
923 if text.startswith(patn):
924 if text.startswith(patn):
924 return text
925 return text
925 return ''
926 return ''
926
927
927 @templatefunc('word(number, text[, separator])')
928 @templatefunc('word(number, text[, separator])')
928 def word(context, mapping, args):
929 def word(context, mapping, args):
929 """Return the nth word from a string."""
930 """Return the nth word from a string."""
930 if not (2 <= len(args) <= 3):
931 if not (2 <= len(args) <= 3):
931 # i18n: "word" is a keyword
932 # i18n: "word" is a keyword
932 raise error.ParseError(_("word expects two or three arguments, got %d")
933 raise error.ParseError(_("word expects two or three arguments, got %d")
933 % len(args))
934 % len(args))
934
935
935 num = evalinteger(context, mapping, args[0],
936 num = evalinteger(context, mapping, args[0],
936 # i18n: "word" is a keyword
937 # i18n: "word" is a keyword
937 _("word expects an integer index"))
938 _("word expects an integer index"))
938 text = evalstring(context, mapping, args[1])
939 text = evalstring(context, mapping, args[1])
939 if len(args) == 3:
940 if len(args) == 3:
940 splitter = evalstring(context, mapping, args[2])
941 splitter = evalstring(context, mapping, args[2])
941 else:
942 else:
942 splitter = None
943 splitter = None
943
944
944 tokens = text.split(splitter)
945 tokens = text.split(splitter)
945 if num >= len(tokens) or num < -len(tokens):
946 if num >= len(tokens) or num < -len(tokens):
946 return ''
947 return ''
947 else:
948 else:
948 return tokens[num]
949 return tokens[num]
949
950
950 # methods to interpret function arguments or inner expressions (e.g. {_(x)})
951 # methods to interpret function arguments or inner expressions (e.g. {_(x)})
951 exprmethods = {
952 exprmethods = {
952 "integer": lambda e, c: (runinteger, e[1]),
953 "integer": lambda e, c: (runinteger, e[1]),
953 "string": lambda e, c: (runstring, e[1]),
954 "string": lambda e, c: (runstring, e[1]),
954 "symbol": lambda e, c: (runsymbol, e[1]),
955 "symbol": lambda e, c: (runsymbol, e[1]),
955 "template": buildtemplate,
956 "template": buildtemplate,
956 "group": lambda e, c: compileexp(e[1], c, exprmethods),
957 "group": lambda e, c: compileexp(e[1], c, exprmethods),
957 # ".": buildmember,
958 # ".": buildmember,
958 "|": buildfilter,
959 "|": buildfilter,
959 "%": buildmap,
960 "%": buildmap,
960 "func": buildfunc,
961 "func": buildfunc,
961 "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
962 "+": lambda e, c: buildarithmetic(e, c, lambda a, b: a + b),
962 "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
963 "-": lambda e, c: buildarithmetic(e, c, lambda a, b: a - b),
963 "negate": buildnegate,
964 "negate": buildnegate,
964 "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
965 "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
965 "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
966 "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
966 }
967 }
967
968
968 # methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
969 # methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
969 methods = exprmethods.copy()
970 methods = exprmethods.copy()
970 methods["integer"] = exprmethods["symbol"] # '{1}' as variable
971 methods["integer"] = exprmethods["symbol"] # '{1}' as variable
971
972
972 class _aliasrules(parser.basealiasrules):
973 class _aliasrules(parser.basealiasrules):
973 """Parsing and expansion rule set of template aliases"""
974 """Parsing and expansion rule set of template aliases"""
974 _section = _('template alias')
975 _section = _('template alias')
975 _parse = staticmethod(_parseexpr)
976 _parse = staticmethod(_parseexpr)
976
977
977 @staticmethod
978 @staticmethod
978 def _trygetfunc(tree):
979 def _trygetfunc(tree):
979 """Return (name, args) if tree is func(...) or ...|filter; otherwise
980 """Return (name, args) if tree is func(...) or ...|filter; otherwise
980 None"""
981 None"""
981 if tree[0] == 'func' and tree[1][0] == 'symbol':
982 if tree[0] == 'func' and tree[1][0] == 'symbol':
982 return tree[1][1], getlist(tree[2])
983 return tree[1][1], getlist(tree[2])
983 if tree[0] == '|' and tree[2][0] == 'symbol':
984 if tree[0] == '|' and tree[2][0] == 'symbol':
984 return tree[2][1], [tree[1]]
985 return tree[2][1], [tree[1]]
985
986
986 def expandaliases(tree, aliases):
987 def expandaliases(tree, aliases):
987 """Return new tree of aliases are expanded"""
988 """Return new tree of aliases are expanded"""
988 aliasmap = _aliasrules.buildmap(aliases)
989 aliasmap = _aliasrules.buildmap(aliases)
989 return _aliasrules.expand(aliasmap, tree)
990 return _aliasrules.expand(aliasmap, tree)
990
991
991 # template engine
992 # template engine
992
993
993 stringify = templatefilters.stringify
994 stringify = templatefilters.stringify
994
995
995 def _flatten(thing):
996 def _flatten(thing):
996 '''yield a single stream from a possibly nested set of iterators'''
997 '''yield a single stream from a possibly nested set of iterators'''
997 if isinstance(thing, str):
998 if isinstance(thing, str):
998 yield thing
999 yield thing
999 elif thing is None:
1000 elif thing is None:
1000 pass
1001 pass
1001 elif not util.safehasattr(thing, '__iter__'):
1002 elif not util.safehasattr(thing, '__iter__'):
1002 yield str(thing)
1003 yield str(thing)
1003 else:
1004 else:
1004 for i in thing:
1005 for i in thing:
1005 if isinstance(i, str):
1006 if isinstance(i, str):
1006 yield i
1007 yield i
1007 elif i is None:
1008 elif i is None:
1008 pass
1009 pass
1009 elif not util.safehasattr(i, '__iter__'):
1010 elif not util.safehasattr(i, '__iter__'):
1010 yield str(i)
1011 yield str(i)
1011 else:
1012 else:
1012 for j in _flatten(i):
1013 for j in _flatten(i):
1013 yield j
1014 yield j
1014
1015
1015 def unquotestring(s):
1016 def unquotestring(s):
1016 '''unwrap quotes if any; otherwise returns unmodified string'''
1017 '''unwrap quotes if any; otherwise returns unmodified string'''
1017 if len(s) < 2 or s[0] not in "'\"" or s[0] != s[-1]:
1018 if len(s) < 2 or s[0] not in "'\"" or s[0] != s[-1]:
1018 return s
1019 return s
1019 return s[1:-1]
1020 return s[1:-1]
1020
1021
1021 class engine(object):
1022 class engine(object):
1022 '''template expansion engine.
1023 '''template expansion engine.
1023
1024
1024 template expansion works like this. a map file contains key=value
1025 template expansion works like this. a map file contains key=value
1025 pairs. if value is quoted, it is treated as string. otherwise, it
1026 pairs. if value is quoted, it is treated as string. otherwise, it
1026 is treated as name of template file.
1027 is treated as name of template file.
1027
1028
1028 templater is asked to expand a key in map. it looks up key, and
1029 templater is asked to expand a key in map. it looks up key, and
1029 looks for strings like this: {foo}. it expands {foo} by looking up
1030 looks for strings like this: {foo}. it expands {foo} by looking up
1030 foo in map, and substituting it. expansion is recursive: it stops
1031 foo in map, and substituting it. expansion is recursive: it stops
1031 when there is no more {foo} to replace.
1032 when there is no more {foo} to replace.
1032
1033
1033 expansion also allows formatting and filtering.
1034 expansion also allows formatting and filtering.
1034
1035
1035 format uses key to expand each item in list. syntax is
1036 format uses key to expand each item in list. syntax is
1036 {key%format}.
1037 {key%format}.
1037
1038
1038 filter uses function to transform value. syntax is
1039 filter uses function to transform value. syntax is
1039 {key|filter1|filter2|...}.'''
1040 {key|filter1|filter2|...}.'''
1040
1041
1041 def __init__(self, loader, filters=None, defaults=None, aliases=()):
1042 def __init__(self, loader, filters=None, defaults=None, aliases=()):
1042 self._loader = loader
1043 self._loader = loader
1043 if filters is None:
1044 if filters is None:
1044 filters = {}
1045 filters = {}
1045 self._filters = filters
1046 self._filters = filters
1046 if defaults is None:
1047 if defaults is None:
1047 defaults = {}
1048 defaults = {}
1048 self._defaults = defaults
1049 self._defaults = defaults
1049 self._aliasmap = _aliasrules.buildmap(aliases)
1050 self._aliasmap = _aliasrules.buildmap(aliases)
1050 self._cache = {} # key: (func, data)
1051 self._cache = {} # key: (func, data)
1051
1052
1052 def _load(self, t):
1053 def _load(self, t):
1053 '''load, parse, and cache a template'''
1054 '''load, parse, and cache a template'''
1054 if t not in self._cache:
1055 if t not in self._cache:
1055 # put poison to cut recursion while compiling 't'
1056 # put poison to cut recursion while compiling 't'
1056 self._cache[t] = (_runrecursivesymbol, t)
1057 self._cache[t] = (_runrecursivesymbol, t)
1057 try:
1058 try:
1058 x = parse(self._loader(t))
1059 x = parse(self._loader(t))
1059 if self._aliasmap:
1060 if self._aliasmap:
1060 x = _aliasrules.expand(self._aliasmap, x)
1061 x = _aliasrules.expand(self._aliasmap, x)
1061 self._cache[t] = compileexp(x, self, methods)
1062 self._cache[t] = compileexp(x, self, methods)
1062 except: # re-raises
1063 except: # re-raises
1063 del self._cache[t]
1064 del self._cache[t]
1064 raise
1065 raise
1065 return self._cache[t]
1066 return self._cache[t]
1066
1067
1067 def process(self, t, mapping):
1068 def process(self, t, mapping):
1068 '''Perform expansion. t is name of map element to expand.
1069 '''Perform expansion. t is name of map element to expand.
1069 mapping contains added elements for use during expansion. Is a
1070 mapping contains added elements for use during expansion. Is a
1070 generator.'''
1071 generator.'''
1071 func, data = self._load(t)
1072 func, data = self._load(t)
1072 return _flatten(func(self, mapping, data))
1073 return _flatten(func(self, mapping, data))
1073
1074
1074 engines = {'default': engine}
1075 engines = {'default': engine}
1075
1076
1076 def stylelist():
1077 def stylelist():
1077 paths = templatepaths()
1078 paths = templatepaths()
1078 if not paths:
1079 if not paths:
1079 return _('no templates found, try `hg debuginstall` for more info')
1080 return _('no templates found, try `hg debuginstall` for more info')
1080 dirlist = os.listdir(paths[0])
1081 dirlist = os.listdir(paths[0])
1081 stylelist = []
1082 stylelist = []
1082 for file in dirlist:
1083 for file in dirlist:
1083 split = file.split(".")
1084 split = file.split(".")
1084 if split[-1] in ('orig', 'rej'):
1085 if split[-1] in ('orig', 'rej'):
1085 continue
1086 continue
1086 if split[0] == "map-cmdline":
1087 if split[0] == "map-cmdline":
1087 stylelist.append(split[1])
1088 stylelist.append(split[1])
1088 return ", ".join(sorted(stylelist))
1089 return ", ".join(sorted(stylelist))
1089
1090
1090 def _readmapfile(mapfile):
1091 def _readmapfile(mapfile):
1091 """Load template elements from the given map file"""
1092 """Load template elements from the given map file"""
1092 if not os.path.exists(mapfile):
1093 if not os.path.exists(mapfile):
1093 raise error.Abort(_("style '%s' not found") % mapfile,
1094 raise error.Abort(_("style '%s' not found") % mapfile,
1094 hint=_("available styles: %s") % stylelist())
1095 hint=_("available styles: %s") % stylelist())
1095
1096
1096 base = os.path.dirname(mapfile)
1097 base = os.path.dirname(mapfile)
1097 conf = config.config(includepaths=templatepaths())
1098 conf = config.config(includepaths=templatepaths())
1098 conf.read(mapfile)
1099 conf.read(mapfile)
1099
1100
1100 cache = {}
1101 cache = {}
1101 tmap = {}
1102 tmap = {}
1102 for key, val in conf[''].items():
1103 for key, val in conf[''].items():
1103 if not val:
1104 if not val:
1104 raise error.ParseError(_('missing value'), conf.source('', key))
1105 raise error.ParseError(_('missing value'), conf.source('', key))
1105 if val[0] in "'\"":
1106 if val[0] in "'\"":
1106 if val[0] != val[-1]:
1107 if val[0] != val[-1]:
1107 raise error.ParseError(_('unmatched quotes'),
1108 raise error.ParseError(_('unmatched quotes'),
1108 conf.source('', key))
1109 conf.source('', key))
1109 cache[key] = unquotestring(val)
1110 cache[key] = unquotestring(val)
1110 elif key == "__base__":
1111 elif key == "__base__":
1111 # treat as a pointer to a base class for this style
1112 # treat as a pointer to a base class for this style
1112 path = util.normpath(os.path.join(base, val))
1113 path = util.normpath(os.path.join(base, val))
1113
1114
1114 # fallback check in template paths
1115 # fallback check in template paths
1115 if not os.path.exists(path):
1116 if not os.path.exists(path):
1116 for p in templatepaths():
1117 for p in templatepaths():
1117 p2 = util.normpath(os.path.join(p, val))
1118 p2 = util.normpath(os.path.join(p, val))
1118 if os.path.isfile(p2):
1119 if os.path.isfile(p2):
1119 path = p2
1120 path = p2
1120 break
1121 break
1121 p3 = util.normpath(os.path.join(p2, "map"))
1122 p3 = util.normpath(os.path.join(p2, "map"))
1122 if os.path.isfile(p3):
1123 if os.path.isfile(p3):
1123 path = p3
1124 path = p3
1124 break
1125 break
1125
1126
1126 bcache, btmap = _readmapfile(path)
1127 bcache, btmap = _readmapfile(path)
1127 for k in bcache:
1128 for k in bcache:
1128 if k not in cache:
1129 if k not in cache:
1129 cache[k] = bcache[k]
1130 cache[k] = bcache[k]
1130 for k in btmap:
1131 for k in btmap:
1131 if k not in tmap:
1132 if k not in tmap:
1132 tmap[k] = btmap[k]
1133 tmap[k] = btmap[k]
1133 else:
1134 else:
1134 val = 'default', val
1135 val = 'default', val
1135 if ':' in val[1]:
1136 if ':' in val[1]:
1136 val = val[1].split(':', 1)
1137 val = val[1].split(':', 1)
1137 tmap[key] = val[0], os.path.join(base, val[1])
1138 tmap[key] = val[0], os.path.join(base, val[1])
1138 return cache, tmap
1139 return cache, tmap
1139
1140
1140 class TemplateNotFound(error.Abort):
1141 class TemplateNotFound(error.Abort):
1141 pass
1142 pass
1142
1143
1143 class templater(object):
1144 class templater(object):
1144
1145
1145 def __init__(self, filters=None, defaults=None, cache=None, aliases=(),
1146 def __init__(self, filters=None, defaults=None, cache=None, aliases=(),
1146 minchunk=1024, maxchunk=65536):
1147 minchunk=1024, maxchunk=65536):
1147 '''set up template engine.
1148 '''set up template engine.
1148 filters is dict of functions. each transforms a value into another.
1149 filters is dict of functions. each transforms a value into another.
1149 defaults is dict of default map definitions.
1150 defaults is dict of default map definitions.
1150 aliases is list of alias (name, replacement) pairs.
1151 aliases is list of alias (name, replacement) pairs.
1151 '''
1152 '''
1152 if filters is None:
1153 if filters is None:
1153 filters = {}
1154 filters = {}
1154 if defaults is None:
1155 if defaults is None:
1155 defaults = {}
1156 defaults = {}
1156 if cache is None:
1157 if cache is None:
1157 cache = {}
1158 cache = {}
1158 self.cache = cache.copy()
1159 self.cache = cache.copy()
1159 self.map = {}
1160 self.map = {}
1160 self.filters = templatefilters.filters.copy()
1161 self.filters = templatefilters.filters.copy()
1161 self.filters.update(filters)
1162 self.filters.update(filters)
1162 self.defaults = defaults
1163 self.defaults = defaults
1163 self._aliases = aliases
1164 self._aliases = aliases
1164 self.minchunk, self.maxchunk = minchunk, maxchunk
1165 self.minchunk, self.maxchunk = minchunk, maxchunk
1165 self.ecache = {}
1166 self.ecache = {}
1166
1167
1167 @classmethod
1168 @classmethod
1168 def frommapfile(cls, mapfile, filters=None, defaults=None, cache=None,
1169 def frommapfile(cls, mapfile, filters=None, defaults=None, cache=None,
1169 minchunk=1024, maxchunk=65536):
1170 minchunk=1024, maxchunk=65536):
1170 """Create templater from the specified map file"""
1171 """Create templater from the specified map file"""
1171 t = cls(filters, defaults, cache, [], minchunk, maxchunk)
1172 t = cls(filters, defaults, cache, [], minchunk, maxchunk)
1172 cache, tmap = _readmapfile(mapfile)
1173 cache, tmap = _readmapfile(mapfile)
1173 t.cache.update(cache)
1174 t.cache.update(cache)
1174 t.map = tmap
1175 t.map = tmap
1175 return t
1176 return t
1176
1177
1177 def __contains__(self, key):
1178 def __contains__(self, key):
1178 return key in self.cache or key in self.map
1179 return key in self.cache or key in self.map
1179
1180
1180 def load(self, t):
1181 def load(self, t):
1181 '''Get the template for the given template name. Use a local cache.'''
1182 '''Get the template for the given template name. Use a local cache.'''
1182 if t not in self.cache:
1183 if t not in self.cache:
1183 try:
1184 try:
1184 self.cache[t] = util.readfile(self.map[t][1])
1185 self.cache[t] = util.readfile(self.map[t][1])
1185 except KeyError as inst:
1186 except KeyError as inst:
1186 raise TemplateNotFound(_('"%s" not in template map') %
1187 raise TemplateNotFound(_('"%s" not in template map') %
1187 inst.args[0])
1188 inst.args[0])
1188 except IOError as inst:
1189 except IOError as inst:
1189 raise IOError(inst.args[0], _('template file %s: %s') %
1190 raise IOError(inst.args[0], _('template file %s: %s') %
1190 (self.map[t][1], inst.args[1]))
1191 (self.map[t][1], inst.args[1]))
1191 return self.cache[t]
1192 return self.cache[t]
1192
1193
1193 def __call__(self, t, **mapping):
1194 def __call__(self, t, **mapping):
1194 ttype = t in self.map and self.map[t][0] or 'default'
1195 ttype = t in self.map and self.map[t][0] or 'default'
1195 if ttype not in self.ecache:
1196 if ttype not in self.ecache:
1196 try:
1197 try:
1197 ecls = engines[ttype]
1198 ecls = engines[ttype]
1198 except KeyError:
1199 except KeyError:
1199 raise error.Abort(_('invalid template engine: %s') % ttype)
1200 raise error.Abort(_('invalid template engine: %s') % ttype)
1200 self.ecache[ttype] = ecls(self.load, self.filters, self.defaults,
1201 self.ecache[ttype] = ecls(self.load, self.filters, self.defaults,
1201 self._aliases)
1202 self._aliases)
1202 proc = self.ecache[ttype]
1203 proc = self.ecache[ttype]
1203
1204
1204 stream = proc.process(t, mapping)
1205 stream = proc.process(t, mapping)
1205 if self.minchunk:
1206 if self.minchunk:
1206 stream = util.increasingchunks(stream, min=self.minchunk,
1207 stream = util.increasingchunks(stream, min=self.minchunk,
1207 max=self.maxchunk)
1208 max=self.maxchunk)
1208 return stream
1209 return stream
1209
1210
1210 def templatepaths():
1211 def templatepaths():
1211 '''return locations used for template files.'''
1212 '''return locations used for template files.'''
1212 pathsrel = ['templates']
1213 pathsrel = ['templates']
1213 paths = [os.path.normpath(os.path.join(util.datapath, f))
1214 paths = [os.path.normpath(os.path.join(util.datapath, f))
1214 for f in pathsrel]
1215 for f in pathsrel]
1215 return [p for p in paths if os.path.isdir(p)]
1216 return [p for p in paths if os.path.isdir(p)]
1216
1217
1217 def templatepath(name):
1218 def templatepath(name):
1218 '''return location of template file. returns None if not found.'''
1219 '''return location of template file. returns None if not found.'''
1219 for p in templatepaths():
1220 for p in templatepaths():
1220 f = os.path.join(p, name)
1221 f = os.path.join(p, name)
1221 if os.path.exists(f):
1222 if os.path.exists(f):
1222 return f
1223 return f
1223 return None
1224 return None
1224
1225
1225 def stylemap(styles, paths=None):
1226 def stylemap(styles, paths=None):
1226 """Return path to mapfile for a given style.
1227 """Return path to mapfile for a given style.
1227
1228
1228 Searches mapfile in the following locations:
1229 Searches mapfile in the following locations:
1229 1. templatepath/style/map
1230 1. templatepath/style/map
1230 2. templatepath/map-style
1231 2. templatepath/map-style
1231 3. templatepath/map
1232 3. templatepath/map
1232 """
1233 """
1233
1234
1234 if paths is None:
1235 if paths is None:
1235 paths = templatepaths()
1236 paths = templatepaths()
1236 elif isinstance(paths, str):
1237 elif isinstance(paths, str):
1237 paths = [paths]
1238 paths = [paths]
1238
1239
1239 if isinstance(styles, str):
1240 if isinstance(styles, str):
1240 styles = [styles]
1241 styles = [styles]
1241
1242
1242 for style in styles:
1243 for style in styles:
1243 # only plain name is allowed to honor template paths
1244 # only plain name is allowed to honor template paths
1244 if (not style
1245 if (not style
1245 or style in (os.curdir, os.pardir)
1246 or style in (os.curdir, os.pardir)
1246 or os.sep in style
1247 or pycompat.ossep in style
1247 or os.altsep and os.altsep in style):
1248 or os.altsep and os.altsep in style):
1248 continue
1249 continue
1249 locations = [os.path.join(style, 'map'), 'map-' + style]
1250 locations = [os.path.join(style, 'map'), 'map-' + style]
1250 locations.append('map')
1251 locations.append('map')
1251
1252
1252 for path in paths:
1253 for path in paths:
1253 for location in locations:
1254 for location in locations:
1254 mapfile = os.path.join(path, location)
1255 mapfile = os.path.join(path, location)
1255 if os.path.isfile(mapfile):
1256 if os.path.isfile(mapfile):
1256 return style, mapfile
1257 return style, mapfile
1257
1258
1258 raise RuntimeError("No hgweb templates found in %r" % paths)
1259 raise RuntimeError("No hgweb templates found in %r" % paths)
1259
1260
1260 def loadfunction(ui, extname, registrarobj):
1261 def loadfunction(ui, extname, registrarobj):
1261 """Load template function from specified registrarobj
1262 """Load template function from specified registrarobj
1262 """
1263 """
1263 for name, func in registrarobj._table.iteritems():
1264 for name, func in registrarobj._table.iteritems():
1264 funcs[name] = func
1265 funcs[name] = func
1265
1266
1266 # tell hggettext to extract docstrings from these functions:
1267 # tell hggettext to extract docstrings from these functions:
1267 i18nfunctions = funcs.values()
1268 i18nfunctions = funcs.values()
@@ -1,479 +1,479 b''
1 # windows.py - Windows utility function implementations for Mercurial
1 # windows.py - Windows utility function implementations for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import msvcrt
11 import msvcrt
12 import os
12 import os
13 import re
13 import re
14 import stat
14 import stat
15 import sys
15 import sys
16
16
17 from .i18n import _
17 from .i18n import _
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 osutil,
20 osutil,
21 pycompat,
21 pycompat,
22 win32,
22 win32,
23 )
23 )
24
24
25 try:
25 try:
26 import _winreg as winreg
26 import _winreg as winreg
27 winreg.CloseKey
27 winreg.CloseKey
28 except ImportError:
28 except ImportError:
29 import winreg
29 import winreg
30
30
# Platform API: expose the native win32 implementations under the
# generic names this module is expected to provide.
executablepath = win32.executablepath
getuser = win32.getuser
hidewindow = win32.hidewindow
makedir = win32.makedir
nlinks = win32.nlinks
oslink = win32.oslink
samedevice = win32.samedevice
samefile = win32.samefile
setsignalhandler = win32.setsignalhandler
spawndetached = win32.spawndetached
split = os.path.split
testpid = win32.testpid
unlink = win32.unlink

# default creation mask for new files/directories
umask = 0o022
46
46
class mixedfilemodewrapper(object):
    """Wraps a file handle when it is opened in read/write mode.

    fopen() and fdopen() on Windows have a specific-to-Windows requirement
    that files opened with mode r+, w+, or a+ make a call to a file positioning
    function when switching between reads and writes. Without this extra call,
    Python will raise a not very intuitive "IOError: [Errno 0] Error."

    This class wraps posixfile instances when the file is opened in read/write
    mode and automatically adds checks or inserts appropriate file positioning
    calls when necessary.
    """
    # last-operation states
    OPNONE = 0
    OPREAD = 1
    OPWRITE = 2

    def __init__(self, fp):
        # use object.__setattr__ because our own __setattr__ delegates
        # to the wrapped file object
        object.__setattr__(self, '_fp', fp)
        object.__setattr__(self, '_lastop', 0)

    def __getattr__(self, name):
        return getattr(self._fp, name)

    def __setattr__(self, name, value):
        return self._fp.__setattr__(name, value)

    def _noopseek(self):
        # seek(0, SEEK_CUR) satisfies the positioning-call requirement
        # without moving the file pointer
        self._fp.seek(0, os.SEEK_CUR)

    def seek(self, *args, **kwargs):
        object.__setattr__(self, '_lastop', self.OPNONE)
        return self._fp.seek(*args, **kwargs)

    def write(self, d):
        if self._lastop == self.OPREAD:
            self._noopseek()

        object.__setattr__(self, '_lastop', self.OPWRITE)
        return self._fp.write(d)

    def writelines(self, *args, **kwargs):
        if self._lastop == self.OPREAD:
            # Bug fix: this previously called self._noopeseek() (typo),
            # which __getattr__ forwarded to the wrapped file, raising
            # AttributeError on the first writelines() after a read.
            self._noopseek()

        object.__setattr__(self, '_lastop', self.OPWRITE)
        return self._fp.writelines(*args, **kwargs)

    def read(self, *args, **kwargs):
        if self._lastop == self.OPWRITE:
            self._noopseek()

        object.__setattr__(self, '_lastop', self.OPREAD)
        return self._fp.read(*args, **kwargs)

    def readline(self, *args, **kwargs):
        if self._lastop == self.OPWRITE:
            self._noopseek()

        object.__setattr__(self, '_lastop', self.OPREAD)
        return self._fp.readline(*args, **kwargs)

    def readlines(self, *args, **kwargs):
        if self._lastop == self.OPWRITE:
            self._noopseek()

        object.__setattr__(self, '_lastop', self.OPREAD)
        return self._fp.readlines(*args, **kwargs)
114
114
def posixfile(name, mode='r', buffering=-1):
    '''Open a file with even more POSIX-like semantics'''
    try:
        # may raise WindowsError
        fp = osutil.posixfile(name, mode, buffering)

        # The position when opening in append mode is implementation
        # defined, so make it consistent with other platforms, which
        # position at EOF.
        if 'a' in mode:
            fp.seek(0, os.SEEK_END)

        # read/write handles need the positioning shim between reads
        # and writes (see mixedfilemodewrapper)
        if '+' in mode:
            return mixedfilemodewrapper(fp)

        return fp
    except WindowsError as err:
        # convert to a friendlier exception
        raise IOError(err.errno, '%s: %s' % (name, err.strerror))
132
132
class winstdout(object):
    '''stdout on windows misbehaves if sent through a pipe'''

    def __init__(self, fp):
        self.fp = fp

    def __getattr__(self, key):
        # delegate everything we don't override to the wrapped stream
        return getattr(self.fp, key)

    def close(self):
        try:
            self.fp.close()
        except IOError:
            pass

    def write(self, s):
        # Writing a large buffer to a console can fail with
        # "Not enough space", so emit the data in bounded chunks.
        chunksize = 16000
        try:
            self.softspace = 0
            pos = 0
            total = len(s)
            while pos < total:
                self.fp.write(s[pos:pos + chunksize])
                pos += chunksize
        except IOError as inst:
            if inst.errno != 0:
                raise
            self.close()
            raise IOError(errno.EPIPE, 'Broken pipe')

    def flush(self):
        try:
            return self.fp.flush()
        except IOError as inst:
            if inst.errno != errno.EINVAL:
                raise
            self.close()
            raise IOError(errno.EPIPE, 'Broken pipe')
174
174
def _is_win_9x():
    '''return true if run on windows 95, 98 or me.'''
    try:
        # platform field 1 identifies the Windows 9x family
        return sys.getwindowsversion()[3] == 1
    except AttributeError:
        # non-Windows interpreter: fall back to sniffing the shell name
        return 'command' in os.environ.get('comspec', '')

def openhardlinks():
    # hardlink support on open files is absent on the 9x family
    return not _is_win_9x()
184
184
def parsepatchoutput(output_line):
    """parses the output produced by patch and returns the filename"""
    # the filename follows the fixed 14-character "patching file " prefix
    name = output_line[14:]
    quoted = name[0] == '`'
    if quoted:
        name = name[1:-1]  # strip GNU patch's `...' quoting
    return name
191
191
def sshargs(sshcmd, host, user, port):
    '''Build argument list for ssh or Plink'''
    # Plink spells the port flag -P where OpenSSH uses -p
    portflag = '-P' if 'plink' in sshcmd.lower() else '-p'
    target = '%s@%s' % (user, host) if user else host
    if port:
        return '%s %s %s' % (target, portflag, port)
    return target
197
197
def setflags(f, l, x):
    # symlink/exec flags cannot be stored on this platform
    pass

def copymode(src, dst, mode=None):
    # mode bits are meaningless here; nothing to copy
    pass

def checkexec(path):
    # the filesystem cannot record an executable bit
    return False

def checklink(path):
    # symlink creation is not supported by this implementation
    return False
209
209
def setbinary(fd):
    # When run without console, pipes may expose invalid
    # fileno(), usually set to -1.
    getfileno = getattr(fd, 'fileno', None)
    if getfileno is None:
        return
    fno = getfileno()
    if fno >= 0:
        msvcrt.setmode(fno, os.O_BINARY)
216
216
def pconvert(path):
    # normalize the OS-specific separator to '/' for internal use
    sep = pycompat.ossep
    return path.replace(sep, '/')
219
219
def localpath(path):
    # convert an internal '/'-separated path to Windows form
    return path.replace('/', '\\')
222
222
def normpath(path):
    # collapse separators and '..' first, then restore forward slashes
    return pconvert(os.path.normpath(path))
225
225
def normcase(path):
    # NTFS compares via upper()
    return encoding.upper(path)

# see posix.py for definitions
normcasespec = encoding.normcasespecs.upper
normcasefallback = encoding.upperfallback
232
232
def samestat(s1, s2):
    # stat results carry no inode data here, so identity can't be checked
    return False
235
235
# A sequence of backslashes is special iff it precedes a double quote:
# - if there's an even number of backslashes, the double quote is not
#   quoted (i.e. it ends the quoted region)
# - if there's an odd number of backslashes, the double quote is quoted
# - in both cases, every pair of backslashes is unquoted into a single
#   backslash
# (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
# So, to quote a string, we must surround it in double quotes, double
# the number of backslashes that precede double quotes and add another
# backslash before every double quote (being careful with the double
# quote we've appended to the end)
_quotere = None
_needsshellquote = None
def shellquote(s):
    r"""
    >>> shellquote(r'C:\Users\xyz')
    '"C:\\Users\\xyz"'
    >>> shellquote(r'C:\Users\xyz/mixed')
    '"C:\\Users\\xyz/mixed"'
    >>> # Would be safe not to quote too, since it is all double backslashes
    >>> shellquote(r'C:\\Users\\xyz')
    '"C:\\\\Users\\\\xyz"'
    >>> # But this must be quoted
    >>> shellquote(r'C:\\Users\\xyz/abc')
    '"C:\\\\Users\\\\xyz/abc"'
    """
    global _quotere, _needsshellquote
    # compile the regexes lazily, once per process
    if _quotere is None:
        _quotere = re.compile(r'(\\*)("|\\$)')
    if _needsshellquote is None:
        # ":" is also treated as "safe character", because it is used as a part
        # of path name on Windows. "\" is also part of a path name, but isn't
        # safe because shlex.split() (kind of) treats it as an escape char and
        # drops it. It will leave the next character, even if it is another
        # "\".
        _needsshellquote = re.compile(r'[^a-zA-Z0-9._:/-]').search
    if s and not _needsshellquote(s) and not _quotere.search(s):
        # "s" shouldn't have to be quoted
        return s
    return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
277
277
def quotecommand(cmd):
    """Build a command string suitable for os.popen* calls."""
    # Python versions since 2.7.1 do this extra quoting themselves
    if sys.version_info >= (2, 7, 1):
        return cmd
    return '"%s"' % cmd
284
284
def popen(command, mode='r'):
    # Work around "popen spawned process may not write to stdout
    # under windows" (http://bugs.python.org/issue1366) by sending
    # stderr to the null device.
    command += " 2> %s" % os.devnull
    return os.popen(quotecommand(command), mode)
291
291
def explainexit(code):
    """Return a (message, exitcode) pair describing a process exit."""
    return _("exited with status %d") % code, code
294
294
# if you change this stub into a real check, please try to implement the
# username and groupname functions above, too.
def isowner(st):
    # ownership is not checked on this platform; assume yes
    return True
299
299
def findexe(command):
    '''Find executable for command searching like cmd.exe does.
    If command is a basename then PATH is searched for command.
    PATH isn't searched if command is an absolute or relative path.
    An extension from PATHEXT is found and added if not present.
    If command isn't found None is returned.'''
    pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
    pathexts = [e for e in pathext.lower().split(pycompat.ospathsep)]
    if os.path.splitext(command)[1].lower() in pathexts:
        # command already carries a recognized extension
        pathexts = ['']

    def findexisting(pathcommand):
        'Will append extension (if needed) and return existing file'
        for ext in pathexts:
            candidate = pathcommand + ext
            if os.path.exists(candidate):
                return candidate
        return None

    if pycompat.ossep in command:
        # explicit path given: do not consult PATH
        return findexisting(command)

    for searchdir in os.environ.get('PATH', '').split(pycompat.ospathsep):
        found = findexisting(os.path.join(searchdir, command))
        if found is not None:
            return found
    return findexisting(os.path.expanduser(os.path.expandvars(command)))
327
327
# only regular files and symlinks are interesting to callers
_wantedkinds = set([stat.S_IFREG, stat.S_IFLNK])

def statfiles(files):
    '''Stat each file in files. Yield each stat, or None if a file
    does not exist or has a type we don't care about.

    Cluster and cache stat per directory to minimize number of OS stat calls.'''
    # dirname -> {filename: stat}; missing names map to None on lookup
    dircache = {}
    getkind = stat.S_IFMT
    for nf in files:
        nf = normcase(nf)
        dirname, basename = os.path.split(nf)
        if not dirname:
            dirname = '.'
        cache = dircache.get(dirname, None)
        if cache is None:
            try:
                entries = osutil.listdir(dirname, True)
                dmap = dict([(normcase(n), s) for n, k, s in entries
                             if getkind(s.st_mode) in _wantedkinds])
            except OSError as err:
                # ENOENT: missing directory; EINVAL/ENOTDIR: not a directory
                if err.errno not in (errno.ENOENT, errno.EINVAL,
                                     errno.ENOTDIR):
                    raise
                dmap = {}
            cache = dircache.setdefault(dirname, dmap)
        yield cache.get(basename, None)
357
357
def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user."""
    # not implemented on this platform
    return None

def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group."""
    # not implemented on this platform
    return None
369
369
def removedirs(name):
    """special version of os.removedirs that does not remove symlinked
    directories or junction points if they actually contain files"""
    # refuse to touch a non-empty leaf directory
    if osutil.listdir(name):
        return
    os.rmdir(name)
    parent, leaf = os.path.split(name)
    if not leaf:
        # trailing separator: split once more to get the real parent
        parent, leaf = os.path.split(parent)
    while parent and leaf:
        try:
            if osutil.listdir(parent):
                return
            os.rmdir(parent)
        except (ValueError, OSError):
            break
        parent, leaf = os.path.split(parent)
387
387
def unlinkpath(f, ignoremissing=False):
    """unlink and remove the directory if it is empty"""
    try:
        unlink(f)
    except OSError as e:
        # a missing file is only tolerated when explicitly requested
        if not (ignoremissing and e.errno == errno.ENOENT):
            raise
    # try removing directories that might now be empty
    try:
        removedirs(os.path.dirname(f))
    except OSError:
        pass
400
400
def rename(src, dst):
    '''atomically rename file src to dst, replacing dst if it exists'''
    try:
        os.rename(src, dst)
    except OSError as e:
        # rename fails with EEXIST when dst exists; remove it and retry
        if e.errno != errno.EEXIST:
            raise
        unlink(dst)
        os.rename(src, dst)
410
410
def gethgcmd():
    # re-invoke the running interpreter with the current script
    return [sys.executable] + sys.argv[:1]
413
413
def groupmembers(name):
    # Don't support groups on Windows for now
    raise KeyError
417
417
def isexec(f):
    # no executable bit exists on this platform
    return False
420
420
class cachestat(object):
    """Stat-based cache-validity stub; never cacheable on this platform."""

    def __init__(self, path):
        pass

    def cacheable(self):
        return False
427
427
def lookupreg(key, valname=None, scope=None):
    ''' Look up a key/value name in the Windows registry.

    valname: value name. If unspecified, the default value for the key
    is used.
    scope: optionally specify scope for registry lookup, this can be
    a sequence of scopes to look up in order. Default (CURRENT_USER,
    LOCAL_MACHINE).
    '''
    if scope is None:
        scope = (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE)
    elif not isinstance(scope, (list, tuple)):
        scope = (scope,)
    for hkey in scope:
        try:
            val = winreg.QueryValueEx(winreg.OpenKey(hkey, key), valname)[0]
            # never let a Unicode string escape into the wild
            return encoding.tolocal(val.encode('UTF-8'))
        except EnvironmentError:
            # missing key/value in this scope: try the next one
            pass
448
448
# glob patterns are expanded by Mercurial itself on this platform
expandglobs = True

def statislink(st):
    '''check whether a stat result is a symlink'''
    return False

def statisexec(st):
    '''check whether a stat result is an executable file'''
    return False
458
458
def poll(fds):
    # see posix.py for description
    raise NotImplementedError()
462
462
def readpipe(pipe):
    """Read all available data from a pipe."""
    buf = []
    while True:
        # peek so we never block on an empty pipe
        avail = win32.peekpipe(pipe)
        if not avail:
            break
        data = pipe.read(avail)
        if not data:
            break
        buf.append(data)
    return ''.join(buf)
477
477
def bindunixsocket(sock, path):
    # AF_UNIX sockets are unavailable here
    raise NotImplementedError('unsupported platform')
General Comments 0
You need to be logged in to leave comments. Login now