match: refactor patkind...
Matt Mackall
r8568:4fa1618b default
@@ -1,1258 +1,1258 b''
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, bisect, stat, errno, re
10 import os, sys, bisect, stat, errno, re
11 import mdiff, bdiff, util, templater, patch, error, encoding
11 import mdiff, bdiff, util, templater, patch, error, encoding
12 import match as _match
12 import match as _match
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 def findpossible(cmd, table, strict=False):
16 def findpossible(cmd, table, strict=False):
17 """
17 """
18 Return cmd -> (aliases, command table entry)
18 Return cmd -> (aliases, command table entry)
19 for each matching command.
19 for each matching command.
20 Return debug commands (or their aliases) only if no normal command matches.
20 Return debug commands (or their aliases) only if no normal command matches.
21 """
21 """
22 choice = {}
22 choice = {}
23 debugchoice = {}
23 debugchoice = {}
24 for e in table.keys():
24 for e in table.keys():
25 aliases = e.lstrip("^").split("|")
25 aliases = e.lstrip("^").split("|")
26 found = None
26 found = None
27 if cmd in aliases:
27 if cmd in aliases:
28 found = cmd
28 found = cmd
29 elif not strict:
29 elif not strict:
30 for a in aliases:
30 for a in aliases:
31 if a.startswith(cmd):
31 if a.startswith(cmd):
32 found = a
32 found = a
33 break
33 break
34 if found is not None:
34 if found is not None:
35 if aliases[0].startswith("debug") or found.startswith("debug"):
35 if aliases[0].startswith("debug") or found.startswith("debug"):
36 debugchoice[found] = (aliases, table[e])
36 debugchoice[found] = (aliases, table[e])
37 else:
37 else:
38 choice[found] = (aliases, table[e])
38 choice[found] = (aliases, table[e])
39
39
40 if not choice and debugchoice:
40 if not choice and debugchoice:
41 choice = debugchoice
41 choice = debugchoice
42
42
43 return choice
43 return choice
44
44
45 def findcmd(cmd, table, strict=True):
45 def findcmd(cmd, table, strict=True):
46 """Return (aliases, command table entry) for command string."""
46 """Return (aliases, command table entry) for command string."""
47 choice = findpossible(cmd, table, strict)
47 choice = findpossible(cmd, table, strict)
48
48
49 if cmd in choice:
49 if cmd in choice:
50 return choice[cmd]
50 return choice[cmd]
51
51
52 if len(choice) > 1:
52 if len(choice) > 1:
53 clist = choice.keys()
53 clist = choice.keys()
54 clist.sort()
54 clist.sort()
55 raise error.AmbiguousCommand(cmd, clist)
55 raise error.AmbiguousCommand(cmd, clist)
56
56
57 if choice:
57 if choice:
58 return choice.values()[0]
58 return choice.values()[0]
59
59
60 raise error.UnknownCommand(cmd)
60 raise error.UnknownCommand(cmd)
61
61
62 def bail_if_changed(repo):
62 def bail_if_changed(repo):
63 if repo.dirstate.parents()[1] != nullid:
63 if repo.dirstate.parents()[1] != nullid:
64 raise util.Abort(_('outstanding uncommitted merge'))
64 raise util.Abort(_('outstanding uncommitted merge'))
65 modified, added, removed, deleted = repo.status()[:4]
65 modified, added, removed, deleted = repo.status()[:4]
66 if modified or added or removed or deleted:
66 if modified or added or removed or deleted:
67 raise util.Abort(_("outstanding uncommitted changes"))
67 raise util.Abort(_("outstanding uncommitted changes"))
68
68
69 def logmessage(opts):
69 def logmessage(opts):
70 """ get the log message according to -m and -l option """
70 """ get the log message according to -m and -l option """
71 message = opts.get('message')
71 message = opts.get('message')
72 logfile = opts.get('logfile')
72 logfile = opts.get('logfile')
73
73
74 if message and logfile:
74 if message and logfile:
75 raise util.Abort(_('options --message and --logfile are mutually '
75 raise util.Abort(_('options --message and --logfile are mutually '
76 'exclusive'))
76 'exclusive'))
77 if not message and logfile:
77 if not message and logfile:
78 try:
78 try:
79 if logfile == '-':
79 if logfile == '-':
80 message = sys.stdin.read()
80 message = sys.stdin.read()
81 else:
81 else:
82 message = open(logfile).read()
82 message = open(logfile).read()
83 except IOError, inst:
83 except IOError, inst:
84 raise util.Abort(_("can't read commit message '%s': %s") %
84 raise util.Abort(_("can't read commit message '%s': %s") %
85 (logfile, inst.strerror))
85 (logfile, inst.strerror))
86 return message
86 return message
87
87
88 def loglimit(opts):
88 def loglimit(opts):
89 """get the log limit according to option -l/--limit"""
89 """get the log limit according to option -l/--limit"""
90 limit = opts.get('limit')
90 limit = opts.get('limit')
91 if limit:
91 if limit:
92 try:
92 try:
93 limit = int(limit)
93 limit = int(limit)
94 except ValueError:
94 except ValueError:
95 raise util.Abort(_('limit must be a positive integer'))
95 raise util.Abort(_('limit must be a positive integer'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
97 else:
97 else:
98 limit = sys.maxint
98 limit = sys.maxint
99 return limit
99 return limit
100
100
101 def remoteui(src, opts):
101 def remoteui(src, opts):
102 'build a remote ui from ui or repo and opts'
102 'build a remote ui from ui or repo and opts'
103 if hasattr(src, 'baseui'): # looks like a repository
103 if hasattr(src, 'baseui'): # looks like a repository
104 dst = src.baseui # drop repo-specific config
104 dst = src.baseui # drop repo-specific config
105 src = src.ui # copy target options from repo
105 src = src.ui # copy target options from repo
106 else: # assume it's a global ui object
106 else: # assume it's a global ui object
107 dst = src # keep all global options
107 dst = src # keep all global options
108
108
109 # copy ssh-specific options
109 # copy ssh-specific options
110 for o in 'ssh', 'remotecmd':
110 for o in 'ssh', 'remotecmd':
111 v = opts.get(o) or src.config('ui', o)
111 v = opts.get(o) or src.config('ui', o)
112 if v:
112 if v:
113 dst.setconfig("ui", o, v)
113 dst.setconfig("ui", o, v)
114 # copy bundle-specific options
114 # copy bundle-specific options
115 r = src.config('bundle', 'mainreporoot')
115 r = src.config('bundle', 'mainreporoot')
116 if r:
116 if r:
117 dst.setconfig('bundle', 'mainreporoot', r)
117 dst.setconfig('bundle', 'mainreporoot', r)
118
118
119 return dst
119 return dst
120
120
121 def revpair(repo, revs):
121 def revpair(repo, revs):
122 '''return pair of nodes, given list of revisions. second item can
122 '''return pair of nodes, given list of revisions. second item can
123 be None, meaning use working dir.'''
123 be None, meaning use working dir.'''
124
124
125 def revfix(repo, val, defval):
125 def revfix(repo, val, defval):
126 if not val and val != 0 and defval is not None:
126 if not val and val != 0 and defval is not None:
127 val = defval
127 val = defval
128 return repo.lookup(val)
128 return repo.lookup(val)
129
129
130 if not revs:
130 if not revs:
131 return repo.dirstate.parents()[0], None
131 return repo.dirstate.parents()[0], None
132 end = None
132 end = None
133 if len(revs) == 1:
133 if len(revs) == 1:
134 if revrangesep in revs[0]:
134 if revrangesep in revs[0]:
135 start, end = revs[0].split(revrangesep, 1)
135 start, end = revs[0].split(revrangesep, 1)
136 start = revfix(repo, start, 0)
136 start = revfix(repo, start, 0)
137 end = revfix(repo, end, len(repo) - 1)
137 end = revfix(repo, end, len(repo) - 1)
138 else:
138 else:
139 start = revfix(repo, revs[0], None)
139 start = revfix(repo, revs[0], None)
140 elif len(revs) == 2:
140 elif len(revs) == 2:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
142 raise util.Abort(_('too many revisions specified'))
142 raise util.Abort(_('too many revisions specified'))
143 start = revfix(repo, revs[0], None)
143 start = revfix(repo, revs[0], None)
144 end = revfix(repo, revs[1], None)
144 end = revfix(repo, revs[1], None)
145 else:
145 else:
146 raise util.Abort(_('too many revisions specified'))
146 raise util.Abort(_('too many revisions specified'))
147 return start, end
147 return start, end
148
148
149 def revrange(repo, revs):
149 def revrange(repo, revs):
150 """Yield revision as strings from a list of revision specifications."""
150 """Yield revision as strings from a list of revision specifications."""
151
151
152 def revfix(repo, val, defval):
152 def revfix(repo, val, defval):
153 if not val and val != 0 and defval is not None:
153 if not val and val != 0 and defval is not None:
154 return defval
154 return defval
155 return repo.changelog.rev(repo.lookup(val))
155 return repo.changelog.rev(repo.lookup(val))
156
156
157 seen, l = set(), []
157 seen, l = set(), []
158 for spec in revs:
158 for spec in revs:
159 if revrangesep in spec:
159 if revrangesep in spec:
160 start, end = spec.split(revrangesep, 1)
160 start, end = spec.split(revrangesep, 1)
161 start = revfix(repo, start, 0)
161 start = revfix(repo, start, 0)
162 end = revfix(repo, end, len(repo) - 1)
162 end = revfix(repo, end, len(repo) - 1)
163 step = start > end and -1 or 1
163 step = start > end and -1 or 1
164 for rev in xrange(start, end+step, step):
164 for rev in xrange(start, end+step, step):
165 if rev in seen:
165 if rev in seen:
166 continue
166 continue
167 seen.add(rev)
167 seen.add(rev)
168 l.append(rev)
168 l.append(rev)
169 else:
169 else:
170 rev = revfix(repo, spec, None)
170 rev = revfix(repo, spec, None)
171 if rev in seen:
171 if rev in seen:
172 continue
172 continue
173 seen.add(rev)
173 seen.add(rev)
174 l.append(rev)
174 l.append(rev)
175
175
176 return l
176 return l
177
177
178 def make_filename(repo, pat, node,
178 def make_filename(repo, pat, node,
179 total=None, seqno=None, revwidth=None, pathname=None):
179 total=None, seqno=None, revwidth=None, pathname=None):
180 node_expander = {
180 node_expander = {
181 'H': lambda: hex(node),
181 'H': lambda: hex(node),
182 'R': lambda: str(repo.changelog.rev(node)),
182 'R': lambda: str(repo.changelog.rev(node)),
183 'h': lambda: short(node),
183 'h': lambda: short(node),
184 }
184 }
185 expander = {
185 expander = {
186 '%': lambda: '%',
186 '%': lambda: '%',
187 'b': lambda: os.path.basename(repo.root),
187 'b': lambda: os.path.basename(repo.root),
188 }
188 }
189
189
190 try:
190 try:
191 if node:
191 if node:
192 expander.update(node_expander)
192 expander.update(node_expander)
193 if node:
193 if node:
194 expander['r'] = (lambda:
194 expander['r'] = (lambda:
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
196 if total is not None:
196 if total is not None:
197 expander['N'] = lambda: str(total)
197 expander['N'] = lambda: str(total)
198 if seqno is not None:
198 if seqno is not None:
199 expander['n'] = lambda: str(seqno)
199 expander['n'] = lambda: str(seqno)
200 if total is not None and seqno is not None:
200 if total is not None and seqno is not None:
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
202 if pathname is not None:
202 if pathname is not None:
203 expander['s'] = lambda: os.path.basename(pathname)
203 expander['s'] = lambda: os.path.basename(pathname)
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
205 expander['p'] = lambda: pathname
205 expander['p'] = lambda: pathname
206
206
207 newname = []
207 newname = []
208 patlen = len(pat)
208 patlen = len(pat)
209 i = 0
209 i = 0
210 while i < patlen:
210 while i < patlen:
211 c = pat[i]
211 c = pat[i]
212 if c == '%':
212 if c == '%':
213 i += 1
213 i += 1
214 c = pat[i]
214 c = pat[i]
215 c = expander[c]()
215 c = expander[c]()
216 newname.append(c)
216 newname.append(c)
217 i += 1
217 i += 1
218 return ''.join(newname)
218 return ''.join(newname)
219 except KeyError, inst:
219 except KeyError, inst:
220 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
220 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
221 inst.args[0])
221 inst.args[0])
222
222
223 def make_file(repo, pat, node=None,
223 def make_file(repo, pat, node=None,
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
225
225
226 writable = 'w' in mode or 'a' in mode
226 writable = 'w' in mode or 'a' in mode
227
227
228 if not pat or pat == '-':
228 if not pat or pat == '-':
229 return writable and sys.stdout or sys.stdin
229 return writable and sys.stdout or sys.stdin
230 if hasattr(pat, 'write') and writable:
230 if hasattr(pat, 'write') and writable:
231 return pat
231 return pat
232 if hasattr(pat, 'read') and 'r' in mode:
232 if hasattr(pat, 'read') and 'r' in mode:
233 return pat
233 return pat
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
235 pathname),
235 pathname),
236 mode)
236 mode)
237
237
238 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
238 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
239 if not globbed and default == 'relpath':
239 if not globbed and default == 'relpath':
240 pats = util.expand_glob(pats or [])
240 pats = util.expand_glob(pats or [])
241 m = _match.match(repo.root, repo.getcwd(), pats,
241 m = _match.match(repo.root, repo.getcwd(), pats,
242 opts.get('include'), opts.get('exclude'), default)
242 opts.get('include'), opts.get('exclude'), default)
243 def badfn(f, msg):
243 def badfn(f, msg):
244 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
244 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
245 return False
245 return False
246 m.bad = badfn
246 m.bad = badfn
247 return m
247 return m
248
248
249 def matchall(repo):
249 def matchall(repo):
250 return _match.always(repo.root, repo.getcwd())
250 return _match.always(repo.root, repo.getcwd())
251
251
252 def matchfiles(repo, files):
252 def matchfiles(repo, files):
253 return _match.exact(repo.root, repo.getcwd(), files)
253 return _match.exact(repo.root, repo.getcwd(), files)
254
254
255 def findrenames(repo, match=None, threshold=0.5):
255 def findrenames(repo, match=None, threshold=0.5):
256 '''find renamed files -- yields (before, after, score) tuples'''
256 '''find renamed files -- yields (before, after, score) tuples'''
257 added, removed = repo.status(match=match)[1:3]
257 added, removed = repo.status(match=match)[1:3]
258 ctx = repo['.']
258 ctx = repo['.']
259 for a in added:
259 for a in added:
260 aa = repo.wread(a)
260 aa = repo.wread(a)
261 bestname, bestscore = None, threshold
261 bestname, bestscore = None, threshold
262 for r in removed:
262 for r in removed:
263 rr = ctx.filectx(r).data()
263 rr = ctx.filectx(r).data()
264
264
265 # bdiff.blocks() returns blocks of matching lines
265 # bdiff.blocks() returns blocks of matching lines
266 # count the number of bytes in each
266 # count the number of bytes in each
267 equal = 0
267 equal = 0
268 alines = mdiff.splitnewlines(aa)
268 alines = mdiff.splitnewlines(aa)
269 matches = bdiff.blocks(aa, rr)
269 matches = bdiff.blocks(aa, rr)
270 for x1,x2,y1,y2 in matches:
270 for x1,x2,y1,y2 in matches:
271 for line in alines[x1:x2]:
271 for line in alines[x1:x2]:
272 equal += len(line)
272 equal += len(line)
273
273
274 lengths = len(aa) + len(rr)
274 lengths = len(aa) + len(rr)
275 if lengths:
275 if lengths:
276 myscore = equal*2.0 / lengths
276 myscore = equal*2.0 / lengths
277 if myscore >= bestscore:
277 if myscore >= bestscore:
278 bestname, bestscore = r, myscore
278 bestname, bestscore = r, myscore
279 if bestname:
279 if bestname:
280 yield bestname, a, bestscore
280 yield bestname, a, bestscore
281
281
282 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
282 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
283 if dry_run is None:
283 if dry_run is None:
284 dry_run = opts.get('dry_run')
284 dry_run = opts.get('dry_run')
285 if similarity is None:
285 if similarity is None:
286 similarity = float(opts.get('similarity') or 0)
286 similarity = float(opts.get('similarity') or 0)
287 add, remove = [], []
287 add, remove = [], []
288 audit_path = util.path_auditor(repo.root)
288 audit_path = util.path_auditor(repo.root)
289 m = match(repo, pats, opts)
289 m = match(repo, pats, opts)
290 for abs in repo.walk(m):
290 for abs in repo.walk(m):
291 target = repo.wjoin(abs)
291 target = repo.wjoin(abs)
292 good = True
292 good = True
293 try:
293 try:
294 audit_path(abs)
294 audit_path(abs)
295 except:
295 except:
296 good = False
296 good = False
297 rel = m.rel(abs)
297 rel = m.rel(abs)
298 exact = m.exact(abs)
298 exact = m.exact(abs)
299 if good and abs not in repo.dirstate:
299 if good and abs not in repo.dirstate:
300 add.append(abs)
300 add.append(abs)
301 if repo.ui.verbose or not exact:
301 if repo.ui.verbose or not exact:
302 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
302 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
303 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
303 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
304 or (os.path.isdir(target) and not os.path.islink(target))):
304 or (os.path.isdir(target) and not os.path.islink(target))):
305 remove.append(abs)
305 remove.append(abs)
306 if repo.ui.verbose or not exact:
306 if repo.ui.verbose or not exact:
307 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
307 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
308 if not dry_run:
308 if not dry_run:
309 repo.remove(remove)
309 repo.remove(remove)
310 repo.add(add)
310 repo.add(add)
311 if similarity > 0:
311 if similarity > 0:
312 for old, new, score in findrenames(repo, m, similarity):
312 for old, new, score in findrenames(repo, m, similarity):
313 oldexact, newexact = m.exact(old), m.exact(new)
313 oldexact, newexact = m.exact(old), m.exact(new)
314 if repo.ui.verbose or not oldexact or not newexact:
314 if repo.ui.verbose or not oldexact or not newexact:
315 oldrel, newrel = m.rel(old), m.rel(new)
315 oldrel, newrel = m.rel(old), m.rel(new)
316 repo.ui.status(_('recording removal of %s as rename to %s '
316 repo.ui.status(_('recording removal of %s as rename to %s '
317 '(%d%% similar)\n') %
317 '(%d%% similar)\n') %
318 (oldrel, newrel, score * 100))
318 (oldrel, newrel, score * 100))
319 if not dry_run:
319 if not dry_run:
320 repo.copy(old, new)
320 repo.copy(old, new)
321
321
322 def copy(ui, repo, pats, opts, rename=False):
322 def copy(ui, repo, pats, opts, rename=False):
323 # called with the repo lock held
323 # called with the repo lock held
324 #
324 #
325 # hgsep => pathname that uses "/" to separate directories
325 # hgsep => pathname that uses "/" to separate directories
326 # ossep => pathname that uses os.sep to separate directories
326 # ossep => pathname that uses os.sep to separate directories
327 cwd = repo.getcwd()
327 cwd = repo.getcwd()
328 targets = {}
328 targets = {}
329 after = opts.get("after")
329 after = opts.get("after")
330 dryrun = opts.get("dry_run")
330 dryrun = opts.get("dry_run")
331
331
332 def walkpat(pat):
332 def walkpat(pat):
333 srcs = []
333 srcs = []
334 m = match(repo, [pat], opts, globbed=True)
334 m = match(repo, [pat], opts, globbed=True)
335 for abs in repo.walk(m):
335 for abs in repo.walk(m):
336 state = repo.dirstate[abs]
336 state = repo.dirstate[abs]
337 rel = m.rel(abs)
337 rel = m.rel(abs)
338 exact = m.exact(abs)
338 exact = m.exact(abs)
339 if state in '?r':
339 if state in '?r':
340 if exact and state == '?':
340 if exact and state == '?':
341 ui.warn(_('%s: not copying - file is not managed\n') % rel)
341 ui.warn(_('%s: not copying - file is not managed\n') % rel)
342 if exact and state == 'r':
342 if exact and state == 'r':
343 ui.warn(_('%s: not copying - file has been marked for'
343 ui.warn(_('%s: not copying - file has been marked for'
344 ' remove\n') % rel)
344 ' remove\n') % rel)
345 continue
345 continue
346 # abs: hgsep
346 # abs: hgsep
347 # rel: ossep
347 # rel: ossep
348 srcs.append((abs, rel, exact))
348 srcs.append((abs, rel, exact))
349 return srcs
349 return srcs
350
350
351 # abssrc: hgsep
351 # abssrc: hgsep
352 # relsrc: ossep
352 # relsrc: ossep
353 # otarget: ossep
353 # otarget: ossep
354 def copyfile(abssrc, relsrc, otarget, exact):
354 def copyfile(abssrc, relsrc, otarget, exact):
355 abstarget = util.canonpath(repo.root, cwd, otarget)
355 abstarget = util.canonpath(repo.root, cwd, otarget)
356 reltarget = repo.pathto(abstarget, cwd)
356 reltarget = repo.pathto(abstarget, cwd)
357 target = repo.wjoin(abstarget)
357 target = repo.wjoin(abstarget)
358 src = repo.wjoin(abssrc)
358 src = repo.wjoin(abssrc)
359 state = repo.dirstate[abstarget]
359 state = repo.dirstate[abstarget]
360
360
361 # check for collisions
361 # check for collisions
362 prevsrc = targets.get(abstarget)
362 prevsrc = targets.get(abstarget)
363 if prevsrc is not None:
363 if prevsrc is not None:
364 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
364 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
365 (reltarget, repo.pathto(abssrc, cwd),
365 (reltarget, repo.pathto(abssrc, cwd),
366 repo.pathto(prevsrc, cwd)))
366 repo.pathto(prevsrc, cwd)))
367 return
367 return
368
368
369 # check for overwrites
369 # check for overwrites
370 exists = os.path.exists(target)
370 exists = os.path.exists(target)
371 if not after and exists or after and state in 'mn':
371 if not after and exists or after and state in 'mn':
372 if not opts['force']:
372 if not opts['force']:
373 ui.warn(_('%s: not overwriting - file exists\n') %
373 ui.warn(_('%s: not overwriting - file exists\n') %
374 reltarget)
374 reltarget)
375 return
375 return
376
376
377 if after:
377 if after:
378 if not exists:
378 if not exists:
379 return
379 return
380 elif not dryrun:
380 elif not dryrun:
381 try:
381 try:
382 if exists:
382 if exists:
383 os.unlink(target)
383 os.unlink(target)
384 targetdir = os.path.dirname(target) or '.'
384 targetdir = os.path.dirname(target) or '.'
385 if not os.path.isdir(targetdir):
385 if not os.path.isdir(targetdir):
386 os.makedirs(targetdir)
386 os.makedirs(targetdir)
387 util.copyfile(src, target)
387 util.copyfile(src, target)
388 except IOError, inst:
388 except IOError, inst:
389 if inst.errno == errno.ENOENT:
389 if inst.errno == errno.ENOENT:
390 ui.warn(_('%s: deleted in working copy\n') % relsrc)
390 ui.warn(_('%s: deleted in working copy\n') % relsrc)
391 else:
391 else:
392 ui.warn(_('%s: cannot copy - %s\n') %
392 ui.warn(_('%s: cannot copy - %s\n') %
393 (relsrc, inst.strerror))
393 (relsrc, inst.strerror))
394 return True # report a failure
394 return True # report a failure
395
395
396 if ui.verbose or not exact:
396 if ui.verbose or not exact:
397 if rename:
397 if rename:
398 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
398 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
399 else:
399 else:
400 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
400 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
401
401
402 targets[abstarget] = abssrc
402 targets[abstarget] = abssrc
403
403
404 # fix up dirstate
404 # fix up dirstate
405 origsrc = repo.dirstate.copied(abssrc) or abssrc
405 origsrc = repo.dirstate.copied(abssrc) or abssrc
406 if abstarget == origsrc: # copying back a copy?
406 if abstarget == origsrc: # copying back a copy?
407 if state not in 'mn' and not dryrun:
407 if state not in 'mn' and not dryrun:
408 repo.dirstate.normallookup(abstarget)
408 repo.dirstate.normallookup(abstarget)
409 else:
409 else:
410 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
410 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
411 if not ui.quiet:
411 if not ui.quiet:
412 ui.warn(_("%s has not been committed yet, so no copy "
412 ui.warn(_("%s has not been committed yet, so no copy "
413 "data will be stored for %s.\n")
413 "data will be stored for %s.\n")
414 % (repo.pathto(origsrc, cwd), reltarget))
414 % (repo.pathto(origsrc, cwd), reltarget))
415 if repo.dirstate[abstarget] in '?r' and not dryrun:
415 if repo.dirstate[abstarget] in '?r' and not dryrun:
416 repo.add([abstarget])
416 repo.add([abstarget])
417 elif not dryrun:
417 elif not dryrun:
418 repo.copy(origsrc, abstarget)
418 repo.copy(origsrc, abstarget)
419
419
420 if rename and not dryrun:
420 if rename and not dryrun:
421 repo.remove([abssrc], not after)
421 repo.remove([abssrc], not after)
422
422
423 # pat: ossep
423 # pat: ossep
424 # dest ossep
424 # dest ossep
425 # srcs: list of (hgsep, hgsep, ossep, bool)
425 # srcs: list of (hgsep, hgsep, ossep, bool)
426 # return: function that takes hgsep and returns ossep
426 # return: function that takes hgsep and returns ossep
427 def targetpathfn(pat, dest, srcs):
427 def targetpathfn(pat, dest, srcs):
428 if os.path.isdir(pat):
428 if os.path.isdir(pat):
429 abspfx = util.canonpath(repo.root, cwd, pat)
429 abspfx = util.canonpath(repo.root, cwd, pat)
430 abspfx = util.localpath(abspfx)
430 abspfx = util.localpath(abspfx)
431 if destdirexists:
431 if destdirexists:
432 striplen = len(os.path.split(abspfx)[0])
432 striplen = len(os.path.split(abspfx)[0])
433 else:
433 else:
434 striplen = len(abspfx)
434 striplen = len(abspfx)
435 if striplen:
435 if striplen:
436 striplen += len(os.sep)
436 striplen += len(os.sep)
437 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
437 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
438 elif destdirexists:
438 elif destdirexists:
439 res = lambda p: os.path.join(dest,
439 res = lambda p: os.path.join(dest,
440 os.path.basename(util.localpath(p)))
440 os.path.basename(util.localpath(p)))
441 else:
441 else:
442 res = lambda p: dest
442 res = lambda p: dest
443 return res
443 return res
444
444
445 # pat: ossep
445 # pat: ossep
446 # dest ossep
446 # dest ossep
447 # srcs: list of (hgsep, hgsep, ossep, bool)
447 # srcs: list of (hgsep, hgsep, ossep, bool)
448 # return: function that takes hgsep and returns ossep
448 # return: function that takes hgsep and returns ossep
449 def targetpathafterfn(pat, dest, srcs):
449 def targetpathafterfn(pat, dest, srcs):
450 if util.patkind(pat, None)[0]:
450 if _match.patkind(pat):
451 # a mercurial pattern
451 # a mercurial pattern
452 res = lambda p: os.path.join(dest,
452 res = lambda p: os.path.join(dest,
453 os.path.basename(util.localpath(p)))
453 os.path.basename(util.localpath(p)))
454 else:
454 else:
455 abspfx = util.canonpath(repo.root, cwd, pat)
455 abspfx = util.canonpath(repo.root, cwd, pat)
456 if len(abspfx) < len(srcs[0][0]):
456 if len(abspfx) < len(srcs[0][0]):
457 # A directory. Either the target path contains the last
457 # A directory. Either the target path contains the last
458 # component of the source path or it does not.
458 # component of the source path or it does not.
459 def evalpath(striplen):
459 def evalpath(striplen):
460 score = 0
460 score = 0
461 for s in srcs:
461 for s in srcs:
462 t = os.path.join(dest, util.localpath(s[0])[striplen:])
462 t = os.path.join(dest, util.localpath(s[0])[striplen:])
463 if os.path.exists(t):
463 if os.path.exists(t):
464 score += 1
464 score += 1
465 return score
465 return score
466
466
467 abspfx = util.localpath(abspfx)
467 abspfx = util.localpath(abspfx)
468 striplen = len(abspfx)
468 striplen = len(abspfx)
469 if striplen:
469 if striplen:
470 striplen += len(os.sep)
470 striplen += len(os.sep)
471 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
471 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
472 score = evalpath(striplen)
472 score = evalpath(striplen)
473 striplen1 = len(os.path.split(abspfx)[0])
473 striplen1 = len(os.path.split(abspfx)[0])
474 if striplen1:
474 if striplen1:
475 striplen1 += len(os.sep)
475 striplen1 += len(os.sep)
476 if evalpath(striplen1) > score:
476 if evalpath(striplen1) > score:
477 striplen = striplen1
477 striplen = striplen1
478 res = lambda p: os.path.join(dest,
478 res = lambda p: os.path.join(dest,
479 util.localpath(p)[striplen:])
479 util.localpath(p)[striplen:])
480 else:
480 else:
481 # a file
481 # a file
482 if destdirexists:
482 if destdirexists:
483 res = lambda p: os.path.join(dest,
483 res = lambda p: os.path.join(dest,
484 os.path.basename(util.localpath(p)))
484 os.path.basename(util.localpath(p)))
485 else:
485 else:
486 res = lambda p: dest
486 res = lambda p: dest
487 return res
487 return res
488
488
489
489
490 pats = util.expand_glob(pats)
490 pats = util.expand_glob(pats)
491 if not pats:
491 if not pats:
492 raise util.Abort(_('no source or destination specified'))
492 raise util.Abort(_('no source or destination specified'))
493 if len(pats) == 1:
493 if len(pats) == 1:
494 raise util.Abort(_('no destination specified'))
494 raise util.Abort(_('no destination specified'))
495 dest = pats.pop()
495 dest = pats.pop()
496 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
496 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
497 if not destdirexists:
497 if not destdirexists:
498 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
498 if len(pats) > 1 or _match.patkind(pats[0]):
499 raise util.Abort(_('with multiple sources, destination must be an '
499 raise util.Abort(_('with multiple sources, destination must be an '
500 'existing directory'))
500 'existing directory'))
501 if util.endswithsep(dest):
501 if util.endswithsep(dest):
502 raise util.Abort(_('destination %s is not a directory') % dest)
502 raise util.Abort(_('destination %s is not a directory') % dest)
503
503
504 tfn = targetpathfn
504 tfn = targetpathfn
505 if after:
505 if after:
506 tfn = targetpathafterfn
506 tfn = targetpathafterfn
507 copylist = []
507 copylist = []
508 for pat in pats:
508 for pat in pats:
509 srcs = walkpat(pat)
509 srcs = walkpat(pat)
510 if not srcs:
510 if not srcs:
511 continue
511 continue
512 copylist.append((tfn(pat, dest, srcs), srcs))
512 copylist.append((tfn(pat, dest, srcs), srcs))
513 if not copylist:
513 if not copylist:
514 raise util.Abort(_('no files to copy'))
514 raise util.Abort(_('no files to copy'))
515
515
516 errors = 0
516 errors = 0
517 for targetpath, srcs in copylist:
517 for targetpath, srcs in copylist:
518 for abssrc, relsrc, exact in srcs:
518 for abssrc, relsrc, exact in srcs:
519 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
519 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
520 errors += 1
520 errors += 1
521
521
522 if errors:
522 if errors:
523 ui.warn(_('(consider using --after)\n'))
523 ui.warn(_('(consider using --after)\n'))
524
524
525 return errors
525 return errors
526
526
527 def service(opts, parentfn=None, initfn=None, runfn=None):
527 def service(opts, parentfn=None, initfn=None, runfn=None):
528 '''Run a command as a service.'''
528 '''Run a command as a service.'''
529
529
530 if opts['daemon'] and not opts['daemon_pipefds']:
530 if opts['daemon'] and not opts['daemon_pipefds']:
531 rfd, wfd = os.pipe()
531 rfd, wfd = os.pipe()
532 args = sys.argv[:]
532 args = sys.argv[:]
533 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
533 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
534 # Don't pass --cwd to the child process, because we've already
534 # Don't pass --cwd to the child process, because we've already
535 # changed directory.
535 # changed directory.
536 for i in xrange(1,len(args)):
536 for i in xrange(1,len(args)):
537 if args[i].startswith('--cwd='):
537 if args[i].startswith('--cwd='):
538 del args[i]
538 del args[i]
539 break
539 break
540 elif args[i].startswith('--cwd'):
540 elif args[i].startswith('--cwd'):
541 del args[i:i+2]
541 del args[i:i+2]
542 break
542 break
543 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
543 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
544 args[0], args)
544 args[0], args)
545 os.close(wfd)
545 os.close(wfd)
546 os.read(rfd, 1)
546 os.read(rfd, 1)
547 if parentfn:
547 if parentfn:
548 return parentfn(pid)
548 return parentfn(pid)
549 else:
549 else:
550 os._exit(0)
550 os._exit(0)
551
551
552 if initfn:
552 if initfn:
553 initfn()
553 initfn()
554
554
555 if opts['pid_file']:
555 if opts['pid_file']:
556 fp = open(opts['pid_file'], 'w')
556 fp = open(opts['pid_file'], 'w')
557 fp.write(str(os.getpid()) + '\n')
557 fp.write(str(os.getpid()) + '\n')
558 fp.close()
558 fp.close()
559
559
560 if opts['daemon_pipefds']:
560 if opts['daemon_pipefds']:
561 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
561 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
562 os.close(rfd)
562 os.close(rfd)
563 try:
563 try:
564 os.setsid()
564 os.setsid()
565 except AttributeError:
565 except AttributeError:
566 pass
566 pass
567 os.write(wfd, 'y')
567 os.write(wfd, 'y')
568 os.close(wfd)
568 os.close(wfd)
569 sys.stdout.flush()
569 sys.stdout.flush()
570 sys.stderr.flush()
570 sys.stderr.flush()
571 fd = os.open(util.nulldev, os.O_RDWR)
571 fd = os.open(util.nulldev, os.O_RDWR)
572 if fd != 0: os.dup2(fd, 0)
572 if fd != 0: os.dup2(fd, 0)
573 if fd != 1: os.dup2(fd, 1)
573 if fd != 1: os.dup2(fd, 1)
574 if fd != 2: os.dup2(fd, 2)
574 if fd != 2: os.dup2(fd, 2)
575 if fd not in (0, 1, 2): os.close(fd)
575 if fd not in (0, 1, 2): os.close(fd)
576
576
577 if runfn:
577 if runfn:
578 return runfn()
578 return runfn()
579
579
580 class changeset_printer(object):
580 class changeset_printer(object):
581 '''show changeset information when templating not requested.'''
581 '''show changeset information when templating not requested.'''
582
582
583 def __init__(self, ui, repo, patch, diffopts, buffered):
583 def __init__(self, ui, repo, patch, diffopts, buffered):
584 self.ui = ui
584 self.ui = ui
585 self.repo = repo
585 self.repo = repo
586 self.buffered = buffered
586 self.buffered = buffered
587 self.patch = patch
587 self.patch = patch
588 self.diffopts = diffopts
588 self.diffopts = diffopts
589 self.header = {}
589 self.header = {}
590 self.hunk = {}
590 self.hunk = {}
591 self.lastheader = None
591 self.lastheader = None
592
592
593 def flush(self, rev):
593 def flush(self, rev):
594 if rev in self.header:
594 if rev in self.header:
595 h = self.header[rev]
595 h = self.header[rev]
596 if h != self.lastheader:
596 if h != self.lastheader:
597 self.lastheader = h
597 self.lastheader = h
598 self.ui.write(h)
598 self.ui.write(h)
599 del self.header[rev]
599 del self.header[rev]
600 if rev in self.hunk:
600 if rev in self.hunk:
601 self.ui.write(self.hunk[rev])
601 self.ui.write(self.hunk[rev])
602 del self.hunk[rev]
602 del self.hunk[rev]
603 return 1
603 return 1
604 return 0
604 return 0
605
605
606 def show(self, ctx, copies=(), **props):
606 def show(self, ctx, copies=(), **props):
607 if self.buffered:
607 if self.buffered:
608 self.ui.pushbuffer()
608 self.ui.pushbuffer()
609 self._show(ctx, copies, props)
609 self._show(ctx, copies, props)
610 self.hunk[ctx.rev()] = self.ui.popbuffer()
610 self.hunk[ctx.rev()] = self.ui.popbuffer()
611 else:
611 else:
612 self._show(ctx, copies, props)
612 self._show(ctx, copies, props)
613
613
614 def _show(self, ctx, copies, props):
614 def _show(self, ctx, copies, props):
615 '''show a single changeset or file revision'''
615 '''show a single changeset or file revision'''
616 changenode = ctx.node()
616 changenode = ctx.node()
617 rev = ctx.rev()
617 rev = ctx.rev()
618
618
619 if self.ui.quiet:
619 if self.ui.quiet:
620 self.ui.write("%d:%s\n" % (rev, short(changenode)))
620 self.ui.write("%d:%s\n" % (rev, short(changenode)))
621 return
621 return
622
622
623 log = self.repo.changelog
623 log = self.repo.changelog
624 changes = log.read(changenode)
624 changes = log.read(changenode)
625 date = util.datestr(changes[2])
625 date = util.datestr(changes[2])
626 extra = changes[5]
626 extra = changes[5]
627 branch = extra.get("branch")
627 branch = extra.get("branch")
628
628
629 hexfunc = self.ui.debugflag and hex or short
629 hexfunc = self.ui.debugflag and hex or short
630
630
631 parents = [(p, hexfunc(log.node(p)))
631 parents = [(p, hexfunc(log.node(p)))
632 for p in self._meaningful_parentrevs(log, rev)]
632 for p in self._meaningful_parentrevs(log, rev)]
633
633
634 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
634 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
635
635
636 # don't show the default branch name
636 # don't show the default branch name
637 if branch != 'default':
637 if branch != 'default':
638 branch = encoding.tolocal(branch)
638 branch = encoding.tolocal(branch)
639 self.ui.write(_("branch: %s\n") % branch)
639 self.ui.write(_("branch: %s\n") % branch)
640 for tag in self.repo.nodetags(changenode):
640 for tag in self.repo.nodetags(changenode):
641 self.ui.write(_("tag: %s\n") % tag)
641 self.ui.write(_("tag: %s\n") % tag)
642 for parent in parents:
642 for parent in parents:
643 self.ui.write(_("parent: %d:%s\n") % parent)
643 self.ui.write(_("parent: %d:%s\n") % parent)
644
644
645 if self.ui.debugflag:
645 if self.ui.debugflag:
646 self.ui.write(_("manifest: %d:%s\n") %
646 self.ui.write(_("manifest: %d:%s\n") %
647 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
647 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
648 self.ui.write(_("user: %s\n") % changes[1])
648 self.ui.write(_("user: %s\n") % changes[1])
649 self.ui.write(_("date: %s\n") % date)
649 self.ui.write(_("date: %s\n") % date)
650
650
651 if self.ui.debugflag:
651 if self.ui.debugflag:
652 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
652 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
653 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
653 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
654 files):
654 files):
655 if value:
655 if value:
656 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
656 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
657 elif changes[3] and self.ui.verbose:
657 elif changes[3] and self.ui.verbose:
658 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
658 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
659 if copies and self.ui.verbose:
659 if copies and self.ui.verbose:
660 copies = ['%s (%s)' % c for c in copies]
660 copies = ['%s (%s)' % c for c in copies]
661 self.ui.write(_("copies: %s\n") % ' '.join(copies))
661 self.ui.write(_("copies: %s\n") % ' '.join(copies))
662
662
663 if extra and self.ui.debugflag:
663 if extra and self.ui.debugflag:
664 for key, value in sorted(extra.items()):
664 for key, value in sorted(extra.items()):
665 self.ui.write(_("extra: %s=%s\n")
665 self.ui.write(_("extra: %s=%s\n")
666 % (key, value.encode('string_escape')))
666 % (key, value.encode('string_escape')))
667
667
668 description = changes[4].strip()
668 description = changes[4].strip()
669 if description:
669 if description:
670 if self.ui.verbose:
670 if self.ui.verbose:
671 self.ui.write(_("description:\n"))
671 self.ui.write(_("description:\n"))
672 self.ui.write(description)
672 self.ui.write(description)
673 self.ui.write("\n\n")
673 self.ui.write("\n\n")
674 else:
674 else:
675 self.ui.write(_("summary: %s\n") %
675 self.ui.write(_("summary: %s\n") %
676 description.splitlines()[0])
676 description.splitlines()[0])
677 self.ui.write("\n")
677 self.ui.write("\n")
678
678
679 self.showpatch(changenode)
679 self.showpatch(changenode)
680
680
681 def showpatch(self, node):
681 def showpatch(self, node):
682 if self.patch:
682 if self.patch:
683 prev = self.repo.changelog.parents(node)[0]
683 prev = self.repo.changelog.parents(node)[0]
684 chunks = patch.diff(self.repo, prev, node, match=self.patch,
684 chunks = patch.diff(self.repo, prev, node, match=self.patch,
685 opts=patch.diffopts(self.ui, self.diffopts))
685 opts=patch.diffopts(self.ui, self.diffopts))
686 for chunk in chunks:
686 for chunk in chunks:
687 self.ui.write(chunk)
687 self.ui.write(chunk)
688 self.ui.write("\n")
688 self.ui.write("\n")
689
689
690 def _meaningful_parentrevs(self, log, rev):
690 def _meaningful_parentrevs(self, log, rev):
691 """Return list of meaningful (or all if debug) parentrevs for rev.
691 """Return list of meaningful (or all if debug) parentrevs for rev.
692
692
693 For merges (two non-nullrev revisions) both parents are meaningful.
693 For merges (two non-nullrev revisions) both parents are meaningful.
694 Otherwise the first parent revision is considered meaningful if it
694 Otherwise the first parent revision is considered meaningful if it
695 is not the preceding revision.
695 is not the preceding revision.
696 """
696 """
697 parents = log.parentrevs(rev)
697 parents = log.parentrevs(rev)
698 if not self.ui.debugflag and parents[1] == nullrev:
698 if not self.ui.debugflag and parents[1] == nullrev:
699 if parents[0] >= rev - 1:
699 if parents[0] >= rev - 1:
700 parents = []
700 parents = []
701 else:
701 else:
702 parents = [parents[0]]
702 parents = [parents[0]]
703 return parents
703 return parents
704
704
705
705
706 class changeset_templater(changeset_printer):
706 class changeset_templater(changeset_printer):
707 '''format changeset information.'''
707 '''format changeset information.'''
708
708
709 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
709 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
710 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
710 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
711 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
711 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
712 self.t = templater.templater(mapfile, {'formatnode': formatnode},
712 self.t = templater.templater(mapfile, {'formatnode': formatnode},
713 cache={
713 cache={
714 'parent': '{rev}:{node|formatnode} ',
714 'parent': '{rev}:{node|formatnode} ',
715 'manifest': '{rev}:{node|formatnode}',
715 'manifest': '{rev}:{node|formatnode}',
716 'filecopy': '{name} ({source})'})
716 'filecopy': '{name} ({source})'})
717
717
718 def use_template(self, t):
718 def use_template(self, t):
719 '''set template string to use'''
719 '''set template string to use'''
720 self.t.cache['changeset'] = t
720 self.t.cache['changeset'] = t
721
721
722 def _meaningful_parentrevs(self, ctx):
722 def _meaningful_parentrevs(self, ctx):
723 """Return list of meaningful (or all if debug) parentrevs for rev.
723 """Return list of meaningful (or all if debug) parentrevs for rev.
724 """
724 """
725 parents = ctx.parents()
725 parents = ctx.parents()
726 if len(parents) > 1:
726 if len(parents) > 1:
727 return parents
727 return parents
728 if self.ui.debugflag:
728 if self.ui.debugflag:
729 return [parents[0], self.repo['null']]
729 return [parents[0], self.repo['null']]
730 if parents[0].rev() >= ctx.rev() - 1:
730 if parents[0].rev() >= ctx.rev() - 1:
731 return []
731 return []
732 return parents
732 return parents
733
733
734 def _show(self, ctx, copies, props):
734 def _show(self, ctx, copies, props):
735 '''show a single changeset or file revision'''
735 '''show a single changeset or file revision'''
736
736
737 def showlist(name, values, plural=None, **args):
737 def showlist(name, values, plural=None, **args):
738 '''expand set of values.
738 '''expand set of values.
739 name is name of key in template map.
739 name is name of key in template map.
740 values is list of strings or dicts.
740 values is list of strings or dicts.
741 plural is plural of name, if not simply name + 's'.
741 plural is plural of name, if not simply name + 's'.
742
742
743 expansion works like this, given name 'foo'.
743 expansion works like this, given name 'foo'.
744
744
745 if values is empty, expand 'no_foos'.
745 if values is empty, expand 'no_foos'.
746
746
747 if 'foo' not in template map, return values as a string,
747 if 'foo' not in template map, return values as a string,
748 joined by space.
748 joined by space.
749
749
750 expand 'start_foos'.
750 expand 'start_foos'.
751
751
752 for each value, expand 'foo'. if 'last_foo' in template
752 for each value, expand 'foo'. if 'last_foo' in template
753 map, expand it instead of 'foo' for last key.
753 map, expand it instead of 'foo' for last key.
754
754
755 expand 'end_foos'.
755 expand 'end_foos'.
756 '''
756 '''
757 if plural: names = plural
757 if plural: names = plural
758 else: names = name + 's'
758 else: names = name + 's'
759 if not values:
759 if not values:
760 noname = 'no_' + names
760 noname = 'no_' + names
761 if noname in self.t:
761 if noname in self.t:
762 yield self.t(noname, **args)
762 yield self.t(noname, **args)
763 return
763 return
764 if name not in self.t:
764 if name not in self.t:
765 if isinstance(values[0], str):
765 if isinstance(values[0], str):
766 yield ' '.join(values)
766 yield ' '.join(values)
767 else:
767 else:
768 for v in values:
768 for v in values:
769 yield dict(v, **args)
769 yield dict(v, **args)
770 return
770 return
771 startname = 'start_' + names
771 startname = 'start_' + names
772 if startname in self.t:
772 if startname in self.t:
773 yield self.t(startname, **args)
773 yield self.t(startname, **args)
774 vargs = args.copy()
774 vargs = args.copy()
775 def one(v, tag=name):
775 def one(v, tag=name):
776 try:
776 try:
777 vargs.update(v)
777 vargs.update(v)
778 except (AttributeError, ValueError):
778 except (AttributeError, ValueError):
779 try:
779 try:
780 for a, b in v:
780 for a, b in v:
781 vargs[a] = b
781 vargs[a] = b
782 except ValueError:
782 except ValueError:
783 vargs[name] = v
783 vargs[name] = v
784 return self.t(tag, **vargs)
784 return self.t(tag, **vargs)
785 lastname = 'last_' + name
785 lastname = 'last_' + name
786 if lastname in self.t:
786 if lastname in self.t:
787 last = values.pop()
787 last = values.pop()
788 else:
788 else:
789 last = None
789 last = None
790 for v in values:
790 for v in values:
791 yield one(v)
791 yield one(v)
792 if last is not None:
792 if last is not None:
793 yield one(last, tag=lastname)
793 yield one(last, tag=lastname)
794 endname = 'end_' + names
794 endname = 'end_' + names
795 if endname in self.t:
795 if endname in self.t:
796 yield self.t(endname, **args)
796 yield self.t(endname, **args)
797
797
798 def showbranches(**args):
798 def showbranches(**args):
799 branch = ctx.branch()
799 branch = ctx.branch()
800 if branch != 'default':
800 if branch != 'default':
801 branch = encoding.tolocal(branch)
801 branch = encoding.tolocal(branch)
802 return showlist('branch', [branch], plural='branches', **args)
802 return showlist('branch', [branch], plural='branches', **args)
803
803
804 def showparents(**args):
804 def showparents(**args):
805 parents = [[('rev', p.rev()), ('node', p.hex())]
805 parents = [[('rev', p.rev()), ('node', p.hex())]
806 for p in self._meaningful_parentrevs(ctx)]
806 for p in self._meaningful_parentrevs(ctx)]
807 return showlist('parent', parents, **args)
807 return showlist('parent', parents, **args)
808
808
809 def showtags(**args):
809 def showtags(**args):
810 return showlist('tag', ctx.tags(), **args)
810 return showlist('tag', ctx.tags(), **args)
811
811
812 def showextras(**args):
812 def showextras(**args):
813 for key, value in sorted(ctx.extra().items()):
813 for key, value in sorted(ctx.extra().items()):
814 args = args.copy()
814 args = args.copy()
815 args.update(dict(key=key, value=value))
815 args.update(dict(key=key, value=value))
816 yield self.t('extra', **args)
816 yield self.t('extra', **args)
817
817
818 def showcopies(**args):
818 def showcopies(**args):
819 c = [{'name': x[0], 'source': x[1]} for x in copies]
819 c = [{'name': x[0], 'source': x[1]} for x in copies]
820 return showlist('file_copy', c, plural='file_copies', **args)
820 return showlist('file_copy', c, plural='file_copies', **args)
821
821
822 files = []
822 files = []
823 def getfiles():
823 def getfiles():
824 if not files:
824 if not files:
825 files[:] = self.repo.status(ctx.parents()[0].node(),
825 files[:] = self.repo.status(ctx.parents()[0].node(),
826 ctx.node())[:3]
826 ctx.node())[:3]
827 return files
827 return files
828 def showfiles(**args):
828 def showfiles(**args):
829 return showlist('file', ctx.files(), **args)
829 return showlist('file', ctx.files(), **args)
830 def showmods(**args):
830 def showmods(**args):
831 return showlist('file_mod', getfiles()[0], **args)
831 return showlist('file_mod', getfiles()[0], **args)
832 def showadds(**args):
832 def showadds(**args):
833 return showlist('file_add', getfiles()[1], **args)
833 return showlist('file_add', getfiles()[1], **args)
834 def showdels(**args):
834 def showdels(**args):
835 return showlist('file_del', getfiles()[2], **args)
835 return showlist('file_del', getfiles()[2], **args)
836 def showmanifest(**args):
836 def showmanifest(**args):
837 args = args.copy()
837 args = args.copy()
838 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
838 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
839 node=hex(ctx.changeset()[0])))
839 node=hex(ctx.changeset()[0])))
840 return self.t('manifest', **args)
840 return self.t('manifest', **args)
841
841
842 def showdiffstat(**args):
842 def showdiffstat(**args):
843 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
843 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
844 files, adds, removes = 0, 0, 0
844 files, adds, removes = 0, 0, 0
845 for i in patch.diffstatdata(util.iterlines(diff)):
845 for i in patch.diffstatdata(util.iterlines(diff)):
846 files += 1
846 files += 1
847 adds += i[1]
847 adds += i[1]
848 removes += i[2]
848 removes += i[2]
849 return '%s: +%s/-%s' % (files, adds, removes)
849 return '%s: +%s/-%s' % (files, adds, removes)
850
850
851 defprops = {
851 defprops = {
852 'author': ctx.user(),
852 'author': ctx.user(),
853 'branches': showbranches,
853 'branches': showbranches,
854 'date': ctx.date(),
854 'date': ctx.date(),
855 'desc': ctx.description().strip(),
855 'desc': ctx.description().strip(),
856 'file_adds': showadds,
856 'file_adds': showadds,
857 'file_dels': showdels,
857 'file_dels': showdels,
858 'file_mods': showmods,
858 'file_mods': showmods,
859 'files': showfiles,
859 'files': showfiles,
860 'file_copies': showcopies,
860 'file_copies': showcopies,
861 'manifest': showmanifest,
861 'manifest': showmanifest,
862 'node': ctx.hex(),
862 'node': ctx.hex(),
863 'parents': showparents,
863 'parents': showparents,
864 'rev': ctx.rev(),
864 'rev': ctx.rev(),
865 'tags': showtags,
865 'tags': showtags,
866 'extras': showextras,
866 'extras': showextras,
867 'diffstat': showdiffstat,
867 'diffstat': showdiffstat,
868 }
868 }
869 props = props.copy()
869 props = props.copy()
870 props.update(defprops)
870 props.update(defprops)
871
871
872 # find correct templates for current mode
872 # find correct templates for current mode
873
873
874 tmplmodes = [
874 tmplmodes = [
875 (True, None),
875 (True, None),
876 (self.ui.verbose, 'verbose'),
876 (self.ui.verbose, 'verbose'),
877 (self.ui.quiet, 'quiet'),
877 (self.ui.quiet, 'quiet'),
878 (self.ui.debugflag, 'debug'),
878 (self.ui.debugflag, 'debug'),
879 ]
879 ]
880
880
881 types = {'header': '', 'changeset': 'changeset'}
881 types = {'header': '', 'changeset': 'changeset'}
882 for mode, postfix in tmplmodes:
882 for mode, postfix in tmplmodes:
883 for type in types:
883 for type in types:
884 cur = postfix and ('%s_%s' % (type, postfix)) or type
884 cur = postfix and ('%s_%s' % (type, postfix)) or type
885 if mode and cur in self.t:
885 if mode and cur in self.t:
886 types[type] = cur
886 types[type] = cur
887
887
888 try:
888 try:
889
889
890 # write header
890 # write header
891 if types['header']:
891 if types['header']:
892 h = templater.stringify(self.t(types['header'], **props))
892 h = templater.stringify(self.t(types['header'], **props))
893 if self.buffered:
893 if self.buffered:
894 self.header[ctx.rev()] = h
894 self.header[ctx.rev()] = h
895 else:
895 else:
896 self.ui.write(h)
896 self.ui.write(h)
897
897
898 # write changeset metadata, then patch if requested
898 # write changeset metadata, then patch if requested
899 key = types['changeset']
899 key = types['changeset']
900 self.ui.write(templater.stringify(self.t(key, **props)))
900 self.ui.write(templater.stringify(self.t(key, **props)))
901 self.showpatch(ctx.node())
901 self.showpatch(ctx.node())
902
902
903 except KeyError, inst:
903 except KeyError, inst:
904 msg = _("%s: no key named '%s'")
904 msg = _("%s: no key named '%s'")
905 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
905 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
906 except SyntaxError, inst:
906 except SyntaxError, inst:
907 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
907 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
908
908
909 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
909 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
910 """show one changeset using template or regular display.
910 """show one changeset using template or regular display.
911
911
912 Display format will be the first non-empty hit of:
912 Display format will be the first non-empty hit of:
913 1. option 'template'
913 1. option 'template'
914 2. option 'style'
914 2. option 'style'
915 3. [ui] setting 'logtemplate'
915 3. [ui] setting 'logtemplate'
916 4. [ui] setting 'style'
916 4. [ui] setting 'style'
917 If all of these values are either the unset or the empty string,
917 If all of these values are either the unset or the empty string,
918 regular display via changeset_printer() is done.
918 regular display via changeset_printer() is done.
919 """
919 """
920 # options
920 # options
921 patch = False
921 patch = False
922 if opts.get('patch'):
922 if opts.get('patch'):
923 patch = matchfn or matchall(repo)
923 patch = matchfn or matchall(repo)
924
924
925 tmpl = opts.get('template')
925 tmpl = opts.get('template')
926 style = None
926 style = None
927 if tmpl:
927 if tmpl:
928 tmpl = templater.parsestring(tmpl, quoted=False)
928 tmpl = templater.parsestring(tmpl, quoted=False)
929 else:
929 else:
930 style = opts.get('style')
930 style = opts.get('style')
931
931
932 # ui settings
932 # ui settings
933 if not (tmpl or style):
933 if not (tmpl or style):
934 tmpl = ui.config('ui', 'logtemplate')
934 tmpl = ui.config('ui', 'logtemplate')
935 if tmpl:
935 if tmpl:
936 tmpl = templater.parsestring(tmpl)
936 tmpl = templater.parsestring(tmpl)
937 else:
937 else:
938 style = ui.config('ui', 'style')
938 style = ui.config('ui', 'style')
939
939
940 if not (tmpl or style):
940 if not (tmpl or style):
941 return changeset_printer(ui, repo, patch, opts, buffered)
941 return changeset_printer(ui, repo, patch, opts, buffered)
942
942
943 mapfile = None
943 mapfile = None
944 if style and not tmpl:
944 if style and not tmpl:
945 mapfile = style
945 mapfile = style
946 if not os.path.split(mapfile)[0]:
946 if not os.path.split(mapfile)[0]:
947 mapname = (templater.templatepath('map-cmdline.' + mapfile)
947 mapname = (templater.templatepath('map-cmdline.' + mapfile)
948 or templater.templatepath(mapfile))
948 or templater.templatepath(mapfile))
949 if mapname: mapfile = mapname
949 if mapname: mapfile = mapname
950
950
951 try:
951 try:
952 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
952 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
953 except SyntaxError, inst:
953 except SyntaxError, inst:
954 raise util.Abort(inst.args[0])
954 raise util.Abort(inst.args[0])
955 if tmpl: t.use_template(tmpl)
955 if tmpl: t.use_template(tmpl)
956 return t
956 return t
957
957
958 def finddate(ui, repo, date):
958 def finddate(ui, repo, date):
959 """Find the tipmost changeset that matches the given date spec"""
959 """Find the tipmost changeset that matches the given date spec"""
960 df = util.matchdate(date)
960 df = util.matchdate(date)
961 get = util.cachefunc(lambda r: repo[r].changeset())
961 get = util.cachefunc(lambda r: repo[r].changeset())
962 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
962 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
963 results = {}
963 results = {}
964 for st, rev, fns in changeiter:
964 for st, rev, fns in changeiter:
965 if st == 'add':
965 if st == 'add':
966 d = get(rev)[2]
966 d = get(rev)[2]
967 if df(d[0]):
967 if df(d[0]):
968 results[rev] = d
968 results[rev] = d
969 elif st == 'iter':
969 elif st == 'iter':
970 if rev in results:
970 if rev in results:
971 ui.status(_("Found revision %s from %s\n") %
971 ui.status(_("Found revision %s from %s\n") %
972 (rev, util.datestr(results[rev])))
972 (rev, util.datestr(results[rev])))
973 return str(rev)
973 return str(rev)
974
974
975 raise util.Abort(_("revision matching date not found"))
975 raise util.Abort(_("revision matching date not found"))
976
976
977 def walkchangerevs(ui, repo, pats, change, opts):
977 def walkchangerevs(ui, repo, pats, change, opts):
978 '''Iterate over files and the revs in which they changed.
978 '''Iterate over files and the revs in which they changed.
979
979
980 Callers most commonly need to iterate backwards over the history
980 Callers most commonly need to iterate backwards over the history
981 in which they are interested. Doing so has awful (quadratic-looking)
981 in which they are interested. Doing so has awful (quadratic-looking)
982 performance, so we use iterators in a "windowed" way.
982 performance, so we use iterators in a "windowed" way.
983
983
984 We walk a window of revisions in the desired order. Within the
984 We walk a window of revisions in the desired order. Within the
985 window, we first walk forwards to gather data, then in the desired
985 window, we first walk forwards to gather data, then in the desired
986 order (usually backwards) to display it.
986 order (usually backwards) to display it.
987
987
988 This function returns an (iterator, matchfn) tuple. The iterator
988 This function returns an (iterator, matchfn) tuple. The iterator
989 yields 3-tuples. They will be of one of the following forms:
989 yields 3-tuples. They will be of one of the following forms:
990
990
991 "window", incrementing, lastrev: stepping through a window,
991 "window", incrementing, lastrev: stepping through a window,
992 positive if walking forwards through revs, last rev in the
992 positive if walking forwards through revs, last rev in the
993 sequence iterated over - use to reset state for the current window
993 sequence iterated over - use to reset state for the current window
994
994
995 "add", rev, fns: out-of-order traversal of the given file names
995 "add", rev, fns: out-of-order traversal of the given file names
996 fns, which changed during revision rev - use to gather data for
996 fns, which changed during revision rev - use to gather data for
997 possible display
997 possible display
998
998
999 "iter", rev, None: in-order traversal of the revs earlier iterated
999 "iter", rev, None: in-order traversal of the revs earlier iterated
1000 over with "add" - use to display data'''
1000 over with "add" - use to display data'''
1001
1001
1002 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1002 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1003 if start < end:
1003 if start < end:
1004 while start < end:
1004 while start < end:
1005 yield start, min(windowsize, end-start)
1005 yield start, min(windowsize, end-start)
1006 start += windowsize
1006 start += windowsize
1007 if windowsize < sizelimit:
1007 if windowsize < sizelimit:
1008 windowsize *= 2
1008 windowsize *= 2
1009 else:
1009 else:
1010 while start > end:
1010 while start > end:
1011 yield start, min(windowsize, start-end-1)
1011 yield start, min(windowsize, start-end-1)
1012 start -= windowsize
1012 start -= windowsize
1013 if windowsize < sizelimit:
1013 if windowsize < sizelimit:
1014 windowsize *= 2
1014 windowsize *= 2
1015
1015
1016 m = match(repo, pats, opts)
1016 m = match(repo, pats, opts)
1017 follow = opts.get('follow') or opts.get('follow_first')
1017 follow = opts.get('follow') or opts.get('follow_first')
1018
1018
1019 if not len(repo):
1019 if not len(repo):
1020 return [], m
1020 return [], m
1021
1021
1022 if follow:
1022 if follow:
1023 defrange = '%s:0' % repo['.'].rev()
1023 defrange = '%s:0' % repo['.'].rev()
1024 else:
1024 else:
1025 defrange = '-1:0'
1025 defrange = '-1:0'
1026 revs = revrange(repo, opts['rev'] or [defrange])
1026 revs = revrange(repo, opts['rev'] or [defrange])
1027 wanted = set()
1027 wanted = set()
1028 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1028 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1029 fncache = {}
1029 fncache = {}
1030
1030
1031 if not slowpath and not m.files():
1031 if not slowpath and not m.files():
1032 # No files, no patterns. Display all revs.
1032 # No files, no patterns. Display all revs.
1033 wanted = set(revs)
1033 wanted = set(revs)
1034 copies = []
1034 copies = []
1035 if not slowpath:
1035 if not slowpath:
1036 # Only files, no patterns. Check the history of each file.
1036 # Only files, no patterns. Check the history of each file.
1037 def filerevgen(filelog, node):
1037 def filerevgen(filelog, node):
1038 cl_count = len(repo)
1038 cl_count = len(repo)
1039 if node is None:
1039 if node is None:
1040 last = len(filelog) - 1
1040 last = len(filelog) - 1
1041 else:
1041 else:
1042 last = filelog.rev(node)
1042 last = filelog.rev(node)
1043 for i, window in increasing_windows(last, nullrev):
1043 for i, window in increasing_windows(last, nullrev):
1044 revs = []
1044 revs = []
1045 for j in xrange(i - window, i + 1):
1045 for j in xrange(i - window, i + 1):
1046 n = filelog.node(j)
1046 n = filelog.node(j)
1047 revs.append((filelog.linkrev(j),
1047 revs.append((filelog.linkrev(j),
1048 follow and filelog.renamed(n)))
1048 follow and filelog.renamed(n)))
1049 for rev in reversed(revs):
1049 for rev in reversed(revs):
1050 # only yield revs for which we have the changelog; missing entries can
1050 # only yield revs for which we have the changelog; missing entries can
1051 # happen while doing "hg log" during a pull or commit
1051 # happen while doing "hg log" during a pull or commit
1052 if rev[0] < cl_count:
1052 if rev[0] < cl_count:
1053 yield rev
1053 yield rev
1054 def iterfiles():
1054 def iterfiles():
1055 for filename in m.files():
1055 for filename in m.files():
1056 yield filename, None
1056 yield filename, None
1057 for filename_node in copies:
1057 for filename_node in copies:
1058 yield filename_node
1058 yield filename_node
1059 minrev, maxrev = min(revs), max(revs)
1059 minrev, maxrev = min(revs), max(revs)
1060 for file_, node in iterfiles():
1060 for file_, node in iterfiles():
1061 filelog = repo.file(file_)
1061 filelog = repo.file(file_)
1062 if not len(filelog):
1062 if not len(filelog):
1063 if node is None:
1063 if node is None:
1064 # A zero count may be a directory or deleted file, so
1064 # A zero count may be a directory or deleted file, so
1065 # try to find matching entries on the slow path.
1065 # try to find matching entries on the slow path.
1066 if follow:
1066 if follow:
1067 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1067 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1068 slowpath = True
1068 slowpath = True
1069 break
1069 break
1070 else:
1070 else:
1071 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1071 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1072 % (file_, short(node)))
1072 % (file_, short(node)))
1073 continue
1073 continue
1074 for rev, copied in filerevgen(filelog, node):
1074 for rev, copied in filerevgen(filelog, node):
1075 if rev <= maxrev:
1075 if rev <= maxrev:
1076 if rev < minrev:
1076 if rev < minrev:
1077 break
1077 break
1078 fncache.setdefault(rev, [])
1078 fncache.setdefault(rev, [])
1079 fncache[rev].append(file_)
1079 fncache[rev].append(file_)
1080 wanted.add(rev)
1080 wanted.add(rev)
1081 if follow and copied:
1081 if follow and copied:
1082 copies.append(copied)
1082 copies.append(copied)
1083 if slowpath:
1083 if slowpath:
1084 if follow:
1084 if follow:
1085 raise util.Abort(_('can only follow copies/renames for explicit '
1085 raise util.Abort(_('can only follow copies/renames for explicit '
1086 'file names'))
1086 'file names'))
1087
1087
1088 # The slow path checks files modified in every changeset.
1088 # The slow path checks files modified in every changeset.
1089 def changerevgen():
1089 def changerevgen():
1090 for i, window in increasing_windows(len(repo) - 1, nullrev):
1090 for i, window in increasing_windows(len(repo) - 1, nullrev):
1091 for j in xrange(i - window, i + 1):
1091 for j in xrange(i - window, i + 1):
1092 yield j, change(j)[3]
1092 yield j, change(j)[3]
1093
1093
1094 for rev, changefiles in changerevgen():
1094 for rev, changefiles in changerevgen():
1095 matches = filter(m, changefiles)
1095 matches = filter(m, changefiles)
1096 if matches:
1096 if matches:
1097 fncache[rev] = matches
1097 fncache[rev] = matches
1098 wanted.add(rev)
1098 wanted.add(rev)
1099
1099
1100 class followfilter:
1100 class followfilter:
1101 def __init__(self, onlyfirst=False):
1101 def __init__(self, onlyfirst=False):
1102 self.startrev = nullrev
1102 self.startrev = nullrev
1103 self.roots = []
1103 self.roots = []
1104 self.onlyfirst = onlyfirst
1104 self.onlyfirst = onlyfirst
1105
1105
1106 def match(self, rev):
1106 def match(self, rev):
1107 def realparents(rev):
1107 def realparents(rev):
1108 if self.onlyfirst:
1108 if self.onlyfirst:
1109 return repo.changelog.parentrevs(rev)[0:1]
1109 return repo.changelog.parentrevs(rev)[0:1]
1110 else:
1110 else:
1111 return filter(lambda x: x != nullrev,
1111 return filter(lambda x: x != nullrev,
1112 repo.changelog.parentrevs(rev))
1112 repo.changelog.parentrevs(rev))
1113
1113
1114 if self.startrev == nullrev:
1114 if self.startrev == nullrev:
1115 self.startrev = rev
1115 self.startrev = rev
1116 return True
1116 return True
1117
1117
1118 if rev > self.startrev:
1118 if rev > self.startrev:
1119 # forward: all descendants
1119 # forward: all descendants
1120 if not self.roots:
1120 if not self.roots:
1121 self.roots.append(self.startrev)
1121 self.roots.append(self.startrev)
1122 for parent in realparents(rev):
1122 for parent in realparents(rev):
1123 if parent in self.roots:
1123 if parent in self.roots:
1124 self.roots.append(rev)
1124 self.roots.append(rev)
1125 return True
1125 return True
1126 else:
1126 else:
1127 # backwards: all parents
1127 # backwards: all parents
1128 if not self.roots:
1128 if not self.roots:
1129 self.roots.extend(realparents(self.startrev))
1129 self.roots.extend(realparents(self.startrev))
1130 if rev in self.roots:
1130 if rev in self.roots:
1131 self.roots.remove(rev)
1131 self.roots.remove(rev)
1132 self.roots.extend(realparents(rev))
1132 self.roots.extend(realparents(rev))
1133 return True
1133 return True
1134
1134
1135 return False
1135 return False
1136
1136
1137 # it might be worthwhile to do this in the iterator if the rev range
1137 # it might be worthwhile to do this in the iterator if the rev range
1138 # is descending and the prune args are all within that range
1138 # is descending and the prune args are all within that range
1139 for rev in opts.get('prune', ()):
1139 for rev in opts.get('prune', ()):
1140 rev = repo.changelog.rev(repo.lookup(rev))
1140 rev = repo.changelog.rev(repo.lookup(rev))
1141 ff = followfilter()
1141 ff = followfilter()
1142 stop = min(revs[0], revs[-1])
1142 stop = min(revs[0], revs[-1])
1143 for x in xrange(rev, stop-1, -1):
1143 for x in xrange(rev, stop-1, -1):
1144 if ff.match(x):
1144 if ff.match(x):
1145 wanted.discard(x)
1145 wanted.discard(x)
1146
1146
1147 def iterate():
1147 def iterate():
1148 if follow and not m.files():
1148 if follow and not m.files():
1149 ff = followfilter(onlyfirst=opts.get('follow_first'))
1149 ff = followfilter(onlyfirst=opts.get('follow_first'))
1150 def want(rev):
1150 def want(rev):
1151 return ff.match(rev) and rev in wanted
1151 return ff.match(rev) and rev in wanted
1152 else:
1152 else:
1153 def want(rev):
1153 def want(rev):
1154 return rev in wanted
1154 return rev in wanted
1155
1155
1156 for i, window in increasing_windows(0, len(revs)):
1156 for i, window in increasing_windows(0, len(revs)):
1157 yield 'window', revs[0] < revs[-1], revs[-1]
1157 yield 'window', revs[0] < revs[-1], revs[-1]
1158 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1158 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1159 for rev in sorted(nrevs):
1159 for rev in sorted(nrevs):
1160 fns = fncache.get(rev)
1160 fns = fncache.get(rev)
1161 if not fns:
1161 if not fns:
1162 def fns_generator():
1162 def fns_generator():
1163 for f in change(rev)[3]:
1163 for f in change(rev)[3]:
1164 if m(f):
1164 if m(f):
1165 yield f
1165 yield f
1166 fns = fns_generator()
1166 fns = fns_generator()
1167 yield 'add', rev, fns
1167 yield 'add', rev, fns
1168 for rev in nrevs:
1168 for rev in nrevs:
1169 yield 'iter', rev, None
1169 yield 'iter', rev, None
1170 return iterate(), m
1170 return iterate(), m
1171
1171
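To make the windowed walk concrete, here is the window generator from walkchangerevs reproduced as a standalone copy plus a small driver; only the final assertion is new. Walking forward, the window width doubles until the size limit or the end of the range caps it.

def increasing_windows(start, end, windowsize=8, sizelimit=512):
    # standalone copy of the nested helper in walkchangerevs above
    if start < end:
        while start < end:
            yield start, min(windowsize, end - start)
            start += windowsize
            if windowsize < sizelimit:
                windowsize *= 2
    else:
        while start > end:
            yield start, min(windowsize, start - end - 1)
            start -= windowsize
            if windowsize < sizelimit:
                windowsize *= 2

# over 40 revisions walked forward, the (start, width) windows grow until
# the remaining range caps them:
assert list(increasing_windows(0, 40)) == [(0, 8), (8, 16), (24, 16)]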
1172 def commit(ui, repo, commitfunc, pats, opts):
1172 def commit(ui, repo, commitfunc, pats, opts):
1173 '''commit the specified files or all outstanding changes'''
1173 '''commit the specified files or all outstanding changes'''
1174 date = opts.get('date')
1174 date = opts.get('date')
1175 if date:
1175 if date:
1176 opts['date'] = util.parsedate(date)
1176 opts['date'] = util.parsedate(date)
1177 message = logmessage(opts)
1177 message = logmessage(opts)
1178
1178
1179 # extract addremove carefully -- this function can be called from a command
1179 # extract addremove carefully -- this function can be called from a command
1180 # that doesn't support addremove
1180 # that doesn't support addremove
1181 if opts.get('addremove'):
1181 if opts.get('addremove'):
1182 addremove(repo, pats, opts)
1182 addremove(repo, pats, opts)
1183
1183
1184 m = match(repo, pats, opts)
1184 m = match(repo, pats, opts)
1185 if pats:
1185 if pats:
1186 modified, added, removed = repo.status(match=m)[:3]
1186 modified, added, removed = repo.status(match=m)[:3]
1187 files = sorted(modified + added + removed)
1187 files = sorted(modified + added + removed)
1188
1188
1189 def is_dir(f):
1189 def is_dir(f):
1190 name = f + '/'
1190 name = f + '/'
1191 i = bisect.bisect(files, name)
1191 i = bisect.bisect(files, name)
1192 return i < len(files) and files[i].startswith(name)
1192 return i < len(files) and files[i].startswith(name)
1193
1193
1194 for f in m.files():
1194 for f in m.files():
1195 if f == '.':
1195 if f == '.':
1196 continue
1196 continue
1197 if f not in files:
1197 if f not in files:
1198 rf = repo.wjoin(f)
1198 rf = repo.wjoin(f)
1199 rel = repo.pathto(f)
1199 rel = repo.pathto(f)
1200 try:
1200 try:
1201 mode = os.lstat(rf)[stat.ST_MODE]
1201 mode = os.lstat(rf)[stat.ST_MODE]
1202 except OSError:
1202 except OSError:
1203 if is_dir(f): # deleted directory ?
1203 if is_dir(f): # deleted directory ?
1204 continue
1204 continue
1205 raise util.Abort(_("file %s not found!") % rel)
1205 raise util.Abort(_("file %s not found!") % rel)
1206 if stat.S_ISDIR(mode):
1206 if stat.S_ISDIR(mode):
1207 if not is_dir(f):
1207 if not is_dir(f):
1208 raise util.Abort(_("no match under directory %s!")
1208 raise util.Abort(_("no match under directory %s!")
1209 % rel)
1209 % rel)
1210 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1210 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1211 raise util.Abort(_("can't commit %s: "
1211 raise util.Abort(_("can't commit %s: "
1212 "unsupported file type!") % rel)
1212 "unsupported file type!") % rel)
1213 elif f not in repo.dirstate:
1213 elif f not in repo.dirstate:
1214 raise util.Abort(_("file %s not tracked!") % rel)
1214 raise util.Abort(_("file %s not tracked!") % rel)
1215 m = matchfiles(repo, files)
1215 m = matchfiles(repo, files)
1216 try:
1216 try:
1217 return commitfunc(ui, repo, message, m, opts)
1217 return commitfunc(ui, repo, message, m, opts)
1218 except ValueError, inst:
1218 except ValueError, inst:
1219 raise util.Abort(str(inst))
1219 raise util.Abort(str(inst))
1220
1220
1221 def commiteditor(repo, ctx, added, updated, removed):
1221 def commiteditor(repo, ctx, added, updated, removed):
1222 if ctx.description():
1222 if ctx.description():
1223 return ctx.description()
1223 return ctx.description()
1224 return commitforceeditor(repo, ctx, added, updated, removed)
1224 return commitforceeditor(repo, ctx, added, updated, removed)
1225
1225
1226 def commitforceeditor(repo, ctx, added, updated, removed):
1226 def commitforceeditor(repo, ctx, added, updated, removed):
1227 edittext = []
1227 edittext = []
1228 if ctx.description():
1228 if ctx.description():
1229 edittext.append(ctx.description())
1229 edittext.append(ctx.description())
1230 edittext.append("")
1230 edittext.append("")
1231 edittext.append("") # Empty line between message and comments.
1231 edittext.append("") # Empty line between message and comments.
1232 edittext.append(_("HG: Enter commit message."
1232 edittext.append(_("HG: Enter commit message."
1233 " Lines beginning with 'HG:' are removed."))
1233 " Lines beginning with 'HG:' are removed."))
1234 edittext.append(_("HG: Leave message empty to abort commit."))
1234 edittext.append(_("HG: Leave message empty to abort commit."))
1235 edittext.append("HG: --")
1235 edittext.append("HG: --")
1236 edittext.append(_("HG: user: %s") % ctx.user())
1236 edittext.append(_("HG: user: %s") % ctx.user())
1237 if ctx.p2():
1237 if ctx.p2():
1238 edittext.append(_("HG: branch merge"))
1238 edittext.append(_("HG: branch merge"))
1239 if ctx.branch():
1239 if ctx.branch():
1240 edittext.append(_("HG: branch '%s'")
1240 edittext.append(_("HG: branch '%s'")
1241 % encoding.tolocal(ctx.branch()))
1241 % encoding.tolocal(ctx.branch()))
1242 edittext.extend([_("HG: added %s") % f for f in added])
1242 edittext.extend([_("HG: added %s") % f for f in added])
1243 edittext.extend([_("HG: changed %s") % f for f in updated])
1243 edittext.extend([_("HG: changed %s") % f for f in updated])
1244 edittext.extend([_("HG: removed %s") % f for f in removed])
1244 edittext.extend([_("HG: removed %s") % f for f in removed])
1245 if not added and not updated and not removed:
1245 if not added and not updated and not removed:
1246 edittext.append(_("HG: no files changed"))
1246 edittext.append(_("HG: no files changed"))
1247 edittext.append("")
1247 edittext.append("")
1248 # run editor in the repository root
1248 # run editor in the repository root
1249 olddir = os.getcwd()
1249 olddir = os.getcwd()
1250 os.chdir(repo.root)
1250 os.chdir(repo.root)
1251 text = repo.ui.edit("\n".join(edittext), ctx.user())
1251 text = repo.ui.edit("\n".join(edittext), ctx.user())
1252 text = re.sub("(?m)^HG:.*\n", "", text)
1252 text = re.sub("(?m)^HG:.*\n", "", text)
1253 os.chdir(olddir)
1253 os.chdir(olddir)
1254
1254
1255 if not text.strip():
1255 if not text.strip():
1256 raise util.Abort(_("empty commit message"))
1256 raise util.Abort(_("empty commit message"))
1257
1257
1258 return text
1258 return text
@@ -1,55 +1,59 b''
1 # match.py - file name matching
1 # match.py - file name matching
2 #
2 #
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 import util
8 import util
9
9
10 class _match(object):
10 class _match(object):
11 def __init__(self, root, cwd, files, mf, ap):
11 def __init__(self, root, cwd, files, mf, ap):
12 self._root = root
12 self._root = root
13 self._cwd = cwd
13 self._cwd = cwd
14 self._files = files
14 self._files = files
15 self._fmap = set(files)
15 self._fmap = set(files)
16 self.matchfn = mf
16 self.matchfn = mf
17 self._anypats = ap
17 self._anypats = ap
18 def __call__(self, fn):
18 def __call__(self, fn):
19 return self.matchfn(fn)
19 return self.matchfn(fn)
20 def __iter__(self):
20 def __iter__(self):
21 for f in self._files:
21 for f in self._files:
22 yield f
22 yield f
23 def bad(self, f, msg):
23 def bad(self, f, msg):
24 return True
24 return True
25 def dir(self, f):
25 def dir(self, f):
26 pass
26 pass
27 def missing(self, f):
27 def missing(self, f):
28 pass
28 pass
29 def exact(self, f):
29 def exact(self, f):
30 return f in self._fmap
30 return f in self._fmap
31 def rel(self, f):
31 def rel(self, f):
32 return util.pathto(self._root, self._cwd, f)
32 return util.pathto(self._root, self._cwd, f)
33 def files(self):
33 def files(self):
34 return self._files
34 return self._files
35 def anypats(self):
35 def anypats(self):
36 return self._anypats
36 return self._anypats
37
37
38 class always(_match):
38 class always(_match):
39 def __init__(self, root, cwd):
39 def __init__(self, root, cwd):
40 _match.__init__(self, root, cwd, [], lambda f: True, False)
40 _match.__init__(self, root, cwd, [], lambda f: True, False)
41
41
42 class never(_match):
42 class never(_match):
43 def __init__(self, root, cwd):
43 def __init__(self, root, cwd):
44 _match.__init__(self, root, cwd, [], lambda f: False, False)
44 _match.__init__(self, root, cwd, [], lambda f: False, False)
45
45
46 class exact(_match):
46 class exact(_match):
47 def __init__(self, root, cwd, files):
47 def __init__(self, root, cwd, files):
48 _match.__init__(self, root, cwd, files, self.exact, False)
48 _match.__init__(self, root, cwd, files, self.exact, False)
49
49
50 class match(_match):
50 class match(_match):
51 def __init__(self, root, cwd, patterns, include=[], exclude=[],
51 def __init__(self, root, cwd, patterns, include=[], exclude=[],
52 default='glob'):
52 default='glob'):
53 f, mf, ap = util.matcher(root, cwd, patterns, include, exclude,
53 f, mf, ap = util.matcher(root, cwd, patterns, include, exclude,
54 default)
54 default)
55 _match.__init__(self, root, cwd, f, mf, ap)
55 _match.__init__(self, root, cwd, f, mf, ap)
56
57 def patkind(pat):
58 return util._patsplit(pat, None)[0]
59
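A minimal usage sketch of the new patkind() helper, assuming the mercurial package is importable; it returns the explicit kind prefix of a pattern, or None when no prefix is given.

from mercurial import match as matchmod

assert matchmod.patkind('re:foo.*') == 're'
assert matchmod.patkind('glob:*.c') == 'glob'
assert matchmod.patkind('README') is None    # no explicit kind prefix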
@@ -1,1455 +1,1455 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2, incorporated herein by reference.
8 # GNU General Public License version 2, incorporated herein by reference.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil
17 import error, osutil
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, glob, random
19 import os, stat, time, calendar, glob, random
20 import imp
20 import imp
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 try:
31 try:
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 except ImportError:
33 except ImportError:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
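The lazy-import trick above (a function that rebinds its own global name on the first call, so later calls skip the wrapper and the import entirely) is a general idiom. A self-contained sketch of the same pattern; fastsqrt is an illustrative name, not part of Mercurial.

import math

def fastsqrt(x):
    # first call: do the potentially costly import, then rebind the global
    # name so that subsequent calls go straight to math.sqrt
    from math import sqrt
    global fastsqrt
    fastsqrt = sqrt
    return sqrt(x)

assert fastsqrt(4.0) == 2.0      # first call rebinds fastsqrt
assert fastsqrt is math.sqrt     # ...so later calls bypass this wrapper
assert fastsqrt(9.0) == 3.0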
39 import subprocess
39 import subprocess
40 closefds = os.name == 'posix'
40 closefds = os.name == 'posix'
41 def popen2(cmd):
41 def popen2(cmd):
42 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
42 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
43 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
43 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
44 return p.stdin, p.stdout
44 return p.stdin, p.stdout
45 def popen3(cmd):
45 def popen3(cmd):
46 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
46 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
47 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
47 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
48 stderr=subprocess.PIPE)
48 stderr=subprocess.PIPE)
49 return p.stdin, p.stdout, p.stderr
49 return p.stdin, p.stdout, p.stderr
50
50
51 def version():
51 def version():
52 """Return version information if available."""
52 """Return version information if available."""
53 try:
53 try:
54 import __version__
54 import __version__
55 return __version__.version
55 return __version__.version
56 except ImportError:
56 except ImportError:
57 return 'unknown'
57 return 'unknown'
58
58
59 # used by parsedate
59 # used by parsedate
60 defaultdateformats = (
60 defaultdateformats = (
61 '%Y-%m-%d %H:%M:%S',
61 '%Y-%m-%d %H:%M:%S',
62 '%Y-%m-%d %I:%M:%S%p',
62 '%Y-%m-%d %I:%M:%S%p',
63 '%Y-%m-%d %H:%M',
63 '%Y-%m-%d %H:%M',
64 '%Y-%m-%d %I:%M%p',
64 '%Y-%m-%d %I:%M%p',
65 '%Y-%m-%d',
65 '%Y-%m-%d',
66 '%m-%d',
66 '%m-%d',
67 '%m/%d',
67 '%m/%d',
68 '%m/%d/%y',
68 '%m/%d/%y',
69 '%m/%d/%Y',
69 '%m/%d/%Y',
70 '%a %b %d %H:%M:%S %Y',
70 '%a %b %d %H:%M:%S %Y',
71 '%a %b %d %I:%M:%S%p %Y',
71 '%a %b %d %I:%M:%S%p %Y',
72 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
72 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
73 '%b %d %H:%M:%S %Y',
73 '%b %d %H:%M:%S %Y',
74 '%b %d %I:%M:%S%p %Y',
74 '%b %d %I:%M:%S%p %Y',
75 '%b %d %H:%M:%S',
75 '%b %d %H:%M:%S',
76 '%b %d %I:%M:%S%p',
76 '%b %d %I:%M:%S%p',
77 '%b %d %H:%M',
77 '%b %d %H:%M',
78 '%b %d %I:%M%p',
78 '%b %d %I:%M%p',
79 '%b %d %Y',
79 '%b %d %Y',
80 '%b %d',
80 '%b %d',
81 '%H:%M:%S',
81 '%H:%M:%S',
82 '%I:%M:%SP',
82 '%I:%M:%SP',
83 '%H:%M',
83 '%H:%M',
84 '%I:%M%p',
84 '%I:%M%p',
85 )
85 )
86
86
87 extendeddateformats = defaultdateformats + (
87 extendeddateformats = defaultdateformats + (
88 "%Y",
88 "%Y",
89 "%Y-%m",
89 "%Y-%m",
90 "%b",
90 "%b",
91 "%b %Y",
91 "%b %Y",
92 )
92 )
93
93
94 def cachefunc(func):
94 def cachefunc(func):
95 '''cache the result of function calls'''
95 '''cache the result of function calls'''
96 # XXX doesn't handle keyword args
96 # XXX doesn't handle keyword args
97 cache = {}
97 cache = {}
98 if func.func_code.co_argcount == 1:
98 if func.func_code.co_argcount == 1:
99 # we gain a small amount of time because
99 # we gain a small amount of time because
100 # we don't need to pack/unpack the list
100 # we don't need to pack/unpack the list
101 def f(arg):
101 def f(arg):
102 if arg not in cache:
102 if arg not in cache:
103 cache[arg] = func(arg)
103 cache[arg] = func(arg)
104 return cache[arg]
104 return cache[arg]
105 else:
105 else:
106 def f(*args):
106 def f(*args):
107 if args not in cache:
107 if args not in cache:
108 cache[args] = func(*args)
108 cache[args] = func(*args)
109 return cache[args]
109 return cache[args]
110
110
111 return f
111 return f
112
112
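A short usage sketch of cachefunc, assuming the mercurial package is importable; the wrapped callable and the call counter are illustrative only.

from mercurial import util

calls = [0]
def slow_double(x):
    calls[0] += 1
    return x * 2

fast_double = util.cachefunc(slow_double)
assert fast_double(21) == 42
assert fast_double(21) == 42     # second call is served from the cache
assert calls[0] == 1             # the wrapped function ran only once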
113 class propertycache(object):
113 class propertycache(object):
114 def __init__(self, func):
114 def __init__(self, func):
115 self.func = func
115 self.func = func
116 self.name = func.__name__
116 self.name = func.__name__
117 def __get__(self, obj, type=None):
117 def __get__(self, obj, type=None):
118 result = self.func(obj)
118 result = self.func(obj)
119 setattr(obj, self.name, result)
119 setattr(obj, self.name, result)
120 return result
120 return result
121
121
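propertycache is a non-data descriptor: the first attribute access runs the decorated function and then stores the result on the instance, shadowing the descriptor for later reads. A usage sketch, with an illustrative Example class and mercurial.util assumed importable.

from mercurial import util

class Example(object):
    def __init__(self):
        self.computed = 0
    @util.propertycache
    def answer(self):
        self.computed += 1
        return 42

e = Example()
assert e.answer == 42
assert e.answer == 42            # now read from the instance attribute
assert e.computed == 1           # the function body ran exactly once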
122 def pipefilter(s, cmd):
122 def pipefilter(s, cmd):
123 '''filter string S through command CMD, returning its output'''
123 '''filter string S through command CMD, returning its output'''
124 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
124 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
125 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
125 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
126 pout, perr = p.communicate(s)
126 pout, perr = p.communicate(s)
127 return pout
127 return pout
128
128
129 def tempfilter(s, cmd):
129 def tempfilter(s, cmd):
130 '''filter string S through a pair of temporary files with CMD.
130 '''filter string S through a pair of temporary files with CMD.
131 CMD is used as a template to create the real command to be run,
131 CMD is used as a template to create the real command to be run,
132 with the strings INFILE and OUTFILE replaced by the real names of
132 with the strings INFILE and OUTFILE replaced by the real names of
133 the temporary files generated.'''
133 the temporary files generated.'''
134 inname, outname = None, None
134 inname, outname = None, None
135 try:
135 try:
136 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
136 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
137 fp = os.fdopen(infd, 'wb')
137 fp = os.fdopen(infd, 'wb')
138 fp.write(s)
138 fp.write(s)
139 fp.close()
139 fp.close()
140 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
140 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
141 os.close(outfd)
141 os.close(outfd)
142 cmd = cmd.replace('INFILE', inname)
142 cmd = cmd.replace('INFILE', inname)
143 cmd = cmd.replace('OUTFILE', outname)
143 cmd = cmd.replace('OUTFILE', outname)
144 code = os.system(cmd)
144 code = os.system(cmd)
145 if sys.platform == 'OpenVMS' and code & 1:
145 if sys.platform == 'OpenVMS' and code & 1:
146 code = 0
146 code = 0
147 if code: raise Abort(_("command '%s' failed: %s") %
147 if code: raise Abort(_("command '%s' failed: %s") %
148 (cmd, explain_exit(code)))
148 (cmd, explain_exit(code)))
149 return open(outname, 'rb').read()
149 return open(outname, 'rb').read()
150 finally:
150 finally:
151 try:
151 try:
152 if inname: os.unlink(inname)
152 if inname: os.unlink(inname)
153 except: pass
153 except: pass
154 try:
154 try:
155 if outname: os.unlink(outname)
155 if outname: os.unlink(outname)
156 except: pass
156 except: pass
157
157
158 filtertable = {
158 filtertable = {
159 'tempfile:': tempfilter,
159 'tempfile:': tempfilter,
160 'pipe:': pipefilter,
160 'pipe:': pipefilter,
161 }
161 }
162
162
163 def filter(s, cmd):
163 def filter(s, cmd):
164 "filter a string through a command that transforms its input to its output"
164 "filter a string through a command that transforms its input to its output"
165 for name, fn in filtertable.iteritems():
165 for name, fn in filtertable.iteritems():
166 if cmd.startswith(name):
166 if cmd.startswith(name):
167 return fn(s, cmd[len(name):].lstrip())
167 return fn(s, cmd[len(name):].lstrip())
168 return pipefilter(s, cmd)
168 return pipefilter(s, cmd)
169
169
170 def binary(s):
170 def binary(s):
171 """return true if a string is binary data"""
171 """return true if a string is binary data"""
172 return bool(s and '\0' in s)
172 return bool(s and '\0' in s)
173
173
174 def increasingchunks(source, min=1024, max=65536):
174 def increasingchunks(source, min=1024, max=65536):
175 '''return no less than min bytes per chunk while data remains,
175 '''return no less than min bytes per chunk while data remains,
176 doubling min after each chunk until it reaches max'''
176 doubling min after each chunk until it reaches max'''
177 def log2(x):
177 def log2(x):
178 if not x:
178 if not x:
179 return 0
179 return 0
180 i = 0
180 i = 0
181 while x:
181 while x:
182 x >>= 1
182 x >>= 1
183 i += 1
183 i += 1
184 return i - 1
184 return i - 1
185
185
186 buf = []
186 buf = []
187 blen = 0
187 blen = 0
188 for chunk in source:
188 for chunk in source:
189 buf.append(chunk)
189 buf.append(chunk)
190 blen += len(chunk)
190 blen += len(chunk)
191 if blen >= min:
191 if blen >= min:
192 if min < max:
192 if min < max:
193 min = min << 1
193 min = min << 1
194 nmin = 1 << log2(blen)
194 nmin = 1 << log2(blen)
195 if nmin > min:
195 if nmin > min:
196 min = nmin
196 min = nmin
197 if min > max:
197 if min > max:
198 min = max
198 min = max
199 yield ''.join(buf)
199 yield ''.join(buf)
200 blen = 0
200 blen = 0
201 buf = []
201 buf = []
202 if buf:
202 if buf:
203 yield ''.join(buf)
203 yield ''.join(buf)
204
204
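A usage sketch of increasingchunks, assuming mercurial.util is importable; it coalesces many small chunks from an iterable into progressively larger strings without reordering or losing data.

from mercurial import util

# 1000 one-byte chunks come back out as a handful of growing buffers
chunks = list(util.increasingchunks(['x'] * 1000, min=8, max=256))
assert ''.join(chunks) == 'x' * 1000            # nothing lost or reordered
assert len(chunks) < 1000                       # far fewer, larger pieces
assert min(len(c) for c in chunks[:-1]) >= 8    # all but the last meet the minimum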
205 Abort = error.Abort
205 Abort = error.Abort
206
206
207 def always(fn): return True
207 def always(fn): return True
208 def never(fn): return False
208 def never(fn): return False
209
209
210 def patkind(name, default):
210 def _patsplit(pat, default):
211 """Split a string into an optional pattern kind prefix and the
211 """Split a string into an optional pattern kind prefix and the
212 actual pattern."""
212 actual pattern."""
213 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
213 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
214 if name.startswith(prefix + ':'): return name.split(':', 1)
214 if pat.startswith(prefix + ':'): return pat.split(':', 1)
215 return default, name
215 return default, pat
216
216
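A usage sketch of _patsplit (the helper this changeset renames from patkind), assuming mercurial.util is importable; an explicit 'kind:' prefix is split off, otherwise the caller's default kind is used.

from mercurial import util

kind, pat = util._patsplit('re:foo.*bar', 'glob')
assert (kind, pat) == ('re', 'foo.*bar')
kind, pat = util._patsplit('src/main.c', 'glob')
assert (kind, pat) == ('glob', 'src/main.c')    # no prefix: fall back to the default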
217 def globre(pat, head='^', tail='$'):
217 def globre(pat, head='^', tail='$'):
218 "convert a glob pattern into a regexp"
218 "convert a glob pattern into a regexp"
219 i, n = 0, len(pat)
219 i, n = 0, len(pat)
220 res = ''
220 res = ''
221 group = 0
221 group = 0
222 def peek(): return i < n and pat[i]
222 def peek(): return i < n and pat[i]
223 while i < n:
223 while i < n:
224 c = pat[i]
224 c = pat[i]
225 i = i+1
225 i = i+1
226 if c == '*':
226 if c == '*':
227 if peek() == '*':
227 if peek() == '*':
228 i += 1
228 i += 1
229 res += '.*'
229 res += '.*'
230 else:
230 else:
231 res += '[^/]*'
231 res += '[^/]*'
232 elif c == '?':
232 elif c == '?':
233 res += '.'
233 res += '.'
234 elif c == '[':
234 elif c == '[':
235 j = i
235 j = i
236 if j < n and pat[j] in '!]':
236 if j < n and pat[j] in '!]':
237 j += 1
237 j += 1
238 while j < n and pat[j] != ']':
238 while j < n and pat[j] != ']':
239 j += 1
239 j += 1
240 if j >= n:
240 if j >= n:
241 res += '\\['
241 res += '\\['
242 else:
242 else:
243 stuff = pat[i:j].replace('\\','\\\\')
243 stuff = pat[i:j].replace('\\','\\\\')
244 i = j + 1
244 i = j + 1
245 if stuff[0] == '!':
245 if stuff[0] == '!':
246 stuff = '^' + stuff[1:]
246 stuff = '^' + stuff[1:]
247 elif stuff[0] == '^':
247 elif stuff[0] == '^':
248 stuff = '\\' + stuff
248 stuff = '\\' + stuff
249 res = '%s[%s]' % (res, stuff)
249 res = '%s[%s]' % (res, stuff)
250 elif c == '{':
250 elif c == '{':
251 group += 1
251 group += 1
252 res += '(?:'
252 res += '(?:'
253 elif c == '}' and group:
253 elif c == '}' and group:
254 res += ')'
254 res += ')'
255 group -= 1
255 group -= 1
256 elif c == ',' and group:
256 elif c == ',' and group:
257 res += '|'
257 res += '|'
258 elif c == '\\':
258 elif c == '\\':
259 p = peek()
259 p = peek()
260 if p:
260 if p:
261 i += 1
261 i += 1
262 res += re.escape(p)
262 res += re.escape(p)
263 else:
263 else:
264 res += re.escape(c)
264 res += re.escape(c)
265 else:
265 else:
266 res += re.escape(c)
266 res += re.escape(c)
267 return head + res + tail
267 return head + res + tail
268
268
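A usage sketch of globre, assuming mercurial.util is importable; it turns shell-style globs, including ** and {a,b} alternation, into anchored regular expressions.

import re
from mercurial import util

rx = re.compile(util.globre('src/*.{c,h}'))
assert rx.match('src/main.c')
assert rx.match('src/defs.h')
assert not rx.match('src/sub/main.c')       # '*' does not cross '/'

deep = re.compile(util.globre('src/**/*.py'))
assert deep.match('src/a/b/mod.py')         # '**' matches across directories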
269 _globchars = set('[{*?')
269 _globchars = set('[{*?')
270
270
271 def pathto(root, n1, n2):
271 def pathto(root, n1, n2):
272 '''return the relative path from one place to another.
272 '''return the relative path from one place to another.
273 root should use os.sep to separate directories
273 root should use os.sep to separate directories
274 n1 should use os.sep to separate directories
274 n1 should use os.sep to separate directories
275 n2 should use "/" to separate directories
275 n2 should use "/" to separate directories
276 returns an os.sep-separated path.
276 returns an os.sep-separated path.
277
277
278 If n1 is a relative path, it is assumed to be
278 If n1 is a relative path, it is assumed to be
279 relative to root.
279 relative to root.
280 n2 should always be relative to root.
280 n2 should always be relative to root.
281 '''
281 '''
282 if not n1: return localpath(n2)
282 if not n1: return localpath(n2)
283 if os.path.isabs(n1):
283 if os.path.isabs(n1):
284 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
284 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
285 return os.path.join(root, localpath(n2))
285 return os.path.join(root, localpath(n2))
286 n2 = '/'.join((pconvert(root), n2))
286 n2 = '/'.join((pconvert(root), n2))
287 a, b = splitpath(n1), n2.split('/')
287 a, b = splitpath(n1), n2.split('/')
288 a.reverse()
288 a.reverse()
289 b.reverse()
289 b.reverse()
290 while a and b and a[-1] == b[-1]:
290 while a and b and a[-1] == b[-1]:
291 a.pop()
291 a.pop()
292 b.pop()
292 b.pop()
293 b.reverse()
293 b.reverse()
294 return os.sep.join((['..'] * len(a)) + b) or '.'
294 return os.sep.join((['..'] * len(a)) + b) or '.'
295
295
296 def canonpath(root, cwd, myname):
296 def canonpath(root, cwd, myname):
297 """return the canonical path of myname, given cwd and root"""
297 """return the canonical path of myname, given cwd and root"""
298 if root == os.sep:
298 if root == os.sep:
299 rootsep = os.sep
299 rootsep = os.sep
300 elif endswithsep(root):
300 elif endswithsep(root):
301 rootsep = root
301 rootsep = root
302 else:
302 else:
303 rootsep = root + os.sep
303 rootsep = root + os.sep
304 name = myname
304 name = myname
305 if not os.path.isabs(name):
305 if not os.path.isabs(name):
306 name = os.path.join(root, cwd, name)
306 name = os.path.join(root, cwd, name)
307 name = os.path.normpath(name)
307 name = os.path.normpath(name)
308 audit_path = path_auditor(root)
308 audit_path = path_auditor(root)
309 if name != rootsep and name.startswith(rootsep):
309 if name != rootsep and name.startswith(rootsep):
310 name = name[len(rootsep):]
310 name = name[len(rootsep):]
311 audit_path(name)
311 audit_path(name)
312 return pconvert(name)
312 return pconvert(name)
313 elif name == root:
313 elif name == root:
314 return ''
314 return ''
315 else:
315 else:
316 # Determine whether `name' is in the hierarchy at or beneath `root',
316 # Determine whether `name' is in the hierarchy at or beneath `root',
317 # by iterating name=dirname(name) until that causes no change (can't
317 # by iterating name=dirname(name) until that causes no change (can't
318 # check name == '/', because that doesn't work on windows). For each
318 # check name == '/', because that doesn't work on windows). For each
319 # `name', compare dev/inode numbers. If they match, the list `rel'
319 # `name', compare dev/inode numbers. If they match, the list `rel'
320 # holds the reversed list of components making up the relative file
320 # holds the reversed list of components making up the relative file
321 # name we want.
321 # name we want.
322 root_st = os.stat(root)
322 root_st = os.stat(root)
323 rel = []
323 rel = []
324 while True:
324 while True:
325 try:
325 try:
326 name_st = os.stat(name)
326 name_st = os.stat(name)
327 except OSError:
327 except OSError:
328 break
328 break
329 if samestat(name_st, root_st):
329 if samestat(name_st, root_st):
330 if not rel:
330 if not rel:
331 # name was actually the same as root (maybe a symlink)
331 # name was actually the same as root (maybe a symlink)
332 return ''
332 return ''
333 rel.reverse()
333 rel.reverse()
334 name = os.path.join(*rel)
334 name = os.path.join(*rel)
335 audit_path(name)
335 audit_path(name)
336 return pconvert(name)
336 return pconvert(name)
337 dirname, basename = os.path.split(name)
337 dirname, basename = os.path.split(name)
338 rel.append(basename)
338 rel.append(basename)
339 if dirname == name:
339 if dirname == name:
340 break
340 break
341 name = dirname
341 name = dirname
342
342
343 raise Abort('%s not under root' % myname)
343 raise Abort('%s not under root' % myname)
344
344
345 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], dflt_pat='glob'):
345 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], dflt_pat='glob'):
346 """build a function to match a set of file patterns
346 """build a function to match a set of file patterns
347
347
348 arguments:
348 arguments:
349 canonroot - the canonical root of the tree you're matching against
349 canonroot - the canonical root of the tree you're matching against
350 cwd - the current working directory, if relevant
350 cwd - the current working directory, if relevant
351 names - patterns to find
351 names - patterns to find
352 inc - patterns to include
352 inc - patterns to include
353 exc - patterns to exclude
353 exc - patterns to exclude
354 dflt_pat - if a pattern in names has no explicit type, assume this one
354 dflt_pat - if a pattern in names has no explicit type, assume this one
355
355
356 a pattern is one of:
356 a pattern is one of:
357 'glob:<glob>' - a glob relative to cwd
357 'glob:<glob>' - a glob relative to cwd
358 're:<regexp>' - a regular expression
358 're:<regexp>' - a regular expression
359 'path:<path>' - a path relative to canonroot
359 'path:<path>' - a path relative to canonroot
360 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
360 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
361 'relpath:<path>' - a path relative to cwd
361 'relpath:<path>' - a path relative to cwd
362 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
362 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
363 '<something>' - one of the cases above, selected by the dflt_pat argument
363 '<something>' - one of the cases above, selected by the dflt_pat argument
364
364
365 returns:
365 returns:
366 a 3-tuple containing
366 a 3-tuple containing
367 - list of roots (places where one should start a recursive walk of the fs);
367 - list of roots (places where one should start a recursive walk of the fs);
368 this often matches the explicit non-pattern names passed in, but also
368 this often matches the explicit non-pattern names passed in, but also
369 includes the initial part of glob: patterns that has no glob characters
369 includes the initial part of glob: patterns that has no glob characters
370 - a bool match(filename) function
370 - a bool match(filename) function
371 - a bool indicating if any patterns were passed in
371 - a bool indicating if any patterns were passed in
372 """
372 """
373
373
374 # a common case: no patterns at all
374 # a common case: no patterns at all
375 if not names and not inc and not exc:
375 if not names and not inc and not exc:
376 return [], always, False
376 return [], always, False
377
377
378 def contains_glob(name):
378 def contains_glob(name):
379 for c in name:
379 for c in name:
380 if c in _globchars: return True
380 if c in _globchars: return True
381 return False
381 return False
382
382
383 def regex(kind, name, tail):
383 def regex(kind, name, tail):
384 '''convert a pattern into a regular expression'''
384 '''convert a pattern into a regular expression'''
385 if not name:
385 if not name:
386 return ''
386 return ''
387 if kind == 're':
387 if kind == 're':
388 return name
388 return name
389 elif kind == 'path':
389 elif kind == 'path':
390 return '^' + re.escape(name) + '(?:/|$)'
390 return '^' + re.escape(name) + '(?:/|$)'
391 elif kind == 'relglob':
391 elif kind == 'relglob':
392 return globre(name, '(?:|.*/)', tail)
392 return globre(name, '(?:|.*/)', tail)
393 elif kind == 'relpath':
393 elif kind == 'relpath':
394 return re.escape(name) + '(?:/|$)'
394 return re.escape(name) + '(?:/|$)'
395 elif kind == 'relre':
395 elif kind == 'relre':
396 if name.startswith('^'):
396 if name.startswith('^'):
397 return name
397 return name
398 return '.*' + name
398 return '.*' + name
399 return globre(name, '', tail)
399 return globre(name, '', tail)
400
400
401 def matchfn(pats, tail):
401 def matchfn(pats, tail):
402 """build a matching function from a set of patterns"""
402 """build a matching function from a set of patterns"""
403 if not pats:
403 if not pats:
404 return
404 return
405 try:
405 try:
406 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
406 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
407 if len(pat) > 20000:
407 if len(pat) > 20000:
408 raise OverflowError()
408 raise OverflowError()
409 return re.compile(pat).match
409 return re.compile(pat).match
410 except OverflowError:
410 except OverflowError:
411 # We're using a Python with a tiny regex engine and we
411 # We're using a Python with a tiny regex engine and we
412 # made it explode, so we'll divide the pattern list in two
412 # made it explode, so we'll divide the pattern list in two
413 # until it works
413 # until it works
414 l = len(pats)
414 l = len(pats)
415 if l < 2:
415 if l < 2:
416 raise
416 raise
417 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
417 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
418 return lambda s: a(s) or b(s)
418 return lambda s: a(s) or b(s)
419 except re.error:
419 except re.error:
420 for k, p in pats:
420 for k, p in pats:
421 try:
421 try:
422 re.compile('(?:%s)' % regex(k, p, tail))
422 re.compile('(?:%s)' % regex(k, p, tail))
423 except re.error:
423 except re.error:
424 raise Abort("invalid pattern (%s): %s" % (k, p))
424 raise Abort("invalid pattern (%s): %s" % (k, p))
425 raise Abort("invalid pattern")
425 raise Abort("invalid pattern")
426
426
427 def globprefix(pat):
427 def globprefix(pat):
428 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
428 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
429 root = []
429 root = []
430 for p in pat.split('/'):
430 for p in pat.split('/'):
431 if contains_glob(p): break
431 if contains_glob(p): break
432 root.append(p)
432 root.append(p)
433 return '/'.join(root) or '.'
433 return '/'.join(root) or '.'
434
434
435 def normalizepats(names, default):
435 def normalizepats(names, default):
436 pats = []
436 pats = []
437 roots = []
437 roots = []
438 anypats = False
438 anypats = False
439 for kind, name in [patkind(p, default) for p in names]:
439 for kind, name in [_patsplit(p, default) for p in names]:
440 if kind in ('glob', 'relpath'):
440 if kind in ('glob', 'relpath'):
441 name = canonpath(canonroot, cwd, name)
441 name = canonpath(canonroot, cwd, name)
442 elif kind in ('relglob', 'path'):
442 elif kind in ('relglob', 'path'):
443 name = normpath(name)
443 name = normpath(name)
444
444
445 pats.append((kind, name))
445 pats.append((kind, name))
446
446
447 if kind in ('glob', 're', 'relglob', 'relre'):
447 if kind in ('glob', 're', 'relglob', 'relre'):
448 anypats = True
448 anypats = True
449
449
450 if kind == 'glob':
450 if kind == 'glob':
451 root = globprefix(name)
451 root = globprefix(name)
452 roots.append(root)
452 roots.append(root)
453 elif kind in ('relpath', 'path'):
453 elif kind in ('relpath', 'path'):
454 roots.append(name or '.')
454 roots.append(name or '.')
455 elif kind == 'relglob':
455 elif kind == 'relglob':
456 roots.append('.')
456 roots.append('.')
457 return roots, pats, anypats
457 return roots, pats, anypats
458
458
459 roots, pats, anypats = normalizepats(names, dflt_pat)
459 roots, pats, anypats = normalizepats(names, dflt_pat)
460
460
461 patmatch = matchfn(pats, '$') or always
461 patmatch = matchfn(pats, '$') or always
462 incmatch = always
462 incmatch = always
463 if inc:
463 if inc:
464 dummy, inckinds, dummy = normalizepats(inc, 'glob')
464 dummy, inckinds, dummy = normalizepats(inc, 'glob')
465 incmatch = matchfn(inckinds, '(?:/|$)')
465 incmatch = matchfn(inckinds, '(?:/|$)')
466 excmatch = never
466 excmatch = never
467 if exc:
467 if exc:
468 dummy, exckinds, dummy = normalizepats(exc, 'glob')
468 dummy, exckinds, dummy = normalizepats(exc, 'glob')
469 excmatch = matchfn(exckinds, '(?:/|$)')
469 excmatch = matchfn(exckinds, '(?:/|$)')
470
470
471 if not names and inc and not exc:
471 if not names and inc and not exc:
472 # common case: hgignore patterns
472 # common case: hgignore patterns
473 match = incmatch
473 match = incmatch
474 else:
474 else:
475 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
475 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
476
476
477 return (roots, match, (inc or exc or anypats) and True)
477 return (roots, match, (inc or exc or anypats) and True)
478
478
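A usage sketch of the matcher() entry point described in the docstring above; the repository root '/repo' is illustrative, and the patterns are chosen so that no filesystem access is needed. The call returns the (roots, matchfn, anypats) triple.

from mercurial import util

roots, matchfn, anypats = util.matcher('/repo', '', ['path:src', 'relglob:*.py'])

assert roots == ['src', '.']         # starting points for a recursive walk
assert matchfn('src/whatever.c')     # 'path:' matches everything below src
assert matchfn('lib/parse.py')       # 'relglob:' is unrooted, any directory
assert not matchfn('README')
assert anypats                       # a glob-style pattern was supplied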
479 _hgexecutable = None
479 _hgexecutable = None
480
480
481 def main_is_frozen():
481 def main_is_frozen():
482 """return True if we are a frozen executable.
482 """return True if we are a frozen executable.
483
483
484 The code supports py2exe (most common, Windows only) and tools/freeze
484 The code supports py2exe (most common, Windows only) and tools/freeze
485 (portable, not much used).
485 (portable, not much used).
486 """
486 """
487 return (hasattr(sys, "frozen") or # new py2exe
487 return (hasattr(sys, "frozen") or # new py2exe
488 hasattr(sys, "importers") or # old py2exe
488 hasattr(sys, "importers") or # old py2exe
489 imp.is_frozen("__main__")) # tools/freeze
489 imp.is_frozen("__main__")) # tools/freeze
490
490
491 def hgexecutable():
491 def hgexecutable():
492 """return location of the 'hg' executable.
492 """return location of the 'hg' executable.
493
493
494 Defaults to $HG or 'hg' in the search path.
494 Defaults to $HG or 'hg' in the search path.
495 """
495 """
496 if _hgexecutable is None:
496 if _hgexecutable is None:
497 hg = os.environ.get('HG')
497 hg = os.environ.get('HG')
498 if hg:
498 if hg:
499 set_hgexecutable(hg)
499 set_hgexecutable(hg)
500 elif main_is_frozen():
500 elif main_is_frozen():
501 set_hgexecutable(sys.executable)
501 set_hgexecutable(sys.executable)
502 else:
502 else:
503 set_hgexecutable(find_exe('hg') or 'hg')
503 set_hgexecutable(find_exe('hg') or 'hg')
504 return _hgexecutable
504 return _hgexecutable
505
505
506 def set_hgexecutable(path):
506 def set_hgexecutable(path):
507 """set location of the 'hg' executable"""
507 """set location of the 'hg' executable"""
508 global _hgexecutable
508 global _hgexecutable
509 _hgexecutable = path
509 _hgexecutable = path
510
510
511 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
511 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
512 '''enhanced shell command execution.
512 '''enhanced shell command execution.
513 run with environment maybe modified, maybe in different dir.
513 run with environment maybe modified, maybe in different dir.
514
514
515 if command fails and onerr is None, return status. if ui object,
515 if command fails and onerr is None, return status. if ui object,
516 print error message and return status, else raise onerr object as
516 print error message and return status, else raise onerr object as
517 exception.'''
517 exception.'''
518 def py2shell(val):
518 def py2shell(val):
519 'convert python object into string that is useful to shell'
519 'convert python object into string that is useful to shell'
520 if val is None or val is False:
520 if val is None or val is False:
521 return '0'
521 return '0'
522 if val is True:
522 if val is True:
523 return '1'
523 return '1'
524 return str(val)
524 return str(val)
525 oldenv = {}
525 oldenv = {}
526 for k in environ:
526 for k in environ:
527 oldenv[k] = os.environ.get(k)
527 oldenv[k] = os.environ.get(k)
528 if cwd is not None:
528 if cwd is not None:
529 oldcwd = os.getcwd()
529 oldcwd = os.getcwd()
530 origcmd = cmd
530 origcmd = cmd
531 if os.name == 'nt':
531 if os.name == 'nt':
532 cmd = '"%s"' % cmd
532 cmd = '"%s"' % cmd
533 try:
533 try:
534 for k, v in environ.iteritems():
534 for k, v in environ.iteritems():
535 os.environ[k] = py2shell(v)
535 os.environ[k] = py2shell(v)
536 os.environ['HG'] = hgexecutable()
536 os.environ['HG'] = hgexecutable()
537 if cwd is not None and oldcwd != cwd:
537 if cwd is not None and oldcwd != cwd:
538 os.chdir(cwd)
538 os.chdir(cwd)
539 rc = os.system(cmd)
539 rc = os.system(cmd)
540 if sys.platform == 'OpenVMS' and rc & 1:
540 if sys.platform == 'OpenVMS' and rc & 1:
541 rc = 0
541 rc = 0
542 if rc and onerr:
542 if rc and onerr:
543 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
543 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
544 explain_exit(rc)[0])
544 explain_exit(rc)[0])
545 if errprefix:
545 if errprefix:
546 errmsg = '%s: %s' % (errprefix, errmsg)
546 errmsg = '%s: %s' % (errprefix, errmsg)
547 try:
547 try:
548 onerr.warn(errmsg + '\n')
548 onerr.warn(errmsg + '\n')
549 except AttributeError:
549 except AttributeError:
550 raise onerr(errmsg)
550 raise onerr(errmsg)
551 return rc
551 return rc
552 finally:
552 finally:
553 for k, v in oldenv.iteritems():
553 for k, v in oldenv.iteritems():
554 if v is None:
554 if v is None:
555 del os.environ[k]
555 del os.environ[k]
556 else:
556 else:
557 os.environ[k] = v
557 os.environ[k] = v
558 if cwd is not None and oldcwd != cwd:
558 if cwd is not None and oldcwd != cwd:
559 os.chdir(oldcwd)
559 os.chdir(oldcwd)
560
560
561 def checksignature(func):
561 def checksignature(func):
562 '''wrap a function with code to check for calling errors'''
562 '''wrap a function with code to check for calling errors'''
563 def check(*args, **kwargs):
563 def check(*args, **kwargs):
564 try:
564 try:
565 return func(*args, **kwargs)
565 return func(*args, **kwargs)
566 except TypeError:
566 except TypeError:
567 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
567 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
568 raise error.SignatureError
568 raise error.SignatureError
569 raise
569 raise
570
570
571 return check
571 return check
572
572
573 # os.path.lexists is not available on python2.3
573 # os.path.lexists is not available on python2.3
574 def lexists(filename):
574 def lexists(filename):
575 "test whether a file with this name exists. does not follow symlinks"
575 "test whether a file with this name exists. does not follow symlinks"
576 try:
576 try:
577 os.lstat(filename)
577 os.lstat(filename)
578 except:
578 except:
579 return False
579 return False
580 return True
580 return True
581
581
582 def rename(src, dst):
582 def rename(src, dst):
583 """forcibly rename a file"""
583 """forcibly rename a file"""
584 try:
584 try:
585 os.rename(src, dst)
585 os.rename(src, dst)
586 except OSError, err: # FIXME: check err (EEXIST ?)
586 except OSError, err: # FIXME: check err (EEXIST ?)
587
587
588 # On windows, rename to existing file is not allowed, so we
588 # On windows, rename to existing file is not allowed, so we
589 # must delete destination first. But if a file is open, unlink
589 # must delete destination first. But if a file is open, unlink
590 # schedules it for delete but does not delete it. Rename
590 # schedules it for delete but does not delete it. Rename
591 # happens immediately even for open files, so we rename
591 # happens immediately even for open files, so we rename
592 # destination to a temporary name, then delete that. Then
592 # destination to a temporary name, then delete that. Then
593 # rename is safe to do.
593 # rename is safe to do.
594 # The temporary name is chosen at random to avoid the situation
594 # The temporary name is chosen at random to avoid the situation
595 # where a file is left lying around from a previous aborted run.
595 # where a file is left lying around from a previous aborted run.
596 # The usual race condition this introduces can't be avoided as
596 # The usual race condition this introduces can't be avoided as
597 # we need the name to rename into, and not the file itself. Due
597 # we need the name to rename into, and not the file itself. Due
598 # to the nature of the operation however, any races will at worst
598 # to the nature of the operation however, any races will at worst
599 # lead to the rename failing and the current operation aborting.
599 # lead to the rename failing and the current operation aborting.
600
600
601 def tempname(prefix):
601 def tempname(prefix):
602 for tries in xrange(10):
602 for tries in xrange(10):
603 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
603 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
604 if not os.path.exists(temp):
604 if not os.path.exists(temp):
605 return temp
605 return temp
606 raise IOError, (errno.EEXIST, "No usable temporary filename found")
606 raise IOError, (errno.EEXIST, "No usable temporary filename found")
607
607
608 temp = tempname(dst)
608 temp = tempname(dst)
609 os.rename(dst, temp)
609 os.rename(dst, temp)
610 os.unlink(temp)
610 os.unlink(temp)
611 os.rename(src, dst)
611 os.rename(src, dst)
612
612
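The comment above explains why rename() must work around Windows semantics. A minimal usage sketch, assuming this module is importable as mercurial.util; the file names are hypothetical:

# force-replace an existing destination; on Windows the old b.txt is first
# renamed to a random temporary name and unlinked, then a.txt takes its place
from mercurial import util

open('a.txt', 'w').write('new contents\n')
open('b.txt', 'w').write('old contents\n')
util.rename('a.txt', 'b.txt')
print open('b.txt').read()        # -> new contents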
613 def unlink(f):
613 def unlink(f):
614 """unlink and remove the directory if it is empty"""
614 """unlink and remove the directory if it is empty"""
615 os.unlink(f)
615 os.unlink(f)
616 # try removing directories that might now be empty
616 # try removing directories that might now be empty
617 try:
617 try:
618 os.removedirs(os.path.dirname(f))
618 os.removedirs(os.path.dirname(f))
619 except OSError:
619 except OSError:
620 pass
620 pass
621
621
622 def copyfile(src, dest):
622 def copyfile(src, dest):
623 "copy a file, preserving mode and atime/mtime"
623 "copy a file, preserving mode and atime/mtime"
624 if os.path.islink(src):
624 if os.path.islink(src):
625 try:
625 try:
626 os.unlink(dest)
626 os.unlink(dest)
627 except:
627 except:
628 pass
628 pass
629 os.symlink(os.readlink(src), dest)
629 os.symlink(os.readlink(src), dest)
630 else:
630 else:
631 try:
631 try:
632 shutil.copyfile(src, dest)
632 shutil.copyfile(src, dest)
633 shutil.copystat(src, dest)
633 shutil.copystat(src, dest)
634 except shutil.Error, inst:
634 except shutil.Error, inst:
635 raise Abort(str(inst))
635 raise Abort(str(inst))
636
636
637 def copyfiles(src, dst, hardlink=None):
637 def copyfiles(src, dst, hardlink=None):
638 """Copy a directory tree using hardlinks if possible"""
638 """Copy a directory tree using hardlinks if possible"""
639
639
640 if hardlink is None:
640 if hardlink is None:
641 hardlink = (os.stat(src).st_dev ==
641 hardlink = (os.stat(src).st_dev ==
642 os.stat(os.path.dirname(dst)).st_dev)
642 os.stat(os.path.dirname(dst)).st_dev)
643
643
644 if os.path.isdir(src):
644 if os.path.isdir(src):
645 os.mkdir(dst)
645 os.mkdir(dst)
646 for name, kind in osutil.listdir(src):
646 for name, kind in osutil.listdir(src):
647 srcname = os.path.join(src, name)
647 srcname = os.path.join(src, name)
648 dstname = os.path.join(dst, name)
648 dstname = os.path.join(dst, name)
649 copyfiles(srcname, dstname, hardlink)
649 copyfiles(srcname, dstname, hardlink)
650 else:
650 else:
651 if hardlink:
651 if hardlink:
652 try:
652 try:
653 os_link(src, dst)
653 os_link(src, dst)
654 except (IOError, OSError):
654 except (IOError, OSError):
655 hardlink = False
655 hardlink = False
656 shutil.copy(src, dst)
656 shutil.copy(src, dst)
657 else:
657 else:
658 shutil.copy(src, dst)
658 shutil.copy(src, dst)
659
659
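A brief sketch of copyfiles(), assuming mercurial.util is importable; the paths are hypothetical and the destination's parent directory must already exist:

from mercurial import util

# hardlink is autodetected: hardlinks are used only when src and dst's parent
# live on the same device, and silently downgraded to plain copies on failure
util.copyfiles('repo/.hg/store', 'backup/store')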
660 class path_auditor(object):
660 class path_auditor(object):
661 '''ensure that a filesystem path contains no banned components.
661 '''ensure that a filesystem path contains no banned components.
662 the following properties of a path are checked:
662 the following properties of a path are checked:
663
663
664 - under top-level .hg
664 - under top-level .hg
665 - starts at the root of a windows drive
665 - starts at the root of a windows drive
666 - contains ".."
666 - contains ".."
667 - traverses a symlink (e.g. a/symlink_here/b)
667 - traverses a symlink (e.g. a/symlink_here/b)
668 - inside a nested repository'''
668 - inside a nested repository'''
669
669
670 def __init__(self, root):
670 def __init__(self, root):
671 self.audited = set()
671 self.audited = set()
672 self.auditeddir = set()
672 self.auditeddir = set()
673 self.root = root
673 self.root = root
674
674
675 def __call__(self, path):
675 def __call__(self, path):
676 if path in self.audited:
676 if path in self.audited:
677 return
677 return
678 normpath = os.path.normcase(path)
678 normpath = os.path.normcase(path)
679 parts = splitpath(normpath)
679 parts = splitpath(normpath)
680 if (os.path.splitdrive(path)[0]
680 if (os.path.splitdrive(path)[0]
681 or parts[0].lower() in ('.hg', '.hg.', '')
681 or parts[0].lower() in ('.hg', '.hg.', '')
682 or os.pardir in parts):
682 or os.pardir in parts):
683 raise Abort(_("path contains illegal component: %s") % path)
683 raise Abort(_("path contains illegal component: %s") % path)
684 if '.hg' in path.lower():
684 if '.hg' in path.lower():
685 lparts = [p.lower() for p in parts]
685 lparts = [p.lower() for p in parts]
686 for p in '.hg', '.hg.':
686 for p in '.hg', '.hg.':
687 if p in lparts[1:]:
687 if p in lparts[1:]:
688 pos = lparts.index(p)
688 pos = lparts.index(p)
689 base = os.path.join(*parts[:pos])
689 base = os.path.join(*parts[:pos])
690 raise Abort(_('path %r is inside repo %r') % (path, base))
690 raise Abort(_('path %r is inside repo %r') % (path, base))
691 def check(prefix):
691 def check(prefix):
692 curpath = os.path.join(self.root, prefix)
692 curpath = os.path.join(self.root, prefix)
693 try:
693 try:
694 st = os.lstat(curpath)
694 st = os.lstat(curpath)
695 except OSError, err:
695 except OSError, err:
696 # EINVAL can be raised as invalid path syntax under win32.
696 # EINVAL can be raised as invalid path syntax under win32.
697 # They must be ignored so that patterns can be checked too.
697 # They must be ignored so that patterns can be checked too.
697 # They must be ignored so that patterns can be checked too.
697 # They must be ignored so that patterns can be checked too.
698 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
698 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
699 raise
699 raise
700 else:
700 else:
701 if stat.S_ISLNK(st.st_mode):
701 if stat.S_ISLNK(st.st_mode):
702 raise Abort(_('path %r traverses symbolic link %r') %
702 raise Abort(_('path %r traverses symbolic link %r') %
703 (path, prefix))
703 (path, prefix))
704 elif (stat.S_ISDIR(st.st_mode) and
704 elif (stat.S_ISDIR(st.st_mode) and
705 os.path.isdir(os.path.join(curpath, '.hg'))):
705 os.path.isdir(os.path.join(curpath, '.hg'))):
706 raise Abort(_('path %r is inside repo %r') %
706 raise Abort(_('path %r is inside repo %r') %
707 (path, prefix))
707 (path, prefix))
708 parts.pop()
708 parts.pop()
709 prefixes = []
709 prefixes = []
710 for n in range(len(parts)):
710 for n in range(len(parts)):
711 prefix = os.sep.join(parts)
711 prefix = os.sep.join(parts)
712 if prefix in self.auditeddir:
712 if prefix in self.auditeddir:
713 break
713 break
714 check(prefix)
714 check(prefix)
715 prefixes.append(prefix)
715 prefixes.append(prefix)
716 parts.pop()
716 parts.pop()
717
717
718 self.audited.add(path)
718 self.audited.add(path)
719 # only add prefixes to the cache after checking everything: we don't
719 # only add prefixes to the cache after checking everything: we don't
720 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
720 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
721 self.auditeddir.update(prefixes)
721 self.auditeddir.update(prefixes)
722
722
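A minimal sketch of how path_auditor is meant to be used (mercurial.util assumed importable; the repository root is hypothetical, and util.Abort is assumed to be the Abort exception raised above):

from mercurial import util

audit = util.path_auditor('/tmp/repo')
audit('dir/file.txt')               # silently accepted
try:
    audit('../outside')             # contains '..'
except util.Abort, inst:
    print 'rejected:', inst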
723 def nlinks(pathname):
723 def nlinks(pathname):
724 """Return number of hardlinks for the given file."""
724 """Return number of hardlinks for the given file."""
725 return os.lstat(pathname).st_nlink
725 return os.lstat(pathname).st_nlink
726
726
727 if hasattr(os, 'link'):
727 if hasattr(os, 'link'):
728 os_link = os.link
728 os_link = os.link
729 else:
729 else:
730 def os_link(src, dst):
730 def os_link(src, dst):
731 raise OSError(0, _("Hardlinks not supported"))
731 raise OSError(0, _("Hardlinks not supported"))
732
732
733 def lookup_reg(key, name=None, scope=None):
733 def lookup_reg(key, name=None, scope=None):
734 return None
734 return None
735
735
736 if os.name == 'nt':
736 if os.name == 'nt':
737 from windows import *
737 from windows import *
738 def expand_glob(pats):
738 def expand_glob(pats):
739 '''On Windows, expand the implicit globs in a list of patterns'''
739 '''On Windows, expand the implicit globs in a list of patterns'''
740 ret = []
740 ret = []
741 for p in pats:
741 for p in pats:
742 kind, name = patkind(p, None)
742 kind, name = _patsplit(p, None)
743 if kind is None:
743 if kind is None:
744 globbed = glob.glob(name)
744 globbed = glob.glob(name)
745 if globbed:
745 if globbed:
746 ret.extend(globbed)
746 ret.extend(globbed)
747 continue
747 continue
748 # if we couldn't expand the glob, just keep it around
748 # if we couldn't expand the glob, just keep it around
749 ret.append(p)
749 ret.append(p)
750 return ret
750 return ret
751 else:
751 else:
752 from posix import *
752 from posix import *
753
753
754 def makelock(info, pathname):
754 def makelock(info, pathname):
755 try:
755 try:
756 return os.symlink(info, pathname)
756 return os.symlink(info, pathname)
757 except OSError, why:
757 except OSError, why:
758 if why.errno == errno.EEXIST:
758 if why.errno == errno.EEXIST:
759 raise
759 raise
760 except AttributeError: # no symlink in os
760 except AttributeError: # no symlink in os
761 pass
761 pass
762
762
763 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
763 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
764 os.write(ld, info)
764 os.write(ld, info)
765 os.close(ld)
765 os.close(ld)
766
766
767 def readlock(pathname):
767 def readlock(pathname):
768 try:
768 try:
769 return os.readlink(pathname)
769 return os.readlink(pathname)
770 except OSError, why:
770 except OSError, why:
771 if why.errno not in (errno.EINVAL, errno.ENOSYS):
771 if why.errno not in (errno.EINVAL, errno.ENOSYS):
772 raise
772 raise
773 except AttributeError: # no symlink in os
773 except AttributeError: # no symlink in os
774 pass
774 pass
775 return posixfile(pathname).read()
775 return posixfile(pathname).read()
776
776
777 def fstat(fp):
777 def fstat(fp):
778 '''stat file object that may not have fileno method.'''
778 '''stat file object that may not have fileno method.'''
779 try:
779 try:
780 return os.fstat(fp.fileno())
780 return os.fstat(fp.fileno())
781 except AttributeError:
781 except AttributeError:
782 return os.stat(fp.name)
782 return os.stat(fp.name)
783
783
784 # File system features
784 # File system features
785
785
786 def checkcase(path):
786 def checkcase(path):
787 """
787 """
788 Check whether the given path is on a case-sensitive filesystem
788 Check whether the given path is on a case-sensitive filesystem
789
789
790 Requires a path (like /foo/.hg) ending with a foldable final
790 Requires a path (like /foo/.hg) ending with a foldable final
791 directory component.
791 directory component.
792 """
792 """
793 s1 = os.stat(path)
793 s1 = os.stat(path)
794 d, b = os.path.split(path)
794 d, b = os.path.split(path)
795 p2 = os.path.join(d, b.upper())
795 p2 = os.path.join(d, b.upper())
796 if path == p2:
796 if path == p2:
797 p2 = os.path.join(d, b.lower())
797 p2 = os.path.join(d, b.lower())
798 try:
798 try:
799 s2 = os.stat(p2)
799 s2 = os.stat(p2)
800 if s2 == s1:
800 if s2 == s1:
801 return False
801 return False
802 return True
802 return True
803 except:
803 except:
804 return True
804 return True
805
805
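A quick probe using checkcase(), assuming mercurial.util is importable; the temporary directory just provides a path with a foldable final component:

import os, tempfile
from mercurial import util

d = tempfile.mkdtemp(prefix='CaseProbe')
print util.checkcase(d)   # True on case-sensitive filesystems, False otherwise
os.rmdir(d)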
806 _fspathcache = {}
806 _fspathcache = {}
807 def fspath(name, root):
807 def fspath(name, root):
808 '''Get name in the case stored in the filesystem
808 '''Get name in the case stored in the filesystem
809
809
810 The name is either relative to root, or it is an absolute path starting
810 The name is either relative to root, or it is an absolute path starting
811 with root. Note that this function is unnecessary, and should not be
811 with root. Note that this function is unnecessary, and should not be
812 called, for case-sensitive filesystems (simply because it's expensive).
812 called, for case-sensitive filesystems (simply because it's expensive).
813 '''
813 '''
814 # If name is absolute, make it relative
814 # If name is absolute, make it relative
815 if name.lower().startswith(root.lower()):
815 if name.lower().startswith(root.lower()):
816 l = len(root)
816 l = len(root)
817 if name[l] == os.sep or name[l] == os.altsep:
817 if name[l] == os.sep or name[l] == os.altsep:
818 l = l + 1
818 l = l + 1
819 name = name[l:]
819 name = name[l:]
820
820
821 if not os.path.exists(os.path.join(root, name)):
821 if not os.path.exists(os.path.join(root, name)):
822 return None
822 return None
823
823
824 seps = os.sep
824 seps = os.sep
825 if os.altsep:
825 if os.altsep:
826 seps = seps + os.altsep
826 seps = seps + os.altsep
827 # Protect backslashes. This gets silly very quickly.
827 # Protect backslashes. This gets silly very quickly.
828 seps.replace('\\','\\\\')
828 seps.replace('\\','\\\\')
829 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
829 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
830 dir = os.path.normcase(os.path.normpath(root))
830 dir = os.path.normcase(os.path.normpath(root))
831 result = []
831 result = []
832 for part, sep in pattern.findall(name):
832 for part, sep in pattern.findall(name):
833 if sep:
833 if sep:
834 result.append(sep)
834 result.append(sep)
835 continue
835 continue
836
836
837 if dir not in _fspathcache:
837 if dir not in _fspathcache:
838 _fspathcache[dir] = os.listdir(dir)
838 _fspathcache[dir] = os.listdir(dir)
839 contents = _fspathcache[dir]
839 contents = _fspathcache[dir]
840
840
841 lpart = part.lower()
841 lpart = part.lower()
842 for n in contents:
842 for n in contents:
843 if n.lower() == lpart:
843 if n.lower() == lpart:
844 result.append(n)
844 result.append(n)
845 break
845 break
846 else:
846 else:
847 # Cannot happen, as the file exists!
847 # Cannot happen, as the file exists!
848 result.append(part)
848 result.append(part)
849 dir = os.path.join(dir, lpart)
849 dir = os.path.join(dir, lpart)
850
850
851 return ''.join(result)
851 return ''.join(result)
852
852
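An illustrative sketch of fspath() on a case-insensitive filesystem (mercurial.util assumed importable, paths hypothetical):

from mercurial import util

# if the file was created as 'README.txt', this returns 'README.txt'
# even though the caller spelled it differently; None if it does not exist
print util.fspath('readme.TXT', '/Users/me/repo')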
853 def checkexec(path):
853 def checkexec(path):
854 """
854 """
855 Check whether the given path is on a filesystem with UNIX-like exec flags
855 Check whether the given path is on a filesystem with UNIX-like exec flags
856
856
857 Requires a directory (like /foo/.hg)
857 Requires a directory (like /foo/.hg)
858 """
858 """
859
859
860 # VFAT on some Linux versions can flip mode but it doesn't persist
860 # VFAT on some Linux versions can flip mode but it doesn't persist
861 # a FS remount. Frequently we can detect it if files are created
861 # a FS remount. Frequently we can detect it if files are created
862 # with exec bit on.
862 # with exec bit on.
863
863
864 try:
864 try:
865 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
865 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
866 fh, fn = tempfile.mkstemp("", "", path)
866 fh, fn = tempfile.mkstemp("", "", path)
867 try:
867 try:
868 os.close(fh)
868 os.close(fh)
869 m = os.stat(fn).st_mode & 0777
869 m = os.stat(fn).st_mode & 0777
870 new_file_has_exec = m & EXECFLAGS
870 new_file_has_exec = m & EXECFLAGS
871 os.chmod(fn, m ^ EXECFLAGS)
871 os.chmod(fn, m ^ EXECFLAGS)
872 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
872 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
873 finally:
873 finally:
874 os.unlink(fn)
874 os.unlink(fn)
875 except (IOError, OSError):
875 except (IOError, OSError):
876 # we don't care, the user probably won't be able to commit anyway
876 # we don't care, the user probably won't be able to commit anyway
877 return False
877 return False
878 return not (new_file_has_exec or exec_flags_cannot_flip)
878 return not (new_file_has_exec or exec_flags_cannot_flip)
879
879
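A sketch of the typical call site, assuming mercurial.util is importable and '/tmp/repo/.hg' is a writable directory:

from mercurial import util

if util.checkexec('/tmp/repo/.hg'):
    print 'filesystem honours the exec bit'
else:
    print 'exec bit unusable (e.g. VFAT); rely on recorded flags instead'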
880 def checklink(path):
880 def checklink(path):
881 """check whether the given path is on a symlink-capable filesystem"""
881 """check whether the given path is on a symlink-capable filesystem"""
882 # mktemp is not racy because symlink creation will fail if the
882 # mktemp is not racy because symlink creation will fail if the
883 # file already exists
883 # file already exists
884 name = tempfile.mktemp(dir=path)
884 name = tempfile.mktemp(dir=path)
885 try:
885 try:
886 os.symlink(".", name)
886 os.symlink(".", name)
887 os.unlink(name)
887 os.unlink(name)
888 return True
888 return True
889 except (OSError, AttributeError):
889 except (OSError, AttributeError):
890 return False
890 return False
891
891
892 def needbinarypatch():
892 def needbinarypatch():
893 """return True if patches should be applied in binary mode by default."""
893 """return True if patches should be applied in binary mode by default."""
894 return os.name == 'nt'
894 return os.name == 'nt'
895
895
896 def endswithsep(path):
896 def endswithsep(path):
897 '''Check path ends with os.sep or os.altsep.'''
897 '''Check path ends with os.sep or os.altsep.'''
898 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
898 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
899
899
900 def splitpath(path):
900 def splitpath(path):
901 '''Split path by os.sep.
901 '''Split path by os.sep.
902 Note that this function does not use os.altsep because this is
902 Note that this function does not use os.altsep because this is
903 an alternative to a simple "xxx.split(os.sep)".
903 an alternative to a simple "xxx.split(os.sep)".
904 It is recommended to use os.path.normpath() before using this
904 It is recommended to use os.path.normpath() before using this
905 function if needed.'''
905 function if needed.'''
906 return path.split(os.sep)
906 return path.split(os.sep)
907
907
908 def gui():
908 def gui():
909 '''Are we running in a GUI?'''
909 '''Are we running in a GUI?'''
910 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
910 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
911
911
912 def mktempcopy(name, emptyok=False, createmode=None):
912 def mktempcopy(name, emptyok=False, createmode=None):
913 """Create a temporary file with the same contents from name
913 """Create a temporary file with the same contents from name
914
914
915 The permission bits are copied from the original file.
915 The permission bits are copied from the original file.
916
916
917 If the temporary file is going to be truncated immediately, you
917 If the temporary file is going to be truncated immediately, you
918 can use emptyok=True as an optimization.
918 can use emptyok=True as an optimization.
919
919
920 Returns the name of the temporary file.
920 Returns the name of the temporary file.
921 """
921 """
922 d, fn = os.path.split(name)
922 d, fn = os.path.split(name)
923 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
923 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
924 os.close(fd)
924 os.close(fd)
925 # Temporary files are created with mode 0600, which is usually not
925 # Temporary files are created with mode 0600, which is usually not
926 # what we want. If the original file already exists, just copy
926 # what we want. If the original file already exists, just copy
927 # its mode. Otherwise, manually obey umask.
927 # its mode. Otherwise, manually obey umask.
928 try:
928 try:
929 st_mode = os.lstat(name).st_mode & 0777
929 st_mode = os.lstat(name).st_mode & 0777
930 except OSError, inst:
930 except OSError, inst:
931 if inst.errno != errno.ENOENT:
931 if inst.errno != errno.ENOENT:
932 raise
932 raise
933 st_mode = createmode
933 st_mode = createmode
934 if st_mode is None:
934 if st_mode is None:
935 st_mode = ~umask
935 st_mode = ~umask
936 st_mode &= 0666
936 st_mode &= 0666
937 os.chmod(temp, st_mode)
937 os.chmod(temp, st_mode)
938 if emptyok:
938 if emptyok:
939 return temp
939 return temp
940 try:
940 try:
941 try:
941 try:
942 ifp = posixfile(name, "rb")
942 ifp = posixfile(name, "rb")
943 except IOError, inst:
943 except IOError, inst:
944 if inst.errno == errno.ENOENT:
944 if inst.errno == errno.ENOENT:
945 return temp
945 return temp
946 if not getattr(inst, 'filename', None):
946 if not getattr(inst, 'filename', None):
947 inst.filename = name
947 inst.filename = name
948 raise
948 raise
949 ofp = posixfile(temp, "wb")
949 ofp = posixfile(temp, "wb")
950 for chunk in filechunkiter(ifp):
950 for chunk in filechunkiter(ifp):
951 ofp.write(chunk)
951 ofp.write(chunk)
952 ifp.close()
952 ifp.close()
953 ofp.close()
953 ofp.close()
954 except:
954 except:
955 try: os.unlink(temp)
955 try: os.unlink(temp)
956 except: pass
956 except: pass
957 raise
957 raise
958 return temp
958 return temp
959
959
960 class atomictempfile:
960 class atomictempfile:
961 """file-like object that atomically updates a file
961 """file-like object that atomically updates a file
962
962
963 All writes will be redirected to a temporary copy of the original
963 All writes will be redirected to a temporary copy of the original
964 file. When rename is called, the copy is renamed to the original
964 file. When rename is called, the copy is renamed to the original
965 name, making the changes visible.
965 name, making the changes visible.
966 """
966 """
967 def __init__(self, name, mode, createmode):
967 def __init__(self, name, mode, createmode):
968 self.__name = name
968 self.__name = name
969 self._fp = None
969 self._fp = None
970 self.temp = mktempcopy(name, emptyok=('w' in mode),
970 self.temp = mktempcopy(name, emptyok=('w' in mode),
971 createmode=createmode)
971 createmode=createmode)
972 self._fp = posixfile(self.temp, mode)
972 self._fp = posixfile(self.temp, mode)
973
973
974 def __getattr__(self, name):
974 def __getattr__(self, name):
975 return getattr(self._fp, name)
975 return getattr(self._fp, name)
976
976
977 def rename(self):
977 def rename(self):
978 if not self.closed:
978 if not self.closed:
979 self._fp.close()
979 self._fp.close()
980 rename(self.temp, localpath(self.__name))
980 rename(self.temp, localpath(self.__name))
981
981
982 def __del__(self):
982 def __del__(self):
983 if not self.closed:
983 if not self.closed:
984 try:
984 try:
985 os.unlink(self.temp)
985 os.unlink(self.temp)
986 except: pass
986 except: pass
987 if self._fp:
987 if self._fp:
988 self._fp.close()
988 self._fp.close()
989
989
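A minimal sketch of atomictempfile, assuming mercurial.util is importable; the file name is hypothetical:

from mercurial import util

f = util.atomictempfile('settings.cfg', 'w', None)
f.write('key = value\n')          # writes go to a temporary copy
f.rename()                        # the copy replaces settings.cfg in one step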
990 def makedirs(name, mode=None):
990 def makedirs(name, mode=None):
991 """recursive directory creation with parent mode inheritance"""
991 """recursive directory creation with parent mode inheritance"""
992 try:
992 try:
993 os.mkdir(name)
993 os.mkdir(name)
994 if mode is not None:
994 if mode is not None:
995 os.chmod(name, mode)
995 os.chmod(name, mode)
996 return
996 return
997 except OSError, err:
997 except OSError, err:
998 if err.errno == errno.EEXIST:
998 if err.errno == errno.EEXIST:
999 return
999 return
1000 if err.errno != errno.ENOENT:
1000 if err.errno != errno.ENOENT:
1001 raise
1001 raise
1002 parent = os.path.abspath(os.path.dirname(name))
1002 parent = os.path.abspath(os.path.dirname(name))
1003 makedirs(parent, mode)
1003 makedirs(parent, mode)
1004 makedirs(name, mode)
1004 makedirs(name, mode)
1005
1005
1006 class opener(object):
1006 class opener(object):
1007 """Open files relative to a base directory
1007 """Open files relative to a base directory
1008
1008
1009 This class is used to hide the details of COW semantics and
1009 This class is used to hide the details of COW semantics and
1010 remote file access from higher level code.
1010 remote file access from higher level code.
1011 """
1011 """
1012 def __init__(self, base, audit=True):
1012 def __init__(self, base, audit=True):
1013 self.base = base
1013 self.base = base
1014 if audit:
1014 if audit:
1015 self.audit_path = path_auditor(base)
1015 self.audit_path = path_auditor(base)
1016 else:
1016 else:
1017 self.audit_path = always
1017 self.audit_path = always
1018 self.createmode = None
1018 self.createmode = None
1019
1019
1020 def __getattr__(self, name):
1020 def __getattr__(self, name):
1021 if name == '_can_symlink':
1021 if name == '_can_symlink':
1022 self._can_symlink = checklink(self.base)
1022 self._can_symlink = checklink(self.base)
1023 return self._can_symlink
1023 return self._can_symlink
1024 raise AttributeError(name)
1024 raise AttributeError(name)
1025
1025
1026 def _fixfilemode(self, name):
1026 def _fixfilemode(self, name):
1027 if self.createmode is None:
1027 if self.createmode is None:
1028 return
1028 return
1029 os.chmod(name, self.createmode & 0666)
1029 os.chmod(name, self.createmode & 0666)
1030
1030
1031 def __call__(self, path, mode="r", text=False, atomictemp=False):
1031 def __call__(self, path, mode="r", text=False, atomictemp=False):
1032 self.audit_path(path)
1032 self.audit_path(path)
1033 f = os.path.join(self.base, path)
1033 f = os.path.join(self.base, path)
1034
1034
1035 if not text and "b" not in mode:
1035 if not text and "b" not in mode:
1036 mode += "b" # for that other OS
1036 mode += "b" # for that other OS
1037
1037
1038 nlink = -1
1038 nlink = -1
1039 if mode not in ("r", "rb"):
1039 if mode not in ("r", "rb"):
1040 try:
1040 try:
1041 nlink = nlinks(f)
1041 nlink = nlinks(f)
1042 except OSError:
1042 except OSError:
1043 nlink = 0
1043 nlink = 0
1044 d = os.path.dirname(f)
1044 d = os.path.dirname(f)
1045 if not os.path.isdir(d):
1045 if not os.path.isdir(d):
1046 makedirs(d, self.createmode)
1046 makedirs(d, self.createmode)
1047 if atomictemp:
1047 if atomictemp:
1048 return atomictempfile(f, mode, self.createmode)
1048 return atomictempfile(f, mode, self.createmode)
1049 if nlink > 1:
1049 if nlink > 1:
1050 rename(mktempcopy(f), f)
1050 rename(mktempcopy(f), f)
1051 fp = posixfile(f, mode)
1051 fp = posixfile(f, mode)
1052 if nlink == 0:
1052 if nlink == 0:
1053 self._fixfilemode(f)
1053 self._fixfilemode(f)
1054 return fp
1054 return fp
1055
1055
1056 def symlink(self, src, dst):
1056 def symlink(self, src, dst):
1057 self.audit_path(dst)
1057 self.audit_path(dst)
1058 linkname = os.path.join(self.base, dst)
1058 linkname = os.path.join(self.base, dst)
1059 try:
1059 try:
1060 os.unlink(linkname)
1060 os.unlink(linkname)
1061 except OSError:
1061 except OSError:
1062 pass
1062 pass
1063
1063
1064 dirname = os.path.dirname(linkname)
1064 dirname = os.path.dirname(linkname)
1065 if not os.path.exists(dirname):
1065 if not os.path.exists(dirname):
1066 makedirs(dirname, self.createmode)
1066 makedirs(dirname, self.createmode)
1067
1067
1068 if self._can_symlink:
1068 if self._can_symlink:
1069 try:
1069 try:
1070 os.symlink(src, linkname)
1070 os.symlink(src, linkname)
1071 except OSError, err:
1071 except OSError, err:
1072 raise OSError(err.errno, _('could not symlink to %r: %s') %
1072 raise OSError(err.errno, _('could not symlink to %r: %s') %
1073 (src, err.strerror), linkname)
1073 (src, err.strerror), linkname)
1074 else:
1074 else:
1075 f = self(dst, "w")
1075 f = self(dst, "w")
1076 f.write(src)
1076 f.write(src)
1077 f.close()
1077 f.close()
1078 self._fixfilemode(dst)
1078 self._fixfilemode(dst)
1079
1079
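A sketch of opener in use (mercurial.util assumed importable, base directory hypothetical); note that auditing rejects paths that escape the base:

from mercurial import util

op = util.opener('/tmp/repo')
fp = op('data/notes.txt', 'w')     # missing parent directories are created
fp.write('hello\n')
fp.close()
try:
    op('../escape.txt', 'w')       # '..' fails the path audit
except util.Abort, inst:
    print 'rejected:', inst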
1080 class chunkbuffer(object):
1080 class chunkbuffer(object):
1081 """Allow arbitrary sized chunks of data to be efficiently read from an
1081 """Allow arbitrary sized chunks of data to be efficiently read from an
1082 iterator over chunks of arbitrary size."""
1082 iterator over chunks of arbitrary size."""
1083
1083
1084 def __init__(self, in_iter):
1084 def __init__(self, in_iter):
1085 """in_iter is the iterator that's iterating over the input chunks.
1085 """in_iter is the iterator that's iterating over the input chunks.
1086 self.targetsize controls how much data is read ahead per refill."""
1086 self.targetsize controls how much data is read ahead per refill."""
1087 self.iter = iter(in_iter)
1087 self.iter = iter(in_iter)
1088 self.buf = ''
1088 self.buf = ''
1089 self.targetsize = 2**16
1089 self.targetsize = 2**16
1090
1090
1091 def read(self, l):
1091 def read(self, l):
1092 """Read L bytes of data from the iterator of chunks of data.
1092 """Read L bytes of data from the iterator of chunks of data.
1093 Returns less than L bytes if the iterator runs dry."""
1093 Returns less than L bytes if the iterator runs dry."""
1094 if l > len(self.buf) and self.iter:
1094 if l > len(self.buf) and self.iter:
1095 # read ahead at least self.targetsize bytes per refill
1095 # read ahead at least self.targetsize bytes per refill
1096 targetsize = max(l, self.targetsize)
1096 targetsize = max(l, self.targetsize)
1097 collector = cStringIO.StringIO()
1097 collector = cStringIO.StringIO()
1098 collector.write(self.buf)
1098 collector.write(self.buf)
1099 collected = len(self.buf)
1099 collected = len(self.buf)
1100 for chunk in self.iter:
1100 for chunk in self.iter:
1101 collector.write(chunk)
1101 collector.write(chunk)
1102 collected += len(chunk)
1102 collected += len(chunk)
1103 if collected >= targetsize:
1103 if collected >= targetsize:
1104 break
1104 break
1105 if collected < targetsize:
1105 if collected < targetsize:
1106 self.iter = False
1106 self.iter = False
1107 self.buf = collector.getvalue()
1107 self.buf = collector.getvalue()
1108 if len(self.buf) == l:
1108 if len(self.buf) == l:
1109 s, self.buf = str(self.buf), ''
1109 s, self.buf = str(self.buf), ''
1110 else:
1110 else:
1111 s, self.buf = self.buf[:l], buffer(self.buf, l)
1111 s, self.buf = self.buf[:l], buffer(self.buf, l)
1112 return s
1112 return s
1113
1113
1114 def filechunkiter(f, size=65536, limit=None):
1114 def filechunkiter(f, size=65536, limit=None):
1115 """Create a generator that produces the data in the file size
1115 """Create a generator that produces the data in the file size
1116 (default 65536) bytes at a time, up to optional limit (default is
1116 (default 65536) bytes at a time, up to optional limit (default is
1117 to read all data). Chunks may be less than size bytes if the
1117 to read all data). Chunks may be less than size bytes if the
1118 chunk is the last chunk in the file, or the file is a socket or
1118 chunk is the last chunk in the file, or the file is a socket or
1119 some other type of file that sometimes reads less data than is
1119 some other type of file that sometimes reads less data than is
1120 requested."""
1120 requested."""
1121 assert size >= 0
1121 assert size >= 0
1122 assert limit is None or limit >= 0
1122 assert limit is None or limit >= 0
1123 while True:
1123 while True:
1124 if limit is None: nbytes = size
1124 if limit is None: nbytes = size
1125 else: nbytes = min(limit, size)
1125 else: nbytes = min(limit, size)
1126 s = nbytes and f.read(nbytes)
1126 s = nbytes and f.read(nbytes)
1127 if not s: break
1127 if not s: break
1128 if limit: limit -= len(s)
1128 if limit: limit -= len(s)
1129 yield s
1129 yield s
1130
1130
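A small sketch showing chunkbuffer re-chunking an iterator of odd-sized strings into fixed-size reads (mercurial.util assumed importable):

from mercurial import util

buf = util.chunkbuffer(iter(['ab', 'cde', 'f', 'ghij']))
print buf.read(4)    # -> 'abcd'
print buf.read(4)    # -> 'efgh'
print buf.read(4)    # -> 'ij' (iterator ran dry, short read)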
1131 def makedate():
1131 def makedate():
1132 lt = time.localtime()
1132 lt = time.localtime()
1133 if lt[8] == 1 and time.daylight:
1133 if lt[8] == 1 and time.daylight:
1134 tz = time.altzone
1134 tz = time.altzone
1135 else:
1135 else:
1136 tz = time.timezone
1136 tz = time.timezone
1137 return time.mktime(lt), tz
1137 return time.mktime(lt), tz
1138
1138
1139 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1139 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1140 """represent a (unixtime, offset) tuple as a localized time.
1140 """represent a (unixtime, offset) tuple as a localized time.
1141 unixtime is seconds since the epoch, and offset is the time zone's
1141 unixtime is seconds since the epoch, and offset is the time zone's
1142 number of seconds away from UTC. the time zone is appended only
1142 number of seconds away from UTC. the time zone is appended only
1143 when the format string contains the %1/%2 placeholders."""
1143 when the format string contains the %1/%2 placeholders."""
1144 t, tz = date or makedate()
1144 t, tz = date or makedate()
1145 if "%1" in format or "%2" in format:
1145 if "%1" in format or "%2" in format:
1146 sign = (tz > 0) and "-" or "+"
1146 sign = (tz > 0) and "-" or "+"
1147 minutes = abs(tz) / 60
1147 minutes = abs(tz) / 60
1148 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1148 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1149 format = format.replace("%2", "%02d" % (minutes % 60))
1149 format = format.replace("%2", "%02d" % (minutes % 60))
1150 s = time.strftime(format, time.gmtime(float(t) - tz))
1150 s = time.strftime(format, time.gmtime(float(t) - tz))
1151 return s
1151 return s
1152
1152
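A sketch of the %1/%2 placeholders, which expand to the signed hours and minutes of the UTC offset (mercurial.util assumed importable):

from mercurial import util

date = (0, -3600)                            # the epoch, one hour east of UTC
print util.datestr(date)                     # -> 'Thu Jan 01 01:00:00 1970 +0100'
print util.datestr(date, '%Y-%m-%d %1%2')    # -> '1970-01-01 +0100'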
1153 def shortdate(date=None):
1153 def shortdate(date=None):
1154 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1154 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1155 return datestr(date, format='%Y-%m-%d')
1155 return datestr(date, format='%Y-%m-%d')
1156
1156
1157 def strdate(string, format, defaults=[]):
1157 def strdate(string, format, defaults=[]):
1158 """parse a localized time string and return a (unixtime, offset) tuple.
1158 """parse a localized time string and return a (unixtime, offset) tuple.
1159 if the string cannot be parsed, ValueError is raised."""
1159 if the string cannot be parsed, ValueError is raised."""
1160 def timezone(string):
1160 def timezone(string):
1161 tz = string.split()[-1]
1161 tz = string.split()[-1]
1162 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1162 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1163 sign = (tz[0] == "+") and 1 or -1
1163 sign = (tz[0] == "+") and 1 or -1
1164 hours = int(tz[1:3])
1164 hours = int(tz[1:3])
1165 minutes = int(tz[3:5])
1165 minutes = int(tz[3:5])
1166 return -sign * (hours * 60 + minutes) * 60
1166 return -sign * (hours * 60 + minutes) * 60
1167 if tz == "GMT" or tz == "UTC":
1167 if tz == "GMT" or tz == "UTC":
1168 return 0
1168 return 0
1169 return None
1169 return None
1170
1170
1171 # NOTE: unixtime = localunixtime + offset
1171 # NOTE: unixtime = localunixtime + offset
1172 offset, date = timezone(string), string
1172 offset, date = timezone(string), string
1173 if offset != None:
1173 if offset != None:
1174 date = " ".join(string.split()[:-1])
1174 date = " ".join(string.split()[:-1])
1175
1175
1176 # add missing elements from defaults
1176 # add missing elements from defaults
1177 for part in defaults:
1177 for part in defaults:
1178 found = [True for p in part if ("%"+p) in format]
1178 found = [True for p in part if ("%"+p) in format]
1179 if not found:
1179 if not found:
1180 date += "@" + defaults[part]
1180 date += "@" + defaults[part]
1181 format += "@%" + part[0]
1181 format += "@%" + part[0]
1182
1182
1183 timetuple = time.strptime(date, format)
1183 timetuple = time.strptime(date, format)
1184 localunixtime = int(calendar.timegm(timetuple))
1184 localunixtime = int(calendar.timegm(timetuple))
1185 if offset is None:
1185 if offset is None:
1186 # local timezone
1186 # local timezone
1187 unixtime = int(time.mktime(timetuple))
1187 unixtime = int(time.mktime(timetuple))
1188 offset = unixtime - localunixtime
1188 offset = unixtime - localunixtime
1189 else:
1189 else:
1190 unixtime = localunixtime + offset
1190 unixtime = localunixtime + offset
1191 return unixtime, offset
1191 return unixtime, offset
1192
1192
1193 def parsedate(date, formats=None, defaults=None):
1193 def parsedate(date, formats=None, defaults=None):
1194 """parse a localized date/time string and return a (unixtime, offset) tuple.
1194 """parse a localized date/time string and return a (unixtime, offset) tuple.
1195
1195
1196 The date may be a "unixtime offset" string or in one of the specified
1196 The date may be a "unixtime offset" string or in one of the specified
1197 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1197 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1198 """
1198 """
1199 if not date:
1199 if not date:
1200 return 0, 0
1200 return 0, 0
1201 if isinstance(date, tuple) and len(date) == 2:
1201 if isinstance(date, tuple) and len(date) == 2:
1202 return date
1202 return date
1203 if not formats:
1203 if not formats:
1204 formats = defaultdateformats
1204 formats = defaultdateformats
1205 date = date.strip()
1205 date = date.strip()
1206 try:
1206 try:
1207 when, offset = map(int, date.split(' '))
1207 when, offset = map(int, date.split(' '))
1208 except ValueError:
1208 except ValueError:
1209 # fill out defaults
1209 # fill out defaults
1210 if not defaults:
1210 if not defaults:
1211 defaults = {}
1211 defaults = {}
1212 now = makedate()
1212 now = makedate()
1213 for part in "d mb yY HI M S".split():
1213 for part in "d mb yY HI M S".split():
1214 if part not in defaults:
1214 if part not in defaults:
1215 if part[0] in "HMS":
1215 if part[0] in "HMS":
1216 defaults[part] = "00"
1216 defaults[part] = "00"
1217 else:
1217 else:
1218 defaults[part] = datestr(now, "%" + part[0])
1218 defaults[part] = datestr(now, "%" + part[0])
1219
1219
1220 for format in formats:
1220 for format in formats:
1221 try:
1221 try:
1222 when, offset = strdate(date, format, defaults)
1222 when, offset = strdate(date, format, defaults)
1223 except (ValueError, OverflowError):
1223 except (ValueError, OverflowError):
1224 pass
1224 pass
1225 else:
1225 else:
1226 break
1226 break
1227 else:
1227 else:
1228 raise Abort(_('invalid date: %r ') % date)
1228 raise Abort(_('invalid date: %r ') % date)
1229 # validate explicit (probably user-specified) date and
1229 # validate explicit (probably user-specified) date and
1230 # time zone offset. values must fit in signed 32 bits for
1230 # time zone offset. values must fit in signed 32 bits for
1231 # current 32-bit linux runtimes. timezones go from UTC-12
1231 # current 32-bit linux runtimes. timezones go from UTC-12
1232 # to UTC+14
1232 # to UTC+14
1233 if abs(when) > 0x7fffffff:
1233 if abs(when) > 0x7fffffff:
1234 raise Abort(_('date exceeds 32 bits: %d') % when)
1234 raise Abort(_('date exceeds 32 bits: %d') % when)
1235 if offset < -50400 or offset > 43200:
1235 if offset < -50400 or offset > 43200:
1236 raise Abort(_('impossible time zone offset: %d') % offset)
1236 raise Abort(_('impossible time zone offset: %d') % offset)
1237 return when, offset
1237 return when, offset
1238
1238
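A sketch of parsedate(), assuming the module-level defaultdateformats table (defined elsewhere in this file) accepts '%Y-%m-%d %H:%M':

from mercurial import util

print util.parsedate('1970-01-01 01:00 +0100')   # -> (0, -3600)
print util.parsedate('0 0')                      # raw 'unixtime offset' form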
1239 def matchdate(date):
1239 def matchdate(date):
1240 """Return a function that matches a given date match specifier
1240 """Return a function that matches a given date match specifier
1241
1241
1242 Formats include:
1242 Formats include:
1243
1243
1244 '{date}' match a given date to the accuracy provided
1244 '{date}' match a given date to the accuracy provided
1245
1245
1246 '<{date}' on or before a given date
1246 '<{date}' on or before a given date
1247
1247
1248 '>{date}' on or after a given date
1248 '>{date}' on or after a given date
1249
1249
1250 """
1250 """
1251
1251
1252 def lower(date):
1252 def lower(date):
1253 d = dict(mb="1", d="1")
1253 d = dict(mb="1", d="1")
1254 return parsedate(date, extendeddateformats, d)[0]
1254 return parsedate(date, extendeddateformats, d)[0]
1255
1255
1256 def upper(date):
1256 def upper(date):
1257 d = dict(mb="12", HI="23", M="59", S="59")
1257 d = dict(mb="12", HI="23", M="59", S="59")
1258 for days in "31 30 29".split():
1258 for days in "31 30 29".split():
1259 try:
1259 try:
1260 d["d"] = days
1260 d["d"] = days
1261 return parsedate(date, extendeddateformats, d)[0]
1261 return parsedate(date, extendeddateformats, d)[0]
1262 except:
1262 except:
1263 pass
1263 pass
1264 d["d"] = "28"
1264 d["d"] = "28"
1265 return parsedate(date, extendeddateformats, d)[0]
1265 return parsedate(date, extendeddateformats, d)[0]
1266
1266
1267 date = date.strip()
1267 date = date.strip()
1268 if date[0] == "<":
1268 if date[0] == "<":
1269 when = upper(date[1:])
1269 when = upper(date[1:])
1270 return lambda x: x <= when
1270 return lambda x: x <= when
1271 elif date[0] == ">":
1271 elif date[0] == ">":
1272 when = lower(date[1:])
1272 when = lower(date[1:])
1273 return lambda x: x >= when
1273 return lambda x: x >= when
1274 elif date[0] == "-":
1274 elif date[0] == "-":
1275 try:
1275 try:
1276 days = int(date[1:])
1276 days = int(date[1:])
1277 except ValueError:
1277 except ValueError:
1278 raise Abort(_("invalid day spec: %s") % date[1:])
1278 raise Abort(_("invalid day spec: %s") % date[1:])
1279 when = makedate()[0] - days * 3600 * 24
1279 when = makedate()[0] - days * 3600 * 24
1280 return lambda x: x >= when
1280 return lambda x: x >= when
1281 elif " to " in date:
1281 elif " to " in date:
1282 a, b = date.split(" to ")
1282 a, b = date.split(" to ")
1283 start, stop = lower(a), upper(b)
1283 start, stop = lower(a), upper(b)
1284 return lambda x: x >= start and x <= stop
1284 return lambda x: x >= start and x <= stop
1285 else:
1285 else:
1286 start, stop = lower(date), upper(date)
1286 start, stop = lower(date), upper(date)
1287 return lambda x: x >= start and x <= stop
1287 return lambda x: x >= start and x <= stop
1288
1288
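A sketch of matchdate() building a predicate over commit timestamps (mercurial.util assumed importable; dates without a zone are parsed in local time):

from mercurial import util

since = util.matchdate('>2008-12-01')
print since(util.parsedate('2008-12-24')[0])   # -> True
print since(util.parsedate('2007-06-01')[0])   # -> False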
1289 def shortuser(user):
1289 def shortuser(user):
1290 """Return a short representation of a user name or email address."""
1290 """Return a short representation of a user name or email address."""
1291 f = user.find('@')
1291 f = user.find('@')
1292 if f >= 0:
1292 if f >= 0:
1293 user = user[:f]
1293 user = user[:f]
1294 f = user.find('<')
1294 f = user.find('<')
1295 if f >= 0:
1295 if f >= 0:
1296 user = user[f+1:]
1296 user = user[f+1:]
1297 f = user.find(' ')
1297 f = user.find(' ')
1298 if f >= 0:
1298 if f >= 0:
1299 user = user[:f]
1299 user = user[:f]
1300 f = user.find('.')
1300 f = user.find('.')
1301 if f >= 0:
1301 if f >= 0:
1302 user = user[:f]
1302 user = user[:f]
1303 return user
1303 return user
1304
1304
1305 def email(author):
1305 def email(author):
1306 '''get email of author.'''
1306 '''get email of author.'''
1307 r = author.find('>')
1307 r = author.find('>')
1308 if r == -1: r = None
1308 if r == -1: r = None
1309 return author[author.find('<')+1:r]
1309 return author[author.find('<')+1:r]
1310
1310
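A sketch of the two author helpers (mercurial.util assumed importable):

from mercurial import util

author = 'J. Random Hacker <jrandom@example.com>'
print util.email(author)       # -> 'jrandom@example.com'
print util.shortuser(author)   # -> 'jrandom'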
1311 def ellipsis(text, maxlength=400):
1311 def ellipsis(text, maxlength=400):
1312 """Trim string to at most maxlength (default: 400) characters."""
1312 """Trim string to at most maxlength (default: 400) characters."""
1313 if len(text) <= maxlength:
1313 if len(text) <= maxlength:
1314 return text
1314 return text
1315 else:
1315 else:
1316 return "%s..." % (text[:maxlength-3])
1316 return "%s..." % (text[:maxlength-3])
1317
1317
1318 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1318 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1319 '''yield every hg repository under path, recursively.'''
1319 '''yield every hg repository under path, recursively.'''
1320 def errhandler(err):
1320 def errhandler(err):
1321 if err.filename == path:
1321 if err.filename == path:
1322 raise err
1322 raise err
1323 if followsym and hasattr(os.path, 'samestat'):
1323 if followsym and hasattr(os.path, 'samestat'):
1324 def _add_dir_if_not_there(dirlst, dirname):
1324 def _add_dir_if_not_there(dirlst, dirname):
1325 match = False
1325 match = False
1326 samestat = os.path.samestat
1326 samestat = os.path.samestat
1327 dirstat = os.stat(dirname)
1327 dirstat = os.stat(dirname)
1328 for lstdirstat in dirlst:
1328 for lstdirstat in dirlst:
1329 if samestat(dirstat, lstdirstat):
1329 if samestat(dirstat, lstdirstat):
1330 match = True
1330 match = True
1331 break
1331 break
1332 if not match:
1332 if not match:
1333 dirlst.append(dirstat)
1333 dirlst.append(dirstat)
1334 return not match
1334 return not match
1335 else:
1335 else:
1336 followsym = False
1336 followsym = False
1337
1337
1338 if (seen_dirs is None) and followsym:
1338 if (seen_dirs is None) and followsym:
1339 seen_dirs = []
1339 seen_dirs = []
1340 _add_dir_if_not_there(seen_dirs, path)
1340 _add_dir_if_not_there(seen_dirs, path)
1341 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1341 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1342 if '.hg' in dirs:
1342 if '.hg' in dirs:
1343 yield root # found a repository
1343 yield root # found a repository
1344 qroot = os.path.join(root, '.hg', 'patches')
1344 qroot = os.path.join(root, '.hg', 'patches')
1345 if os.path.isdir(os.path.join(qroot, '.hg')):
1345 if os.path.isdir(os.path.join(qroot, '.hg')):
1346 yield qroot # we have a patch queue repo here
1346 yield qroot # we have a patch queue repo here
1347 if recurse:
1347 if recurse:
1348 # avoid recursing inside the .hg directory
1348 # avoid recursing inside the .hg directory
1349 dirs.remove('.hg')
1349 dirs.remove('.hg')
1350 else:
1350 else:
1351 dirs[:] = [] # don't descend further
1351 dirs[:] = [] # don't descend further
1352 elif followsym:
1352 elif followsym:
1353 newdirs = []
1353 newdirs = []
1354 for d in dirs:
1354 for d in dirs:
1355 fname = os.path.join(root, d)
1355 fname = os.path.join(root, d)
1356 if _add_dir_if_not_there(seen_dirs, fname):
1356 if _add_dir_if_not_there(seen_dirs, fname):
1357 if os.path.islink(fname):
1357 if os.path.islink(fname):
1358 for hgname in walkrepos(fname, True, seen_dirs):
1358 for hgname in walkrepos(fname, True, seen_dirs):
1359 yield hgname
1359 yield hgname
1360 else:
1360 else:
1361 newdirs.append(d)
1361 newdirs.append(d)
1362 dirs[:] = newdirs
1362 dirs[:] = newdirs
1363
1363
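A sketch of walkrepos() enumerating every repository (and nested patch-queue repository) below a hypothetical directory:

from mercurial import util

for repo in util.walkrepos('/srv/hg', followsym=True, recurse=True):
    print repo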
1364 _rcpath = None
1364 _rcpath = None
1365
1365
1366 def os_rcpath():
1366 def os_rcpath():
1367 '''return default os-specific hgrc search path'''
1367 '''return default os-specific hgrc search path'''
1368 path = system_rcpath()
1368 path = system_rcpath()
1369 path.extend(user_rcpath())
1369 path.extend(user_rcpath())
1370 path = [os.path.normpath(f) for f in path]
1370 path = [os.path.normpath(f) for f in path]
1371 return path
1371 return path
1372
1372
1373 def rcpath():
1373 def rcpath():
1374 '''return hgrc search path. if env var HGRCPATH is set, use it.
1374 '''return hgrc search path. if env var HGRCPATH is set, use it.
1375 for each item in path, if directory, use files ending in .rc,
1375 for each item in path, if directory, use files ending in .rc,
1376 else use item.
1376 else use item.
1377 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1377 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1378 if no HGRCPATH, use default os-specific path.'''
1378 if no HGRCPATH, use default os-specific path.'''
1379 global _rcpath
1379 global _rcpath
1380 if _rcpath is None:
1380 if _rcpath is None:
1381 if 'HGRCPATH' in os.environ:
1381 if 'HGRCPATH' in os.environ:
1382 _rcpath = []
1382 _rcpath = []
1383 for p in os.environ['HGRCPATH'].split(os.pathsep):
1383 for p in os.environ['HGRCPATH'].split(os.pathsep):
1384 if not p: continue
1384 if not p: continue
1385 if os.path.isdir(p):
1385 if os.path.isdir(p):
1386 for f, kind in osutil.listdir(p):
1386 for f, kind in osutil.listdir(p):
1387 if f.endswith('.rc'):
1387 if f.endswith('.rc'):
1388 _rcpath.append(os.path.join(p, f))
1388 _rcpath.append(os.path.join(p, f))
1389 else:
1389 else:
1390 _rcpath.append(p)
1390 _rcpath.append(p)
1391 else:
1391 else:
1392 _rcpath = os_rcpath()
1392 _rcpath = os_rcpath()
1393 return _rcpath
1393 return _rcpath
1394
1394
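A sketch of the HGRCPATH override described above, assuming mercurial.util is importable and rcpath() has not yet been called (its result is cached); the paths are hypothetical, and directories in the list expand to their *.rc files:

import os
from mercurial import util

os.environ['HGRCPATH'] = os.pathsep.join(['/etc/mercurial-extra',
                                          '/home/user/.hgrc'])
print util.rcpath()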
1395 def bytecount(nbytes):
1395 def bytecount(nbytes):
1396 '''return byte count formatted as readable string, with units'''
1396 '''return byte count formatted as readable string, with units'''
1397
1397
1398 units = (
1398 units = (
1399 (100, 1<<30, _('%.0f GB')),
1399 (100, 1<<30, _('%.0f GB')),
1400 (10, 1<<30, _('%.1f GB')),
1400 (10, 1<<30, _('%.1f GB')),
1401 (1, 1<<30, _('%.2f GB')),
1401 (1, 1<<30, _('%.2f GB')),
1402 (100, 1<<20, _('%.0f MB')),
1402 (100, 1<<20, _('%.0f MB')),
1403 (10, 1<<20, _('%.1f MB')),
1403 (10, 1<<20, _('%.1f MB')),
1404 (1, 1<<20, _('%.2f MB')),
1404 (1, 1<<20, _('%.2f MB')),
1405 (100, 1<<10, _('%.0f KB')),
1405 (100, 1<<10, _('%.0f KB')),
1406 (10, 1<<10, _('%.1f KB')),
1406 (10, 1<<10, _('%.1f KB')),
1407 (1, 1<<10, _('%.2f KB')),
1407 (1, 1<<10, _('%.2f KB')),
1408 (1, 1, _('%.0f bytes')),
1408 (1, 1, _('%.0f bytes')),
1409 )
1409 )
1410
1410
1411 for multiplier, divisor, format in units:
1411 for multiplier, divisor, format in units:
1412 if nbytes >= divisor * multiplier:
1412 if nbytes >= divisor * multiplier:
1413 return format % (nbytes / float(divisor))
1413 return format % (nbytes / float(divisor))
1414 return units[-1][2] % nbytes
1414 return units[-1][2] % nbytes
1415
1415
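A sketch of bytecount(), which picks the largest unit that still yields roughly three significant digits (mercurial.util assumed importable):

from mercurial import util

print util.bytecount(2048)         # -> '2.00 KB'
print util.bytecount(123456789)    # -> '118 MB'
print util.bytecount(5)            # -> '5 bytes'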
1416 def drop_scheme(scheme, path):
1416 def drop_scheme(scheme, path):
1417 sc = scheme + ':'
1417 sc = scheme + ':'
1418 if path.startswith(sc):
1418 if path.startswith(sc):
1419 path = path[len(sc):]
1419 path = path[len(sc):]
1420 if path.startswith('//'):
1420 if path.startswith('//'):
1421 path = path[2:]
1421 path = path[2:]
1422 return path
1422 return path
1423
1423
1424 def uirepr(s):
1424 def uirepr(s):
1425 # Avoid double backslash in Windows path repr()
1425 # Avoid double backslash in Windows path repr()
1426 return repr(s).replace('\\\\', '\\')
1426 return repr(s).replace('\\\\', '\\')
1427
1427
1428 def termwidth():
1428 def termwidth():
1429 if 'COLUMNS' in os.environ:
1429 if 'COLUMNS' in os.environ:
1430 try:
1430 try:
1431 return int(os.environ['COLUMNS'])
1431 return int(os.environ['COLUMNS'])
1432 except ValueError:
1432 except ValueError:
1433 pass
1433 pass
1434 try:
1434 try:
1435 import termios, array, fcntl
1435 import termios, array, fcntl
1436 for dev in (sys.stdout, sys.stdin):
1436 for dev in (sys.stdout, sys.stdin):
1437 try:
1437 try:
1438 try:
1438 try:
1439 fd = dev.fileno()
1439 fd = dev.fileno()
1440 except AttributeError:
1440 except AttributeError:
1441 continue
1441 continue
1442 if not os.isatty(fd):
1442 if not os.isatty(fd):
1443 continue
1443 continue
1444 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1444 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1445 return array.array('h', arri)[1]
1445 return array.array('h', arri)[1]
1446 except ValueError:
1446 except ValueError:
1447 pass
1447 pass
1448 except ImportError:
1448 except ImportError:
1449 pass
1449 pass
1450 return 80
1450 return 80
1451
1451
1452 def iterlines(iterator):
1452 def iterlines(iterator):
1453 for chunk in iterator:
1453 for chunk in iterator:
1454 for line in chunk.splitlines():
1454 for line in chunk.splitlines():
1455 yield line
1455 yield line