walk: kill util.cmdmatcher and _matcher
Matt Mackall
r6575:e08e0367 (branch: default)
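For orientation, here is a minimal usage sketch of the cmdutil.walk() generator this commit touches. It assumes a Mercurial 1.0-era tree where mercurial.ui, mercurial.hg and mercurial.cmdutil are importable; the repository path and the '*.py' pattern are hypothetical.

from mercurial import ui as uimod, hg, cmdutil

u = uimod.ui()
repo = hg.repository(u, '.')   # assumes '.' is a repository root
# walk() yields (src, abs, rel, exact) for every file matching the patterns;
# after this commit, glob expansion for plain relative patterns happens in
# matchpats() itself instead of inside the removed util.cmdmatcher wrapper.
for src, abs, rel, exact in cmdutil.walk(repo, ['*.py'], {}):
    print src, rel, exact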
@@ -1,1181 +1,1182
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, bisect, stat
10 import os, sys, bisect, stat
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
11 import mdiff, bdiff, util, templater, templatefilters, patch, errno
12
12
13 revrangesep = ':'
13 revrangesep = ':'
14
14
15 class UnknownCommand(Exception):
15 class UnknownCommand(Exception):
16 """Exception raised if command is not in the command table."""
16 """Exception raised if command is not in the command table."""
17 class AmbiguousCommand(Exception):
17 class AmbiguousCommand(Exception):
18 """Exception raised if command shortcut matches more than one command."""
18 """Exception raised if command shortcut matches more than one command."""
19
19
20 def findpossible(ui, cmd, table):
20 def findpossible(ui, cmd, table):
21 """
21 """
22 Return cmd -> (aliases, command table entry)
22 Return cmd -> (aliases, command table entry)
23 for each matching command.
23 for each matching command.
24 Return debug commands (or their aliases) only if no normal command matches.
24 Return debug commands (or their aliases) only if no normal command matches.
25 """
25 """
26 choice = {}
26 choice = {}
27 debugchoice = {}
27 debugchoice = {}
28 for e in table.keys():
28 for e in table.keys():
29 aliases = e.lstrip("^").split("|")
29 aliases = e.lstrip("^").split("|")
30 found = None
30 found = None
31 if cmd in aliases:
31 if cmd in aliases:
32 found = cmd
32 found = cmd
33 elif not ui.config("ui", "strict"):
33 elif not ui.config("ui", "strict"):
34 for a in aliases:
34 for a in aliases:
35 if a.startswith(cmd):
35 if a.startswith(cmd):
36 found = a
36 found = a
37 break
37 break
38 if found is not None:
38 if found is not None:
39 if aliases[0].startswith("debug") or found.startswith("debug"):
39 if aliases[0].startswith("debug") or found.startswith("debug"):
40 debugchoice[found] = (aliases, table[e])
40 debugchoice[found] = (aliases, table[e])
41 else:
41 else:
42 choice[found] = (aliases, table[e])
42 choice[found] = (aliases, table[e])
43
43
44 if not choice and debugchoice:
44 if not choice and debugchoice:
45 choice = debugchoice
45 choice = debugchoice
46
46
47 return choice
47 return choice
48
48
49 def findcmd(ui, cmd, table):
49 def findcmd(ui, cmd, table):
50 """Return (aliases, command table entry) for command string."""
50 """Return (aliases, command table entry) for command string."""
51 choice = findpossible(ui, cmd, table)
51 choice = findpossible(ui, cmd, table)
52
52
53 if cmd in choice:
53 if cmd in choice:
54 return choice[cmd]
54 return choice[cmd]
55
55
56 if len(choice) > 1:
56 if len(choice) > 1:
57 clist = choice.keys()
57 clist = choice.keys()
58 clist.sort()
58 clist.sort()
59 raise AmbiguousCommand(cmd, clist)
59 raise AmbiguousCommand(cmd, clist)
60
60
61 if choice:
61 if choice:
62 return choice.values()[0]
62 return choice.values()[0]
63
63
64 raise UnknownCommand(cmd)
64 raise UnknownCommand(cmd)
65
65
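As an aside, a hedged sketch of the abbreviation matching implemented by findpossible()/findcmd() above; the command table entry is a toy assumption, not a real Mercurial table.

from mercurial import ui as uimod, cmdutil

u = uimod.ui()
table = {'^status|st': (None, [], 'hg status')}   # toy entry: aliases 'status' and 'st'
# 'stat' is an unambiguous prefix of 'status', so it resolves to that entry
print cmdutil.findcmd(u, 'stat', table)
# unknown names raise UnknownCommand, ambiguous prefixes raise AmbiguousCommand
try:
    cmdutil.findcmd(u, 'bogus', table)
except cmdutil.UnknownCommand:
    pass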
66 def bail_if_changed(repo):
66 def bail_if_changed(repo):
67 if repo.dirstate.parents()[1] != nullid:
67 if repo.dirstate.parents()[1] != nullid:
68 raise util.Abort(_('outstanding uncommitted merge'))
68 raise util.Abort(_('outstanding uncommitted merge'))
69 modified, added, removed, deleted = repo.status()[:4]
69 modified, added, removed, deleted = repo.status()[:4]
70 if modified or added or removed or deleted:
70 if modified or added or removed or deleted:
71 raise util.Abort(_("outstanding uncommitted changes"))
71 raise util.Abort(_("outstanding uncommitted changes"))
72
72
73 def logmessage(opts):
73 def logmessage(opts):
74 """ get the log message according to -m and -l option """
74 """ get the log message according to -m and -l option """
75 message = opts['message']
75 message = opts['message']
76 logfile = opts['logfile']
76 logfile = opts['logfile']
77
77
78 if message and logfile:
78 if message and logfile:
79 raise util.Abort(_('options --message and --logfile are mutually '
79 raise util.Abort(_('options --message and --logfile are mutually '
80 'exclusive'))
80 'exclusive'))
81 if not message and logfile:
81 if not message and logfile:
82 try:
82 try:
83 if logfile == '-':
83 if logfile == '-':
84 message = sys.stdin.read()
84 message = sys.stdin.read()
85 else:
85 else:
86 message = open(logfile).read()
86 message = open(logfile).read()
87 except IOError, inst:
87 except IOError, inst:
88 raise util.Abort(_("can't read commit message '%s': %s") %
88 raise util.Abort(_("can't read commit message '%s': %s") %
89 (logfile, inst.strerror))
89 (logfile, inst.strerror))
90 return message
90 return message
91
91
92 def loglimit(opts):
92 def loglimit(opts):
93 """get the log limit according to option -l/--limit"""
93 """get the log limit according to option -l/--limit"""
94 limit = opts.get('limit')
94 limit = opts.get('limit')
95 if limit:
95 if limit:
96 try:
96 try:
97 limit = int(limit)
97 limit = int(limit)
98 except ValueError:
98 except ValueError:
99 raise util.Abort(_('limit must be a positive integer'))
99 raise util.Abort(_('limit must be a positive integer'))
100 if limit <= 0: raise util.Abort(_('limit must be positive'))
100 if limit <= 0: raise util.Abort(_('limit must be positive'))
101 else:
101 else:
102 limit = sys.maxint
102 limit = sys.maxint
103 return limit
103 return limit
104
104
105 def setremoteconfig(ui, opts):
105 def setremoteconfig(ui, opts):
106 "copy remote options to ui tree"
106 "copy remote options to ui tree"
107 if opts.get('ssh'):
107 if opts.get('ssh'):
108 ui.setconfig("ui", "ssh", opts['ssh'])
108 ui.setconfig("ui", "ssh", opts['ssh'])
109 if opts.get('remotecmd'):
109 if opts.get('remotecmd'):
110 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
110 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
111
111
112 def revpair(repo, revs):
112 def revpair(repo, revs):
113 '''return pair of nodes, given list of revisions. second item can
113 '''return pair of nodes, given list of revisions. second item can
114 be None, meaning use working dir.'''
114 be None, meaning use working dir.'''
115
115
116 def revfix(repo, val, defval):
116 def revfix(repo, val, defval):
117 if not val and val != 0 and defval is not None:
117 if not val and val != 0 and defval is not None:
118 val = defval
118 val = defval
119 return repo.lookup(val)
119 return repo.lookup(val)
120
120
121 if not revs:
121 if not revs:
122 return repo.dirstate.parents()[0], None
122 return repo.dirstate.parents()[0], None
123 end = None
123 end = None
124 if len(revs) == 1:
124 if len(revs) == 1:
125 if revrangesep in revs[0]:
125 if revrangesep in revs[0]:
126 start, end = revs[0].split(revrangesep, 1)
126 start, end = revs[0].split(revrangesep, 1)
127 start = revfix(repo, start, 0)
127 start = revfix(repo, start, 0)
128 end = revfix(repo, end, repo.changelog.count() - 1)
128 end = revfix(repo, end, repo.changelog.count() - 1)
129 else:
129 else:
130 start = revfix(repo, revs[0], None)
130 start = revfix(repo, revs[0], None)
131 elif len(revs) == 2:
131 elif len(revs) == 2:
132 if revrangesep in revs[0] or revrangesep in revs[1]:
132 if revrangesep in revs[0] or revrangesep in revs[1]:
133 raise util.Abort(_('too many revisions specified'))
133 raise util.Abort(_('too many revisions specified'))
134 start = revfix(repo, revs[0], None)
134 start = revfix(repo, revs[0], None)
135 end = revfix(repo, revs[1], None)
135 end = revfix(repo, revs[1], None)
136 else:
136 else:
137 raise util.Abort(_('too many revisions specified'))
137 raise util.Abort(_('too many revisions specified'))
138 return start, end
138 return start, end
139
139
140 def revrange(repo, revs):
140 def revrange(repo, revs):
141 """Yield revision as strings from a list of revision specifications."""
141 """Yield revision as strings from a list of revision specifications."""
142
142
143 def revfix(repo, val, defval):
143 def revfix(repo, val, defval):
144 if not val and val != 0 and defval is not None:
144 if not val and val != 0 and defval is not None:
145 return defval
145 return defval
146 return repo.changelog.rev(repo.lookup(val))
146 return repo.changelog.rev(repo.lookup(val))
147
147
148 seen, l = {}, []
148 seen, l = {}, []
149 for spec in revs:
149 for spec in revs:
150 if revrangesep in spec:
150 if revrangesep in spec:
151 start, end = spec.split(revrangesep, 1)
151 start, end = spec.split(revrangesep, 1)
152 start = revfix(repo, start, 0)
152 start = revfix(repo, start, 0)
153 end = revfix(repo, end, repo.changelog.count() - 1)
153 end = revfix(repo, end, repo.changelog.count() - 1)
154 step = start > end and -1 or 1
154 step = start > end and -1 or 1
155 for rev in xrange(start, end+step, step):
155 for rev in xrange(start, end+step, step):
156 if rev in seen:
156 if rev in seen:
157 continue
157 continue
158 seen[rev] = 1
158 seen[rev] = 1
159 l.append(rev)
159 l.append(rev)
160 else:
160 else:
161 rev = revfix(repo, spec, None)
161 rev = revfix(repo, spec, None)
162 if rev in seen:
162 if rev in seen:
163 continue
163 continue
164 seen[rev] = 1
164 seen[rev] = 1
165 l.append(rev)
165 l.append(rev)
166
166
167 return l
167 return l
168
168
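A short sketch of the revision-spec handling above; the repository path and revision numbers are assumptions.

from mercurial import ui as uimod, hg, cmdutil

u = uimod.ui()
repo = hg.repository(u, '.')
# a 'start:end' spec expands to every revision in the range, endpoints included
print cmdutil.revrange(repo, ['2:5'])   # e.g. [2, 3, 4, 5]
# revpair() returns nodes; with no spec it falls back to (working dir parent, None)
print cmdutil.revpair(repo, [])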
169 def make_filename(repo, pat, node,
169 def make_filename(repo, pat, node,
170 total=None, seqno=None, revwidth=None, pathname=None):
170 total=None, seqno=None, revwidth=None, pathname=None):
171 node_expander = {
171 node_expander = {
172 'H': lambda: hex(node),
172 'H': lambda: hex(node),
173 'R': lambda: str(repo.changelog.rev(node)),
173 'R': lambda: str(repo.changelog.rev(node)),
174 'h': lambda: short(node),
174 'h': lambda: short(node),
175 }
175 }
176 expander = {
176 expander = {
177 '%': lambda: '%',
177 '%': lambda: '%',
178 'b': lambda: os.path.basename(repo.root),
178 'b': lambda: os.path.basename(repo.root),
179 }
179 }
180
180
181 try:
181 try:
182 if node:
182 if node:
183 expander.update(node_expander)
183 expander.update(node_expander)
184 if node:
184 if node:
185 expander['r'] = (lambda:
185 expander['r'] = (lambda:
186 str(repo.changelog.rev(node)).zfill(revwidth or 0))
186 str(repo.changelog.rev(node)).zfill(revwidth or 0))
187 if total is not None:
187 if total is not None:
188 expander['N'] = lambda: str(total)
188 expander['N'] = lambda: str(total)
189 if seqno is not None:
189 if seqno is not None:
190 expander['n'] = lambda: str(seqno)
190 expander['n'] = lambda: str(seqno)
191 if total is not None and seqno is not None:
191 if total is not None and seqno is not None:
192 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
192 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
193 if pathname is not None:
193 if pathname is not None:
194 expander['s'] = lambda: os.path.basename(pathname)
194 expander['s'] = lambda: os.path.basename(pathname)
195 expander['d'] = lambda: os.path.dirname(pathname) or '.'
195 expander['d'] = lambda: os.path.dirname(pathname) or '.'
196 expander['p'] = lambda: pathname
196 expander['p'] = lambda: pathname
197
197
198 newname = []
198 newname = []
199 patlen = len(pat)
199 patlen = len(pat)
200 i = 0
200 i = 0
201 while i < patlen:
201 while i < patlen:
202 c = pat[i]
202 c = pat[i]
203 if c == '%':
203 if c == '%':
204 i += 1
204 i += 1
205 c = pat[i]
205 c = pat[i]
206 c = expander[c]()
206 c = expander[c]()
207 newname.append(c)
207 newname.append(c)
208 i += 1
208 i += 1
209 return ''.join(newname)
209 return ''.join(newname)
210 except KeyError, inst:
210 except KeyError, inst:
211 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
211 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
212 inst.args[0])
212 inst.args[0])
213
213
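A hedged illustration of the %-expansion implemented by make_filename() above; the repository name in the expected output is made up.

from mercurial import ui as uimod, hg, cmdutil

u = uimod.ui()
repo = hg.repository(u, '.')
node = repo.changelog.tip()
# '%b' -> basename of repo.root, '%R' -> revision number, '%h' -> short hash,
# so this prints something like 'myrepo-1181-0123456789ab.patch'
print cmdutil.make_filename(repo, '%b-%R-%h.patch', node)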
214 def make_file(repo, pat, node=None,
214 def make_file(repo, pat, node=None,
215 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
215 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
216 if not pat or pat == '-':
216 if not pat or pat == '-':
217 return 'w' in mode and sys.stdout or sys.stdin
217 return 'w' in mode and sys.stdout or sys.stdin
218 if hasattr(pat, 'write') and 'w' in mode:
218 if hasattr(pat, 'write') and 'w' in mode:
219 return pat
219 return pat
220 if hasattr(pat, 'read') and 'r' in mode:
220 if hasattr(pat, 'read') and 'r' in mode:
221 return pat
221 return pat
222 return open(make_filename(repo, pat, node, total, seqno, revwidth,
222 return open(make_filename(repo, pat, node, total, seqno, revwidth,
223 pathname),
223 pathname),
224 mode)
224 mode)
225
225
226 - def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
227 -     cwd = repo.getcwd()
228 -     return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
229 -                            opts.get('exclude'), globbed=globbed,
230 -                            default=default)
226 + def matchpats(repo, pats=[], opts={}, globbed=False, default='relpath'):
227 +     pats = pats or []
228 +     if not globbed and default == 'relpath':
229 +         pats = util.expand_glob(pats or [])
230 +     return util.matcher(repo.root, repo.getcwd(), pats, opts.get('include'),
231 +                         opts.get('exclude'), None, default)
231
232
232 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
233 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
233 -          default=None):
234 +          default='relpath'):
234 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
235 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
235 default=default)
236 default=default)
236 exact = dict.fromkeys(files)
237 exact = dict.fromkeys(files)
237 cwd = repo.getcwd()
238 cwd = repo.getcwd()
238 for src, fn in repo.walk(node=node, files=files, match=matchfn,
239 for src, fn in repo.walk(node=node, files=files, match=matchfn,
239 badmatch=badmatch):
240 badmatch=badmatch):
240 yield src, fn, repo.pathto(fn, cwd), fn in exact
241 yield src, fn, repo.pathto(fn, cwd), fn in exact
241
242
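For context on what the hunk above removes, here is a reconstruction of the util.cmdmatcher wrapper, inferred from the new matchpats() body rather than copied from util.py; treat the exact signature as an assumption.

from mercurial import util

def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[],
               globbed=False, default='relpath'):
    # expand shell globs for plain relative-path arguments, then delegate to
    # the generic matcher -- which is exactly what matchpats() now does inline
    if not globbed and default == 'relpath':
        names = util.expand_glob(names or [])
    return util.matcher(canonroot, cwd, names, inc, exc, None, default)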
242 def findrenames(repo, added=None, removed=None, threshold=0.5):
243 def findrenames(repo, added=None, removed=None, threshold=0.5):
243 '''find renamed files -- yields (before, after, score) tuples'''
244 '''find renamed files -- yields (before, after, score) tuples'''
244 if added is None or removed is None:
245 if added is None or removed is None:
245 added, removed = repo.status()[1:3]
246 added, removed = repo.status()[1:3]
246 ctx = repo.changectx()
247 ctx = repo.changectx()
247 for a in added:
248 for a in added:
248 aa = repo.wread(a)
249 aa = repo.wread(a)
249 bestname, bestscore = None, threshold
250 bestname, bestscore = None, threshold
250 for r in removed:
251 for r in removed:
251 rr = ctx.filectx(r).data()
252 rr = ctx.filectx(r).data()
252
253
253 # bdiff.blocks() returns blocks of matching lines
254 # bdiff.blocks() returns blocks of matching lines
254 # count the number of bytes in each
255 # count the number of bytes in each
255 equal = 0
256 equal = 0
256 alines = mdiff.splitnewlines(aa)
257 alines = mdiff.splitnewlines(aa)
257 matches = bdiff.blocks(aa, rr)
258 matches = bdiff.blocks(aa, rr)
258 for x1,x2,y1,y2 in matches:
259 for x1,x2,y1,y2 in matches:
259 for line in alines[x1:x2]:
260 for line in alines[x1:x2]:
260 equal += len(line)
261 equal += len(line)
261
262
262 lengths = len(aa) + len(rr)
263 lengths = len(aa) + len(rr)
263 if lengths:
264 if lengths:
264 myscore = equal*2.0 / lengths
265 myscore = equal*2.0 / lengths
265 if myscore >= bestscore:
266 if myscore >= bestscore:
266 bestname, bestscore = r, myscore
267 bestname, bestscore = r, myscore
267 if bestname:
268 if bestname:
268 yield bestname, a, bestscore
269 yield bestname, a, bestscore
269
270
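A quick worked example of the similarity score computed in findrenames() above; the byte counts are invented.

# two candidate files of 120 and 80 bytes sharing 60 bytes of matching lines
equal = 60
lengths = 120 + 80
score = equal * 2.0 / lengths   # 0.6
# findrenames() records the pair as a rename when score >= threshold,
# e.g. the default threshold of 0.5 in the signature above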
270 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
271 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
271 if dry_run is None:
272 if dry_run is None:
272 dry_run = opts.get('dry_run')
273 dry_run = opts.get('dry_run')
273 if similarity is None:
274 if similarity is None:
274 similarity = float(opts.get('similarity') or 0)
275 similarity = float(opts.get('similarity') or 0)
275 add, remove = [], []
276 add, remove = [], []
276 mapping = {}
277 mapping = {}
277 for src, abs, rel, exact in walk(repo, pats, opts):
278 for src, abs, rel, exact in walk(repo, pats, opts):
278 target = repo.wjoin(abs)
279 target = repo.wjoin(abs)
279 if src == 'f' and abs not in repo.dirstate:
280 if src == 'f' and abs not in repo.dirstate:
280 add.append(abs)
281 add.append(abs)
281 mapping[abs] = rel, exact
282 mapping[abs] = rel, exact
282 if repo.ui.verbose or not exact:
283 if repo.ui.verbose or not exact:
283 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
284 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
284 if repo.dirstate[abs] != 'r' and (not util.lexists(target)
285 if repo.dirstate[abs] != 'r' and (not util.lexists(target)
285 or (os.path.isdir(target) and not os.path.islink(target))):
286 or (os.path.isdir(target) and not os.path.islink(target))):
286 remove.append(abs)
287 remove.append(abs)
287 mapping[abs] = rel, exact
288 mapping[abs] = rel, exact
288 if repo.ui.verbose or not exact:
289 if repo.ui.verbose or not exact:
289 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
290 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
290 if not dry_run:
291 if not dry_run:
291 repo.remove(remove)
292 repo.remove(remove)
292 repo.add(add)
293 repo.add(add)
293 if similarity > 0:
294 if similarity > 0:
294 for old, new, score in findrenames(repo, add, remove, similarity):
295 for old, new, score in findrenames(repo, add, remove, similarity):
295 oldrel, oldexact = mapping[old]
296 oldrel, oldexact = mapping[old]
296 newrel, newexact = mapping[new]
297 newrel, newexact = mapping[new]
297 if repo.ui.verbose or not oldexact or not newexact:
298 if repo.ui.verbose or not oldexact or not newexact:
298 repo.ui.status(_('recording removal of %s as rename to %s '
299 repo.ui.status(_('recording removal of %s as rename to %s '
299 '(%d%% similar)\n') %
300 '(%d%% similar)\n') %
300 (oldrel, newrel, score * 100))
301 (oldrel, newrel, score * 100))
301 if not dry_run:
302 if not dry_run:
302 repo.copy(old, new)
303 repo.copy(old, new)
303
304
304 def copy(ui, repo, pats, opts, rename=False):
305 def copy(ui, repo, pats, opts, rename=False):
305 # called with the repo lock held
306 # called with the repo lock held
306 #
307 #
307 # hgsep => pathname that uses "/" to separate directories
308 # hgsep => pathname that uses "/" to separate directories
308 # ossep => pathname that uses os.sep to separate directories
309 # ossep => pathname that uses os.sep to separate directories
309 cwd = repo.getcwd()
310 cwd = repo.getcwd()
310 targets = {}
311 targets = {}
311 after = opts.get("after")
312 after = opts.get("after")
312 dryrun = opts.get("dry_run")
313 dryrun = opts.get("dry_run")
313
314
314 def walkpat(pat):
315 def walkpat(pat):
315 srcs = []
316 srcs = []
316 for tag, abs, rel, exact in walk(repo, [pat], opts, globbed=True):
317 for tag, abs, rel, exact in walk(repo, [pat], opts, globbed=True):
317 state = repo.dirstate[abs]
318 state = repo.dirstate[abs]
318 if state in '?r':
319 if state in '?r':
319 if exact and state == '?':
320 if exact and state == '?':
320 ui.warn(_('%s: not copying - file is not managed\n') % rel)
321 ui.warn(_('%s: not copying - file is not managed\n') % rel)
321 if exact and state == 'r':
322 if exact and state == 'r':
322 ui.warn(_('%s: not copying - file has been marked for'
323 ui.warn(_('%s: not copying - file has been marked for'
323 ' remove\n') % rel)
324 ' remove\n') % rel)
324 continue
325 continue
325 # abs: hgsep
326 # abs: hgsep
326 # rel: ossep
327 # rel: ossep
327 srcs.append((abs, rel, exact))
328 srcs.append((abs, rel, exact))
328 return srcs
329 return srcs
329
330
330 # abssrc: hgsep
331 # abssrc: hgsep
331 # relsrc: ossep
332 # relsrc: ossep
332 # otarget: ossep
333 # otarget: ossep
333 def copyfile(abssrc, relsrc, otarget, exact):
334 def copyfile(abssrc, relsrc, otarget, exact):
334 abstarget = util.canonpath(repo.root, cwd, otarget)
335 abstarget = util.canonpath(repo.root, cwd, otarget)
335 reltarget = repo.pathto(abstarget, cwd)
336 reltarget = repo.pathto(abstarget, cwd)
336 target = repo.wjoin(abstarget)
337 target = repo.wjoin(abstarget)
337 src = repo.wjoin(abssrc)
338 src = repo.wjoin(abssrc)
338 state = repo.dirstate[abstarget]
339 state = repo.dirstate[abstarget]
339
340
340 # check for collisions
341 # check for collisions
341 prevsrc = targets.get(abstarget)
342 prevsrc = targets.get(abstarget)
342 if prevsrc is not None:
343 if prevsrc is not None:
343 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
344 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
344 (reltarget, repo.pathto(abssrc, cwd),
345 (reltarget, repo.pathto(abssrc, cwd),
345 repo.pathto(prevsrc, cwd)))
346 repo.pathto(prevsrc, cwd)))
346 return
347 return
347
348
348 # check for overwrites
349 # check for overwrites
349 exists = os.path.exists(target)
350 exists = os.path.exists(target)
350 if (not after and exists or after and state in 'mn'):
351 if (not after and exists or after and state in 'mn'):
351 if not opts['force']:
352 if not opts['force']:
352 ui.warn(_('%s: not overwriting - file exists\n') %
353 ui.warn(_('%s: not overwriting - file exists\n') %
353 reltarget)
354 reltarget)
354 return
355 return
355
356
356 if after:
357 if after:
357 if not exists:
358 if not exists:
358 return
359 return
359 elif not dryrun:
360 elif not dryrun:
360 try:
361 try:
361 if exists:
362 if exists:
362 os.unlink(target)
363 os.unlink(target)
363 targetdir = os.path.dirname(target) or '.'
364 targetdir = os.path.dirname(target) or '.'
364 if not os.path.isdir(targetdir):
365 if not os.path.isdir(targetdir):
365 os.makedirs(targetdir)
366 os.makedirs(targetdir)
366 util.copyfile(src, target)
367 util.copyfile(src, target)
367 except IOError, inst:
368 except IOError, inst:
368 if inst.errno == errno.ENOENT:
369 if inst.errno == errno.ENOENT:
369 ui.warn(_('%s: deleted in working copy\n') % relsrc)
370 ui.warn(_('%s: deleted in working copy\n') % relsrc)
370 else:
371 else:
371 ui.warn(_('%s: cannot copy - %s\n') %
372 ui.warn(_('%s: cannot copy - %s\n') %
372 (relsrc, inst.strerror))
373 (relsrc, inst.strerror))
373 return True # report a failure
374 return True # report a failure
374
375
375 if ui.verbose or not exact:
376 if ui.verbose or not exact:
376 action = rename and "moving" or "copying"
377 action = rename and "moving" or "copying"
377 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
378 ui.status(_('%s %s to %s\n') % (action, relsrc, reltarget))
378
379
379 targets[abstarget] = abssrc
380 targets[abstarget] = abssrc
380
381
381 # fix up dirstate
382 # fix up dirstate
382 origsrc = repo.dirstate.copied(abssrc) or abssrc
383 origsrc = repo.dirstate.copied(abssrc) or abssrc
383 if abstarget == origsrc: # copying back a copy?
384 if abstarget == origsrc: # copying back a copy?
384 if state not in 'mn' and not dryrun:
385 if state not in 'mn' and not dryrun:
385 repo.dirstate.normallookup(abstarget)
386 repo.dirstate.normallookup(abstarget)
386 else:
387 else:
387 if repo.dirstate[origsrc] == 'a':
388 if repo.dirstate[origsrc] == 'a':
388 if not ui.quiet:
389 if not ui.quiet:
389 ui.warn(_("%s has not been committed yet, so no copy "
390 ui.warn(_("%s has not been committed yet, so no copy "
390 "data will be stored for %s.\n")
391 "data will be stored for %s.\n")
391 % (repo.pathto(origsrc, cwd), reltarget))
392 % (repo.pathto(origsrc, cwd), reltarget))
392 if abstarget not in repo.dirstate and not dryrun:
393 if abstarget not in repo.dirstate and not dryrun:
393 repo.add([abstarget])
394 repo.add([abstarget])
394 elif not dryrun:
395 elif not dryrun:
395 repo.copy(origsrc, abstarget)
396 repo.copy(origsrc, abstarget)
396
397
397 if rename and not dryrun:
398 if rename and not dryrun:
398 repo.remove([abssrc], not after)
399 repo.remove([abssrc], not after)
399
400
400 # pat: ossep
401 # pat: ossep
401 # dest ossep
402 # dest ossep
402 # srcs: list of (hgsep, hgsep, ossep, bool)
403 # srcs: list of (hgsep, hgsep, ossep, bool)
403 # return: function that takes hgsep and returns ossep
404 # return: function that takes hgsep and returns ossep
404 def targetpathfn(pat, dest, srcs):
405 def targetpathfn(pat, dest, srcs):
405 if os.path.isdir(pat):
406 if os.path.isdir(pat):
406 abspfx = util.canonpath(repo.root, cwd, pat)
407 abspfx = util.canonpath(repo.root, cwd, pat)
407 abspfx = util.localpath(abspfx)
408 abspfx = util.localpath(abspfx)
408 if destdirexists:
409 if destdirexists:
409 striplen = len(os.path.split(abspfx)[0])
410 striplen = len(os.path.split(abspfx)[0])
410 else:
411 else:
411 striplen = len(abspfx)
412 striplen = len(abspfx)
412 if striplen:
413 if striplen:
413 striplen += len(os.sep)
414 striplen += len(os.sep)
414 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
415 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
415 elif destdirexists:
416 elif destdirexists:
416 res = lambda p: os.path.join(dest,
417 res = lambda p: os.path.join(dest,
417 os.path.basename(util.localpath(p)))
418 os.path.basename(util.localpath(p)))
418 else:
419 else:
419 res = lambda p: dest
420 res = lambda p: dest
420 return res
421 return res
421
422
422 # pat: ossep
423 # pat: ossep
423 # dest ossep
424 # dest ossep
424 # srcs: list of (hgsep, hgsep, ossep, bool)
425 # srcs: list of (hgsep, hgsep, ossep, bool)
425 # return: function that takes hgsep and returns ossep
426 # return: function that takes hgsep and returns ossep
426 def targetpathafterfn(pat, dest, srcs):
427 def targetpathafterfn(pat, dest, srcs):
427 if util.patkind(pat, None)[0]:
428 if util.patkind(pat, None)[0]:
428 # a mercurial pattern
429 # a mercurial pattern
429 res = lambda p: os.path.join(dest,
430 res = lambda p: os.path.join(dest,
430 os.path.basename(util.localpath(p)))
431 os.path.basename(util.localpath(p)))
431 else:
432 else:
432 abspfx = util.canonpath(repo.root, cwd, pat)
433 abspfx = util.canonpath(repo.root, cwd, pat)
433 if len(abspfx) < len(srcs[0][0]):
434 if len(abspfx) < len(srcs[0][0]):
434 # A directory. Either the target path contains the last
435 # A directory. Either the target path contains the last
435 # component of the source path or it does not.
436 # component of the source path or it does not.
436 def evalpath(striplen):
437 def evalpath(striplen):
437 score = 0
438 score = 0
438 for s in srcs:
439 for s in srcs:
439 t = os.path.join(dest, util.localpath(s[0])[striplen:])
440 t = os.path.join(dest, util.localpath(s[0])[striplen:])
440 if os.path.exists(t):
441 if os.path.exists(t):
441 score += 1
442 score += 1
442 return score
443 return score
443
444
444 abspfx = util.localpath(abspfx)
445 abspfx = util.localpath(abspfx)
445 striplen = len(abspfx)
446 striplen = len(abspfx)
446 if striplen:
447 if striplen:
447 striplen += len(os.sep)
448 striplen += len(os.sep)
448 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
449 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
449 score = evalpath(striplen)
450 score = evalpath(striplen)
450 striplen1 = len(os.path.split(abspfx)[0])
451 striplen1 = len(os.path.split(abspfx)[0])
451 if striplen1:
452 if striplen1:
452 striplen1 += len(os.sep)
453 striplen1 += len(os.sep)
453 if evalpath(striplen1) > score:
454 if evalpath(striplen1) > score:
454 striplen = striplen1
455 striplen = striplen1
455 res = lambda p: os.path.join(dest,
456 res = lambda p: os.path.join(dest,
456 util.localpath(p)[striplen:])
457 util.localpath(p)[striplen:])
457 else:
458 else:
458 # a file
459 # a file
459 if destdirexists:
460 if destdirexists:
460 res = lambda p: os.path.join(dest,
461 res = lambda p: os.path.join(dest,
461 os.path.basename(util.localpath(p)))
462 os.path.basename(util.localpath(p)))
462 else:
463 else:
463 res = lambda p: dest
464 res = lambda p: dest
464 return res
465 return res
465
466
466
467
467 pats = util.expand_glob(pats)
468 pats = util.expand_glob(pats)
468 if not pats:
469 if not pats:
469 raise util.Abort(_('no source or destination specified'))
470 raise util.Abort(_('no source or destination specified'))
470 if len(pats) == 1:
471 if len(pats) == 1:
471 raise util.Abort(_('no destination specified'))
472 raise util.Abort(_('no destination specified'))
472 dest = pats.pop()
473 dest = pats.pop()
473 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
474 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
474 if not destdirexists:
475 if not destdirexists:
475 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
476 if len(pats) > 1 or util.patkind(pats[0], None)[0]:
476 raise util.Abort(_('with multiple sources, destination must be an '
477 raise util.Abort(_('with multiple sources, destination must be an '
477 'existing directory'))
478 'existing directory'))
478 if util.endswithsep(dest):
479 if util.endswithsep(dest):
479 raise util.Abort(_('destination %s is not a directory') % dest)
480 raise util.Abort(_('destination %s is not a directory') % dest)
480
481
481 tfn = targetpathfn
482 tfn = targetpathfn
482 if after:
483 if after:
483 tfn = targetpathafterfn
484 tfn = targetpathafterfn
484 copylist = []
485 copylist = []
485 for pat in pats:
486 for pat in pats:
486 srcs = walkpat(pat)
487 srcs = walkpat(pat)
487 if not srcs:
488 if not srcs:
488 continue
489 continue
489 copylist.append((tfn(pat, dest, srcs), srcs))
490 copylist.append((tfn(pat, dest, srcs), srcs))
490 if not copylist:
491 if not copylist:
491 raise util.Abort(_('no files to copy'))
492 raise util.Abort(_('no files to copy'))
492
493
493 errors = 0
494 errors = 0
494 for targetpath, srcs in copylist:
495 for targetpath, srcs in copylist:
495 for abssrc, relsrc, exact in srcs:
496 for abssrc, relsrc, exact in srcs:
496 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
497 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
497 errors += 1
498 errors += 1
498
499
499 if errors:
500 if errors:
500 ui.warn(_('(consider using --after)\n'))
501 ui.warn(_('(consider using --after)\n'))
501
502
502 return errors
503 return errors
503
504
504 def service(opts, parentfn=None, initfn=None, runfn=None):
505 def service(opts, parentfn=None, initfn=None, runfn=None):
505 '''Run a command as a service.'''
506 '''Run a command as a service.'''
506
507
507 if opts['daemon'] and not opts['daemon_pipefds']:
508 if opts['daemon'] and not opts['daemon_pipefds']:
508 rfd, wfd = os.pipe()
509 rfd, wfd = os.pipe()
509 args = sys.argv[:]
510 args = sys.argv[:]
510 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
511 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
511 # Don't pass --cwd to the child process, because we've already
512 # Don't pass --cwd to the child process, because we've already
512 # changed directory.
513 # changed directory.
513 for i in xrange(1,len(args)):
514 for i in xrange(1,len(args)):
514 if args[i].startswith('--cwd='):
515 if args[i].startswith('--cwd='):
515 del args[i]
516 del args[i]
516 break
517 break
517 elif args[i].startswith('--cwd'):
518 elif args[i].startswith('--cwd'):
518 del args[i:i+2]
519 del args[i:i+2]
519 break
520 break
520 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
521 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
521 args[0], args)
522 args[0], args)
522 os.close(wfd)
523 os.close(wfd)
523 os.read(rfd, 1)
524 os.read(rfd, 1)
524 if parentfn:
525 if parentfn:
525 return parentfn(pid)
526 return parentfn(pid)
526 else:
527 else:
527 os._exit(0)
528 os._exit(0)
528
529
529 if initfn:
530 if initfn:
530 initfn()
531 initfn()
531
532
532 if opts['pid_file']:
533 if opts['pid_file']:
533 fp = open(opts['pid_file'], 'w')
534 fp = open(opts['pid_file'], 'w')
534 fp.write(str(os.getpid()) + '\n')
535 fp.write(str(os.getpid()) + '\n')
535 fp.close()
536 fp.close()
536
537
537 if opts['daemon_pipefds']:
538 if opts['daemon_pipefds']:
538 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
539 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
539 os.close(rfd)
540 os.close(rfd)
540 try:
541 try:
541 os.setsid()
542 os.setsid()
542 except AttributeError:
543 except AttributeError:
543 pass
544 pass
544 os.write(wfd, 'y')
545 os.write(wfd, 'y')
545 os.close(wfd)
546 os.close(wfd)
546 sys.stdout.flush()
547 sys.stdout.flush()
547 sys.stderr.flush()
548 sys.stderr.flush()
548 fd = os.open(util.nulldev, os.O_RDWR)
549 fd = os.open(util.nulldev, os.O_RDWR)
549 if fd != 0: os.dup2(fd, 0)
550 if fd != 0: os.dup2(fd, 0)
550 if fd != 1: os.dup2(fd, 1)
551 if fd != 1: os.dup2(fd, 1)
551 if fd != 2: os.dup2(fd, 2)
552 if fd != 2: os.dup2(fd, 2)
552 if fd not in (0, 1, 2): os.close(fd)
553 if fd not in (0, 1, 2): os.close(fd)
553
554
554 if runfn:
555 if runfn:
555 return runfn()
556 return runfn()
556
557
557 class changeset_printer(object):
558 class changeset_printer(object):
558 '''show changeset information when templating not requested.'''
559 '''show changeset information when templating not requested.'''
559
560
560 def __init__(self, ui, repo, patch, buffered):
561 def __init__(self, ui, repo, patch, buffered):
561 self.ui = ui
562 self.ui = ui
562 self.repo = repo
563 self.repo = repo
563 self.buffered = buffered
564 self.buffered = buffered
564 self.patch = patch
565 self.patch = patch
565 self.header = {}
566 self.header = {}
566 self.hunk = {}
567 self.hunk = {}
567 self.lastheader = None
568 self.lastheader = None
568
569
569 def flush(self, rev):
570 def flush(self, rev):
570 if rev in self.header:
571 if rev in self.header:
571 h = self.header[rev]
572 h = self.header[rev]
572 if h != self.lastheader:
573 if h != self.lastheader:
573 self.lastheader = h
574 self.lastheader = h
574 self.ui.write(h)
575 self.ui.write(h)
575 del self.header[rev]
576 del self.header[rev]
576 if rev in self.hunk:
577 if rev in self.hunk:
577 self.ui.write(self.hunk[rev])
578 self.ui.write(self.hunk[rev])
578 del self.hunk[rev]
579 del self.hunk[rev]
579 return 1
580 return 1
580 return 0
581 return 0
581
582
582 def show(self, rev=0, changenode=None, copies=(), **props):
583 def show(self, rev=0, changenode=None, copies=(), **props):
583 if self.buffered:
584 if self.buffered:
584 self.ui.pushbuffer()
585 self.ui.pushbuffer()
585 self._show(rev, changenode, copies, props)
586 self._show(rev, changenode, copies, props)
586 self.hunk[rev] = self.ui.popbuffer()
587 self.hunk[rev] = self.ui.popbuffer()
587 else:
588 else:
588 self._show(rev, changenode, copies, props)
589 self._show(rev, changenode, copies, props)
589
590
590 def _show(self, rev, changenode, copies, props):
591 def _show(self, rev, changenode, copies, props):
591 '''show a single changeset or file revision'''
592 '''show a single changeset or file revision'''
592 log = self.repo.changelog
593 log = self.repo.changelog
593 if changenode is None:
594 if changenode is None:
594 changenode = log.node(rev)
595 changenode = log.node(rev)
595 elif not rev:
596 elif not rev:
596 rev = log.rev(changenode)
597 rev = log.rev(changenode)
597
598
598 if self.ui.quiet:
599 if self.ui.quiet:
599 self.ui.write("%d:%s\n" % (rev, short(changenode)))
600 self.ui.write("%d:%s\n" % (rev, short(changenode)))
600 return
601 return
601
602
602 changes = log.read(changenode)
603 changes = log.read(changenode)
603 date = util.datestr(changes[2])
604 date = util.datestr(changes[2])
604 extra = changes[5]
605 extra = changes[5]
605 branch = extra.get("branch")
606 branch = extra.get("branch")
606
607
607 hexfunc = self.ui.debugflag and hex or short
608 hexfunc = self.ui.debugflag and hex or short
608
609
609 parents = [(p, hexfunc(log.node(p)))
610 parents = [(p, hexfunc(log.node(p)))
610 for p in self._meaningful_parentrevs(log, rev)]
611 for p in self._meaningful_parentrevs(log, rev)]
611
612
612 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
613 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
613
614
614 # don't show the default branch name
615 # don't show the default branch name
615 if branch != 'default':
616 if branch != 'default':
616 branch = util.tolocal(branch)
617 branch = util.tolocal(branch)
617 self.ui.write(_("branch: %s\n") % branch)
618 self.ui.write(_("branch: %s\n") % branch)
618 for tag in self.repo.nodetags(changenode):
619 for tag in self.repo.nodetags(changenode):
619 self.ui.write(_("tag: %s\n") % tag)
620 self.ui.write(_("tag: %s\n") % tag)
620 for parent in parents:
621 for parent in parents:
621 self.ui.write(_("parent: %d:%s\n") % parent)
622 self.ui.write(_("parent: %d:%s\n") % parent)
622
623
623 if self.ui.debugflag:
624 if self.ui.debugflag:
624 self.ui.write(_("manifest: %d:%s\n") %
625 self.ui.write(_("manifest: %d:%s\n") %
625 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
626 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
626 self.ui.write(_("user: %s\n") % changes[1])
627 self.ui.write(_("user: %s\n") % changes[1])
627 self.ui.write(_("date: %s\n") % date)
628 self.ui.write(_("date: %s\n") % date)
628
629
629 if self.ui.debugflag:
630 if self.ui.debugflag:
630 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
631 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
631 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
632 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
632 files):
633 files):
633 if value:
634 if value:
634 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
635 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
635 elif changes[3] and self.ui.verbose:
636 elif changes[3] and self.ui.verbose:
636 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
637 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
637 if copies and self.ui.verbose:
638 if copies and self.ui.verbose:
638 copies = ['%s (%s)' % c for c in copies]
639 copies = ['%s (%s)' % c for c in copies]
639 self.ui.write(_("copies: %s\n") % ' '.join(copies))
640 self.ui.write(_("copies: %s\n") % ' '.join(copies))
640
641
641 if extra and self.ui.debugflag:
642 if extra and self.ui.debugflag:
642 extraitems = extra.items()
643 extraitems = extra.items()
643 extraitems.sort()
644 extraitems.sort()
644 for key, value in extraitems:
645 for key, value in extraitems:
645 self.ui.write(_("extra: %s=%s\n")
646 self.ui.write(_("extra: %s=%s\n")
646 % (key, value.encode('string_escape')))
647 % (key, value.encode('string_escape')))
647
648
648 description = changes[4].strip()
649 description = changes[4].strip()
649 if description:
650 if description:
650 if self.ui.verbose:
651 if self.ui.verbose:
651 self.ui.write(_("description:\n"))
652 self.ui.write(_("description:\n"))
652 self.ui.write(description)
653 self.ui.write(description)
653 self.ui.write("\n\n")
654 self.ui.write("\n\n")
654 else:
655 else:
655 self.ui.write(_("summary: %s\n") %
656 self.ui.write(_("summary: %s\n") %
656 description.splitlines()[0])
657 description.splitlines()[0])
657 self.ui.write("\n")
658 self.ui.write("\n")
658
659
659 self.showpatch(changenode)
660 self.showpatch(changenode)
660
661
661 def showpatch(self, node):
662 def showpatch(self, node):
662 if self.patch:
663 if self.patch:
663 prev = self.repo.changelog.parents(node)[0]
664 prev = self.repo.changelog.parents(node)[0]
664 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
665 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui,
665 opts=patch.diffopts(self.ui))
666 opts=patch.diffopts(self.ui))
666 self.ui.write("\n")
667 self.ui.write("\n")
667
668
668 def _meaningful_parentrevs(self, log, rev):
669 def _meaningful_parentrevs(self, log, rev):
669 """Return list of meaningful (or all if debug) parentrevs for rev.
670 """Return list of meaningful (or all if debug) parentrevs for rev.
670
671
671 For merges (two non-nullrev revisions) both parents are meaningful.
672 For merges (two non-nullrev revisions) both parents are meaningful.
672 Otherwise the first parent revision is considered meaningful if it
673 Otherwise the first parent revision is considered meaningful if it
673 is not the preceding revision.
674 is not the preceding revision.
674 """
675 """
675 parents = log.parentrevs(rev)
676 parents = log.parentrevs(rev)
676 if not self.ui.debugflag and parents[1] == nullrev:
677 if not self.ui.debugflag and parents[1] == nullrev:
677 if parents[0] >= rev - 1:
678 if parents[0] >= rev - 1:
678 parents = []
679 parents = []
679 else:
680 else:
680 parents = [parents[0]]
681 parents = [parents[0]]
681 return parents
682 return parents
682
683
683
684
684 class changeset_templater(changeset_printer):
685 class changeset_templater(changeset_printer):
685 '''format changeset information.'''
686 '''format changeset information.'''
686
687
687 def __init__(self, ui, repo, patch, mapfile, buffered):
688 def __init__(self, ui, repo, patch, mapfile, buffered):
688 changeset_printer.__init__(self, ui, repo, patch, buffered)
689 changeset_printer.__init__(self, ui, repo, patch, buffered)
689 filters = templatefilters.filters.copy()
690 filters = templatefilters.filters.copy()
690 filters['formatnode'] = (ui.debugflag and (lambda x: x)
691 filters['formatnode'] = (ui.debugflag and (lambda x: x)
691 or (lambda x: x[:12]))
692 or (lambda x: x[:12]))
692 self.t = templater.templater(mapfile, filters,
693 self.t = templater.templater(mapfile, filters,
693 cache={
694 cache={
694 'parent': '{rev}:{node|formatnode} ',
695 'parent': '{rev}:{node|formatnode} ',
695 'manifest': '{rev}:{node|formatnode}',
696 'manifest': '{rev}:{node|formatnode}',
696 'filecopy': '{name} ({source})'})
697 'filecopy': '{name} ({source})'})
697
698
698 def use_template(self, t):
699 def use_template(self, t):
699 '''set template string to use'''
700 '''set template string to use'''
700 self.t.cache['changeset'] = t
701 self.t.cache['changeset'] = t
701
702
702 def _show(self, rev, changenode, copies, props):
703 def _show(self, rev, changenode, copies, props):
703 '''show a single changeset or file revision'''
704 '''show a single changeset or file revision'''
704 log = self.repo.changelog
705 log = self.repo.changelog
705 if changenode is None:
706 if changenode is None:
706 changenode = log.node(rev)
707 changenode = log.node(rev)
707 elif not rev:
708 elif not rev:
708 rev = log.rev(changenode)
709 rev = log.rev(changenode)
709
710
710 changes = log.read(changenode)
711 changes = log.read(changenode)
711
712
712 def showlist(name, values, plural=None, **args):
713 def showlist(name, values, plural=None, **args):
713 '''expand set of values.
714 '''expand set of values.
714 name is name of key in template map.
715 name is name of key in template map.
715 values is list of strings or dicts.
716 values is list of strings or dicts.
716 plural is plural of name, if not simply name + 's'.
717 plural is plural of name, if not simply name + 's'.
717
718
718 expansion works like this, given name 'foo'.
719 expansion works like this, given name 'foo'.
719
720
720 if values is empty, expand 'no_foos'.
721 if values is empty, expand 'no_foos'.
721
722
722 if 'foo' not in template map, return values as a string,
723 if 'foo' not in template map, return values as a string,
723 joined by space.
724 joined by space.
724
725
725 expand 'start_foos'.
726 expand 'start_foos'.
726
727
727 for each value, expand 'foo'. if 'last_foo' in template
728 for each value, expand 'foo'. if 'last_foo' in template
728 map, expand it instead of 'foo' for last key.
729 map, expand it instead of 'foo' for last key.
729
730
730 expand 'end_foos'.
731 expand 'end_foos'.
731 '''
732 '''
732 if plural: names = plural
733 if plural: names = plural
733 else: names = name + 's'
734 else: names = name + 's'
734 if not values:
735 if not values:
735 noname = 'no_' + names
736 noname = 'no_' + names
736 if noname in self.t:
737 if noname in self.t:
737 yield self.t(noname, **args)
738 yield self.t(noname, **args)
738 return
739 return
739 if name not in self.t:
740 if name not in self.t:
740 if isinstance(values[0], str):
741 if isinstance(values[0], str):
741 yield ' '.join(values)
742 yield ' '.join(values)
742 else:
743 else:
743 for v in values:
744 for v in values:
744 yield dict(v, **args)
745 yield dict(v, **args)
745 return
746 return
746 startname = 'start_' + names
747 startname = 'start_' + names
747 if startname in self.t:
748 if startname in self.t:
748 yield self.t(startname, **args)
749 yield self.t(startname, **args)
749 vargs = args.copy()
750 vargs = args.copy()
750 def one(v, tag=name):
751 def one(v, tag=name):
751 try:
752 try:
752 vargs.update(v)
753 vargs.update(v)
753 except (AttributeError, ValueError):
754 except (AttributeError, ValueError):
754 try:
755 try:
755 for a, b in v:
756 for a, b in v:
756 vargs[a] = b
757 vargs[a] = b
757 except ValueError:
758 except ValueError:
758 vargs[name] = v
759 vargs[name] = v
759 return self.t(tag, **vargs)
760 return self.t(tag, **vargs)
760 lastname = 'last_' + name
761 lastname = 'last_' + name
761 if lastname in self.t:
762 if lastname in self.t:
762 last = values.pop()
763 last = values.pop()
763 else:
764 else:
764 last = None
765 last = None
765 for v in values:
766 for v in values:
766 yield one(v)
767 yield one(v)
767 if last is not None:
768 if last is not None:
768 yield one(last, tag=lastname)
769 yield one(last, tag=lastname)
769 endname = 'end_' + names
770 endname = 'end_' + names
770 if endname in self.t:
771 if endname in self.t:
771 yield self.t(endname, **args)
772 yield self.t(endname, **args)
772
773
773 def showbranches(**args):
774 def showbranches(**args):
774 branch = changes[5].get("branch")
775 branch = changes[5].get("branch")
775 if branch != 'default':
776 if branch != 'default':
776 branch = util.tolocal(branch)
777 branch = util.tolocal(branch)
777 return showlist('branch', [branch], plural='branches', **args)
778 return showlist('branch', [branch], plural='branches', **args)
778
779
779 def showparents(**args):
780 def showparents(**args):
780 parents = [[('rev', p), ('node', hex(log.node(p)))]
781 parents = [[('rev', p), ('node', hex(log.node(p)))]
781 for p in self._meaningful_parentrevs(log, rev)]
782 for p in self._meaningful_parentrevs(log, rev)]
782 return showlist('parent', parents, **args)
783 return showlist('parent', parents, **args)
783
784
784 def showtags(**args):
785 def showtags(**args):
785 return showlist('tag', self.repo.nodetags(changenode), **args)
786 return showlist('tag', self.repo.nodetags(changenode), **args)
786
787
787 def showextras(**args):
788 def showextras(**args):
788 extras = changes[5].items()
789 extras = changes[5].items()
789 extras.sort()
790 extras.sort()
790 for key, value in extras:
791 for key, value in extras:
791 args = args.copy()
792 args = args.copy()
792 args.update(dict(key=key, value=value))
793 args.update(dict(key=key, value=value))
793 yield self.t('extra', **args)
794 yield self.t('extra', **args)
794
795
795 def showcopies(**args):
796 def showcopies(**args):
796 c = [{'name': x[0], 'source': x[1]} for x in copies]
797 c = [{'name': x[0], 'source': x[1]} for x in copies]
797 return showlist('file_copy', c, plural='file_copies', **args)
798 return showlist('file_copy', c, plural='file_copies', **args)
798
799
799 files = []
800 files = []
800 def getfiles():
801 def getfiles():
801 if not files:
802 if not files:
802 files[:] = self.repo.status(
803 files[:] = self.repo.status(
803 log.parents(changenode)[0], changenode)[:3]
804 log.parents(changenode)[0], changenode)[:3]
804 return files
805 return files
805 def showfiles(**args):
806 def showfiles(**args):
806 return showlist('file', changes[3], **args)
807 return showlist('file', changes[3], **args)
807 def showmods(**args):
808 def showmods(**args):
808 return showlist('file_mod', getfiles()[0], **args)
809 return showlist('file_mod', getfiles()[0], **args)
809 def showadds(**args):
810 def showadds(**args):
810 return showlist('file_add', getfiles()[1], **args)
811 return showlist('file_add', getfiles()[1], **args)
811 def showdels(**args):
812 def showdels(**args):
812 return showlist('file_del', getfiles()[2], **args)
813 return showlist('file_del', getfiles()[2], **args)
813 def showmanifest(**args):
814 def showmanifest(**args):
814 args = args.copy()
815 args = args.copy()
815 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
816 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
816 node=hex(changes[0])))
817 node=hex(changes[0])))
817 return self.t('manifest', **args)
818 return self.t('manifest', **args)
818
819
819 defprops = {
820 defprops = {
820 'author': changes[1],
821 'author': changes[1],
821 'branches': showbranches,
822 'branches': showbranches,
822 'date': changes[2],
823 'date': changes[2],
823 'desc': changes[4].strip(),
824 'desc': changes[4].strip(),
824 'file_adds': showadds,
825 'file_adds': showadds,
825 'file_dels': showdels,
826 'file_dels': showdels,
826 'file_mods': showmods,
827 'file_mods': showmods,
827 'files': showfiles,
828 'files': showfiles,
828 'file_copies': showcopies,
829 'file_copies': showcopies,
829 'manifest': showmanifest,
830 'manifest': showmanifest,
830 'node': hex(changenode),
831 'node': hex(changenode),
831 'parents': showparents,
832 'parents': showparents,
832 'rev': rev,
833 'rev': rev,
833 'tags': showtags,
834 'tags': showtags,
834 'extras': showextras,
835 'extras': showextras,
835 }
836 }
836 props = props.copy()
837 props = props.copy()
837 props.update(defprops)
838 props.update(defprops)
838
839
839 try:
840 try:
840 if self.ui.debugflag and 'header_debug' in self.t:
841 if self.ui.debugflag and 'header_debug' in self.t:
841 key = 'header_debug'
842 key = 'header_debug'
842 elif self.ui.quiet and 'header_quiet' in self.t:
843 elif self.ui.quiet and 'header_quiet' in self.t:
843 key = 'header_quiet'
844 key = 'header_quiet'
844 elif self.ui.verbose and 'header_verbose' in self.t:
845 elif self.ui.verbose and 'header_verbose' in self.t:
845 key = 'header_verbose'
846 key = 'header_verbose'
846 elif 'header' in self.t:
847 elif 'header' in self.t:
847 key = 'header'
848 key = 'header'
848 else:
849 else:
849 key = ''
850 key = ''
850 if key:
851 if key:
851 h = templater.stringify(self.t(key, **props))
852 h = templater.stringify(self.t(key, **props))
852 if self.buffered:
853 if self.buffered:
853 self.header[rev] = h
854 self.header[rev] = h
854 else:
855 else:
855 self.ui.write(h)
856 self.ui.write(h)
856 if self.ui.debugflag and 'changeset_debug' in self.t:
857 if self.ui.debugflag and 'changeset_debug' in self.t:
857 key = 'changeset_debug'
858 key = 'changeset_debug'
858 elif self.ui.quiet and 'changeset_quiet' in self.t:
859 elif self.ui.quiet and 'changeset_quiet' in self.t:
859 key = 'changeset_quiet'
860 key = 'changeset_quiet'
860 elif self.ui.verbose and 'changeset_verbose' in self.t:
861 elif self.ui.verbose and 'changeset_verbose' in self.t:
861 key = 'changeset_verbose'
862 key = 'changeset_verbose'
862 else:
863 else:
863 key = 'changeset'
864 key = 'changeset'
864 self.ui.write(templater.stringify(self.t(key, **props)))
865 self.ui.write(templater.stringify(self.t(key, **props)))
865 self.showpatch(changenode)
866 self.showpatch(changenode)
866 except KeyError, inst:
867 except KeyError, inst:
867 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
868 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
868 inst.args[0]))
869 inst.args[0]))
869 except SyntaxError, inst:
870 except SyntaxError, inst:
870 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
871 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
871
872
872 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
873 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
873 """show one changeset using template or regular display.
874 """show one changeset using template or regular display.
874
875
875 Display format will be the first non-empty hit of:
876 Display format will be the first non-empty hit of:
876 1. option 'template'
877 1. option 'template'
877 2. option 'style'
878 2. option 'style'
878 3. [ui] setting 'logtemplate'
879 3. [ui] setting 'logtemplate'
879 4. [ui] setting 'style'
880 4. [ui] setting 'style'
880 If all of these values are either the unset or the empty string,
881 If all of these values are either the unset or the empty string,
881 regular display via changeset_printer() is done.
882 regular display via changeset_printer() is done.
882 """
883 """
883 # options
884 # options
884 patch = False
885 patch = False
885 if opts.get('patch'):
886 if opts.get('patch'):
886 patch = matchfn or util.always
887 patch = matchfn or util.always
887
888
888 tmpl = opts.get('template')
889 tmpl = opts.get('template')
889 mapfile = None
890 mapfile = None
890 if tmpl:
891 if tmpl:
891 tmpl = templater.parsestring(tmpl, quoted=False)
892 tmpl = templater.parsestring(tmpl, quoted=False)
892 else:
893 else:
893 mapfile = opts.get('style')
894 mapfile = opts.get('style')
894 # ui settings
895 # ui settings
895 if not mapfile:
896 if not mapfile:
896 tmpl = ui.config('ui', 'logtemplate')
897 tmpl = ui.config('ui', 'logtemplate')
897 if tmpl:
898 if tmpl:
898 tmpl = templater.parsestring(tmpl)
899 tmpl = templater.parsestring(tmpl)
899 else:
900 else:
900 mapfile = ui.config('ui', 'style')
901 mapfile = ui.config('ui', 'style')
901
902
902 if tmpl or mapfile:
903 if tmpl or mapfile:
903 if mapfile:
904 if mapfile:
904 if not os.path.split(mapfile)[0]:
905 if not os.path.split(mapfile)[0]:
905 mapname = (templater.templatepath('map-cmdline.' + mapfile)
906 mapname = (templater.templatepath('map-cmdline.' + mapfile)
906 or templater.templatepath(mapfile))
907 or templater.templatepath(mapfile))
907 if mapname: mapfile = mapname
908 if mapname: mapfile = mapname
908 try:
909 try:
909 t = changeset_templater(ui, repo, patch, mapfile, buffered)
910 t = changeset_templater(ui, repo, patch, mapfile, buffered)
910 except SyntaxError, inst:
911 except SyntaxError, inst:
911 raise util.Abort(inst.args[0])
912 raise util.Abort(inst.args[0])
912 if tmpl: t.use_template(tmpl)
913 if tmpl: t.use_template(tmpl)
913 return t
914 return t
914 return changeset_printer(ui, repo, patch, buffered)
915 return changeset_printer(ui, repo, patch, buffered)
915
916
916 def finddate(ui, repo, date):
917 def finddate(ui, repo, date):
917 """Find the tipmost changeset that matches the given date spec"""
918 """Find the tipmost changeset that matches the given date spec"""
918 df = util.matchdate(date)
919 df = util.matchdate(date)
919 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
920 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
920 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
921 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
921 results = {}
922 results = {}
922 for st, rev, fns in changeiter:
923 for st, rev, fns in changeiter:
923 if st == 'add':
924 if st == 'add':
924 d = get(rev)[2]
925 d = get(rev)[2]
925 if df(d[0]):
926 if df(d[0]):
926 results[rev] = d
927 results[rev] = d
927 elif st == 'iter':
928 elif st == 'iter':
928 if rev in results:
929 if rev in results:
929 ui.status("Found revision %s from %s\n" %
930 ui.status("Found revision %s from %s\n" %
930 (rev, util.datestr(results[rev])))
931 (rev, util.datestr(results[rev])))
931 return str(rev)
932 return str(rev)
932
933
933 raise util.Abort(_("revision matching date not found"))
934 raise util.Abort(_("revision matching date not found"))
934
935
935 def walkchangerevs(ui, repo, pats, change, opts):
936 def walkchangerevs(ui, repo, pats, change, opts):
936 '''Iterate over files and the revs they changed in.
937 '''Iterate over files and the revs they changed in.
937
938
938 Callers most commonly need to iterate backwards over the history
939 Callers most commonly need to iterate backwards over the history
939 it is interested in. Doing so has awful (quadratic-looking)
940 it is interested in. Doing so has awful (quadratic-looking)
940 performance, so we use iterators in a "windowed" way.
941 performance, so we use iterators in a "windowed" way.
941
942
942 We walk a window of revisions in the desired order. Within the
943 We walk a window of revisions in the desired order. Within the
943 window, we first walk forwards to gather data, then in the desired
944 window, we first walk forwards to gather data, then in the desired
944 order (usually backwards) to display it.
945 order (usually backwards) to display it.
945
946
946 This function returns an (iterator, matchfn) tuple. The iterator
947 This function returns an (iterator, matchfn) tuple. The iterator
947 yields 3-tuples. They will be of one of the following forms:
948 yields 3-tuples. They will be of one of the following forms:
948
949
949 "window", incrementing, lastrev: stepping through a window,
950 "window", incrementing, lastrev: stepping through a window,
950 positive if walking forwards through revs, last rev in the
951 positive if walking forwards through revs, last rev in the
951 sequence iterated over - use to reset state for the current window
952 sequence iterated over - use to reset state for the current window
952
953
953 "add", rev, fns: out-of-order traversal of the given file names
954 "add", rev, fns: out-of-order traversal of the given file names
954 fns, which changed during revision rev - use to gather data for
955 fns, which changed during revision rev - use to gather data for
955 possible display
956 possible display
956
957
957 "iter", rev, None: in-order traversal of the revs earlier iterated
958 "iter", rev, None: in-order traversal of the revs earlier iterated
958 over with "add" - use to display data'''
959 over with "add" - use to display data'''
959
960
960 def increasing_windows(start, end, windowsize=8, sizelimit=512):
961 def increasing_windows(start, end, windowsize=8, sizelimit=512):
961 if start < end:
962 if start < end:
962 while start < end:
963 while start < end:
963 yield start, min(windowsize, end-start)
964 yield start, min(windowsize, end-start)
964 start += windowsize
965 start += windowsize
965 if windowsize < sizelimit:
966 if windowsize < sizelimit:
966 windowsize *= 2
967 windowsize *= 2
967 else:
968 else:
968 while start > end:
969 while start > end:
969 yield start, min(windowsize, start-end-1)
970 yield start, min(windowsize, start-end-1)
970 start -= windowsize
971 start -= windowsize
971 if windowsize < sizelimit:
972 if windowsize < sizelimit:
972 windowsize *= 2
973 windowsize *= 2
973
974
974 files, matchfn, anypats = matchpats(repo, pats, opts)
975 files, matchfn, anypats = matchpats(repo, pats, opts)
975 follow = opts.get('follow') or opts.get('follow_first')
976 follow = opts.get('follow') or opts.get('follow_first')
976
977
977 if repo.changelog.count() == 0:
978 if repo.changelog.count() == 0:
978 return [], matchfn
979 return [], matchfn
979
980
980 if follow:
981 if follow:
981 defrange = '%s:0' % repo.changectx().rev()
982 defrange = '%s:0' % repo.changectx().rev()
982 else:
983 else:
983 defrange = '-1:0'
984 defrange = '-1:0'
984 revs = revrange(repo, opts['rev'] or [defrange])
985 revs = revrange(repo, opts['rev'] or [defrange])
985 wanted = {}
986 wanted = {}
986 slowpath = anypats or opts.get('removed')
987 slowpath = anypats or opts.get('removed')
987 fncache = {}
988 fncache = {}
988
989
989 if not slowpath and not files:
990 if not slowpath and not files:
990 # No files, no patterns. Display all revs.
991 # No files, no patterns. Display all revs.
991 wanted = dict.fromkeys(revs)
992 wanted = dict.fromkeys(revs)
992 copies = []
993 copies = []
993 if not slowpath:
994 if not slowpath:
994 # Only files, no patterns. Check the history of each file.
995 # Only files, no patterns. Check the history of each file.
995 def filerevgen(filelog, node):
996 def filerevgen(filelog, node):
996 cl_count = repo.changelog.count()
997 cl_count = repo.changelog.count()
997 if node is None:
998 if node is None:
998 last = filelog.count() - 1
999 last = filelog.count() - 1
999 else:
1000 else:
1000 last = filelog.rev(node)
1001 last = filelog.rev(node)
1001 for i, window in increasing_windows(last, nullrev):
1002 for i, window in increasing_windows(last, nullrev):
1002 revs = []
1003 revs = []
1003 for j in xrange(i - window, i + 1):
1004 for j in xrange(i - window, i + 1):
1004 n = filelog.node(j)
1005 n = filelog.node(j)
1005 revs.append((filelog.linkrev(n),
1006 revs.append((filelog.linkrev(n),
1006 follow and filelog.renamed(n)))
1007 follow and filelog.renamed(n)))
1007 revs.reverse()
1008 revs.reverse()
1008 for rev in revs:
1009 for rev in revs:
1009 # only yield revs for which we have the changelog; they can be
1010 # only yield revs for which we have the changelog; they can be
1010 # missing while doing "hg log" during a pull or commit
1011 # missing while doing "hg log" during a pull or commit
1011 if rev[0] < cl_count:
1012 if rev[0] < cl_count:
1012 yield rev
1013 yield rev
1013 def iterfiles():
1014 def iterfiles():
1014 for filename in files:
1015 for filename in files:
1015 yield filename, None
1016 yield filename, None
1016 for filename_node in copies:
1017 for filename_node in copies:
1017 yield filename_node
1018 yield filename_node
1018 minrev, maxrev = min(revs), max(revs)
1019 minrev, maxrev = min(revs), max(revs)
1019 for file_, node in iterfiles():
1020 for file_, node in iterfiles():
1020 filelog = repo.file(file_)
1021 filelog = repo.file(file_)
1021 if filelog.count() == 0:
1022 if filelog.count() == 0:
1022 if node is None:
1023 if node is None:
1023 # A zero count may be a directory or deleted file, so
1024 # A zero count may be a directory or deleted file, so
1024 # try to find matching entries on the slow path.
1025 # try to find matching entries on the slow path.
1025 slowpath = True
1026 slowpath = True
1026 break
1027 break
1027 else:
1028 else:
1028 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1029 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1029 % (file_, short(node)))
1030 % (file_, short(node)))
1030 continue
1031 continue
1031 for rev, copied in filerevgen(filelog, node):
1032 for rev, copied in filerevgen(filelog, node):
1032 if rev <= maxrev:
1033 if rev <= maxrev:
1033 if rev < minrev:
1034 if rev < minrev:
1034 break
1035 break
1035 fncache.setdefault(rev, [])
1036 fncache.setdefault(rev, [])
1036 fncache[rev].append(file_)
1037 fncache[rev].append(file_)
1037 wanted[rev] = 1
1038 wanted[rev] = 1
1038 if follow and copied:
1039 if follow and copied:
1039 copies.append(copied)
1040 copies.append(copied)
1040 if slowpath:
1041 if slowpath:
1041 if follow:
1042 if follow:
1042 raise util.Abort(_('can only follow copies/renames for explicit '
1043 raise util.Abort(_('can only follow copies/renames for explicit '
1043 'file names'))
1044 'file names'))
1044
1045
1045 # The slow path checks files modified in every changeset.
1046 # The slow path checks files modified in every changeset.
1046 def changerevgen():
1047 def changerevgen():
1047 for i, window in increasing_windows(repo.changelog.count()-1,
1048 for i, window in increasing_windows(repo.changelog.count()-1,
1048 nullrev):
1049 nullrev):
1049 for j in xrange(i - window, i + 1):
1050 for j in xrange(i - window, i + 1):
1050 yield j, change(j)[3]
1051 yield j, change(j)[3]
1051
1052
1052 for rev, changefiles in changerevgen():
1053 for rev, changefiles in changerevgen():
1053 matches = filter(matchfn, changefiles)
1054 matches = filter(matchfn, changefiles)
1054 if matches:
1055 if matches:
1055 fncache[rev] = matches
1056 fncache[rev] = matches
1056 wanted[rev] = 1
1057 wanted[rev] = 1
1057
1058
1058 class followfilter:
1059 class followfilter:
1059 def __init__(self, onlyfirst=False):
1060 def __init__(self, onlyfirst=False):
1060 self.startrev = nullrev
1061 self.startrev = nullrev
1061 self.roots = []
1062 self.roots = []
1062 self.onlyfirst = onlyfirst
1063 self.onlyfirst = onlyfirst
1063
1064
1064 def match(self, rev):
1065 def match(self, rev):
1065 def realparents(rev):
1066 def realparents(rev):
1066 if self.onlyfirst:
1067 if self.onlyfirst:
1067 return repo.changelog.parentrevs(rev)[0:1]
1068 return repo.changelog.parentrevs(rev)[0:1]
1068 else:
1069 else:
1069 return filter(lambda x: x != nullrev,
1070 return filter(lambda x: x != nullrev,
1070 repo.changelog.parentrevs(rev))
1071 repo.changelog.parentrevs(rev))
1071
1072
1072 if self.startrev == nullrev:
1073 if self.startrev == nullrev:
1073 self.startrev = rev
1074 self.startrev = rev
1074 return True
1075 return True
1075
1076
1076 if rev > self.startrev:
1077 if rev > self.startrev:
1077 # forward: all descendants
1078 # forward: all descendants
1078 if not self.roots:
1079 if not self.roots:
1079 self.roots.append(self.startrev)
1080 self.roots.append(self.startrev)
1080 for parent in realparents(rev):
1081 for parent in realparents(rev):
1081 if parent in self.roots:
1082 if parent in self.roots:
1082 self.roots.append(rev)
1083 self.roots.append(rev)
1083 return True
1084 return True
1084 else:
1085 else:
1085 # backwards: all parents
1086 # backwards: all parents
1086 if not self.roots:
1087 if not self.roots:
1087 self.roots.extend(realparents(self.startrev))
1088 self.roots.extend(realparents(self.startrev))
1088 if rev in self.roots:
1089 if rev in self.roots:
1089 self.roots.remove(rev)
1090 self.roots.remove(rev)
1090 self.roots.extend(realparents(rev))
1091 self.roots.extend(realparents(rev))
1091 return True
1092 return True
1092
1093
1093 return False
1094 return False
1094
1095
1095 # it might be worthwhile to do this in the iterator if the rev range
1096 # it might be worthwhile to do this in the iterator if the rev range
1096 # is descending and the prune args are all within that range
1097 # is descending and the prune args are all within that range
1097 for rev in opts.get('prune', ()):
1098 for rev in opts.get('prune', ()):
1098 rev = repo.changelog.rev(repo.lookup(rev))
1099 rev = repo.changelog.rev(repo.lookup(rev))
1099 ff = followfilter()
1100 ff = followfilter()
1100 stop = min(revs[0], revs[-1])
1101 stop = min(revs[0], revs[-1])
1101 for x in xrange(rev, stop-1, -1):
1102 for x in xrange(rev, stop-1, -1):
1102 if ff.match(x) and x in wanted:
1103 if ff.match(x) and x in wanted:
1103 del wanted[x]
1104 del wanted[x]
1104
1105
1105 def iterate():
1106 def iterate():
1106 if follow and not files:
1107 if follow and not files:
1107 ff = followfilter(onlyfirst=opts.get('follow_first'))
1108 ff = followfilter(onlyfirst=opts.get('follow_first'))
1108 def want(rev):
1109 def want(rev):
1109 if ff.match(rev) and rev in wanted:
1110 if ff.match(rev) and rev in wanted:
1110 return True
1111 return True
1111 return False
1112 return False
1112 else:
1113 else:
1113 def want(rev):
1114 def want(rev):
1114 return rev in wanted
1115 return rev in wanted
1115
1116
1116 for i, window in increasing_windows(0, len(revs)):
1117 for i, window in increasing_windows(0, len(revs)):
1117 yield 'window', revs[0] < revs[-1], revs[-1]
1118 yield 'window', revs[0] < revs[-1], revs[-1]
1118 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1119 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1119 srevs = list(nrevs)
1120 srevs = list(nrevs)
1120 srevs.sort()
1121 srevs.sort()
1121 for rev in srevs:
1122 for rev in srevs:
1122 fns = fncache.get(rev)
1123 fns = fncache.get(rev)
1123 if not fns:
1124 if not fns:
1124 def fns_generator():
1125 def fns_generator():
1125 for f in change(rev)[3]:
1126 for f in change(rev)[3]:
1126 if matchfn(f):
1127 if matchfn(f):
1127 yield f
1128 yield f
1128 fns = fns_generator()
1129 fns = fns_generator()
1129 yield 'add', rev, fns
1130 yield 'add', rev, fns
1130 for rev in nrevs:
1131 for rev in nrevs:
1131 yield 'iter', rev, None
1132 yield 'iter', rev, None
1132 return iterate(), matchfn
1133 return iterate(), matchfn
1133
1134
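# Illustrative sketch (editor's example, not part of the changeset): the usual
# consumer loop for walkchangerevs(), following the window/add/iter protocol
# described in its docstring.  commands.log works this way; the display code
# below is simplified and hypothetical.
def _example_log(ui, repo, pats, opts):
    # opts must at least carry a 'rev' entry (see the revrange() call above),
    # e.g. opts = {'rev': None}
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, pats, get, opts)
    for st, rev, fns in changeiter:
        if st == 'window':
            # a new window starts: reset any per-window display state here
            pass
        elif st == 'add':
            # out-of-order pass: gather data for rev; fns lists the files
            # that changed in it and matched the patterns
            pass
        elif st == 'iter':
            # in-order pass: display what was gathered for rev above
            ui.write("%d:%s\n" % (rev, short(repo.changelog.node(rev))))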
1134 def commit(ui, repo, commitfunc, pats, opts):
1135 def commit(ui, repo, commitfunc, pats, opts):
1135 '''commit the specified files or all outstanding changes'''
1136 '''commit the specified files or all outstanding changes'''
1136 date = opts.get('date')
1137 date = opts.get('date')
1137 if date:
1138 if date:
1138 opts['date'] = util.parsedate(date)
1139 opts['date'] = util.parsedate(date)
1139 message = logmessage(opts)
1140 message = logmessage(opts)
1140
1141
1141 # extract addremove carefully -- this function can be called from a command
1142 # extract addremove carefully -- this function can be called from a command
1142 # that doesn't support addremove
1143 # that doesn't support addremove
1143 if opts.get('addremove'):
1144 if opts.get('addremove'):
1144 addremove(repo, pats, opts)
1145 addremove(repo, pats, opts)
1145
1146
1146 fns, match, anypats = matchpats(repo, pats, opts)
1147 fns, match, anypats = matchpats(repo, pats, opts)
1147 if pats:
1148 if pats:
1148 status = repo.status(files=fns, match=match)
1149 status = repo.status(files=fns, match=match)
1149 modified, added, removed, deleted, unknown = status[:5]
1150 modified, added, removed, deleted, unknown = status[:5]
1150 files = modified + added + removed
1151 files = modified + added + removed
1151 slist = None
1152 slist = None
1152 for f in fns:
1153 for f in fns:
1153 if f == '.':
1154 if f == '.':
1154 continue
1155 continue
1155 if f not in files:
1156 if f not in files:
1156 rf = repo.wjoin(f)
1157 rf = repo.wjoin(f)
1157 rel = repo.pathto(f)
1158 rel = repo.pathto(f)
1158 try:
1159 try:
1159 mode = os.lstat(rf)[stat.ST_MODE]
1160 mode = os.lstat(rf)[stat.ST_MODE]
1160 except OSError:
1161 except OSError:
1161 raise util.Abort(_("file %s not found!") % rel)
1162 raise util.Abort(_("file %s not found!") % rel)
1162 if stat.S_ISDIR(mode):
1163 if stat.S_ISDIR(mode):
1163 name = f + '/'
1164 name = f + '/'
1164 if slist is None:
1165 if slist is None:
1165 slist = list(files)
1166 slist = list(files)
1166 slist.sort()
1167 slist.sort()
1167 i = bisect.bisect(slist, name)
1168 i = bisect.bisect(slist, name)
1168 if i >= len(slist) or not slist[i].startswith(name):
1169 if i >= len(slist) or not slist[i].startswith(name):
1169 raise util.Abort(_("no match under directory %s!")
1170 raise util.Abort(_("no match under directory %s!")
1170 % rel)
1171 % rel)
1171 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1172 elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
1172 raise util.Abort(_("can't commit %s: "
1173 raise util.Abort(_("can't commit %s: "
1173 "unsupported file type!") % rel)
1174 "unsupported file type!") % rel)
1174 elif f not in repo.dirstate:
1175 elif f not in repo.dirstate:
1175 raise util.Abort(_("file %s not tracked!") % rel)
1176 raise util.Abort(_("file %s not tracked!") % rel)
1176 else:
1177 else:
1177 files = []
1178 files = []
1178 try:
1179 try:
1179 return commitfunc(ui, repo, files, message, match, opts)
1180 return commitfunc(ui, repo, files, message, match, opts)
1180 except ValueError, inst:
1181 except ValueError, inst:
1181 raise util.Abort(str(inst))
1182 raise util.Abort(str(inst))
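# Illustrative sketch (editor's example, not part of the changeset): the shape
# of the commitfunc argument that commit() expects.  Callers such as
# commands.commit plug in callables like this that end up calling
# repo.commit(); the stub body below is hypothetical.
def _example_commitfunc(ui, repo, files, message, match, opts):
    # 'files' is the explicit file list (empty means all outstanding changes),
    # 'match' is the match function built from the patterns, and 'opts' still
    # carries the parsed values such as 'user' and 'date'.
    ui.note("would commit %d file(s): %s\n" % (len(files), message))
    return None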
@@ -1,1866 +1,1856
1 """
1 """
2 util.py - Mercurial utility functions and platform-specific implementations
2 util.py - Mercurial utility functions and platform-specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import _
15 from i18n import _
16 import cStringIO, errno, getpass, re, shutil, sys, tempfile
16 import cStringIO, errno, getpass, re, shutil, sys, tempfile
17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
18 import imp, urlparse
18 import imp, urlparse
19
19
20 # Python compatibility
20 # Python compatibility
21
21
22 try:
22 try:
23 set = set
23 set = set
24 frozenset = frozenset
24 frozenset = frozenset
25 except NameError:
25 except NameError:
26 from sets import Set as set, ImmutableSet as frozenset
26 from sets import Set as set, ImmutableSet as frozenset
27
27
28 _md5 = None
28 _md5 = None
29 def md5(s):
29 def md5(s):
30 global _md5
30 global _md5
31 if _md5 is None:
31 if _md5 is None:
32 try:
32 try:
33 import hashlib
33 import hashlib
34 _md5 = hashlib.md5
34 _md5 = hashlib.md5
35 except ImportError:
35 except ImportError:
36 import md5
36 import md5
37 _md5 = md5.md5
37 _md5 = md5.md5
38 return _md5(s)
38 return _md5(s)
39
39
40 _sha1 = None
40 _sha1 = None
41 def sha1(s):
41 def sha1(s):
42 global _sha1
42 global _sha1
43 if _sha1 is None:
43 if _sha1 is None:
44 try:
44 try:
45 import hashlib
45 import hashlib
46 _sha1 = hashlib.sha1
46 _sha1 = hashlib.sha1
47 except ImportError:
47 except ImportError:
48 import sha
48 import sha
49 _sha1 = sha.sha
49 _sha1 = sha.sha
50 return _sha1(s)
50 return _sha1(s)
51
51
52 try:
52 try:
53 _encoding = os.environ.get("HGENCODING")
53 _encoding = os.environ.get("HGENCODING")
54 if sys.platform == 'darwin' and not _encoding:
54 if sys.platform == 'darwin' and not _encoding:
55 # On darwin, getpreferredencoding ignores the locale environment and
55 # On darwin, getpreferredencoding ignores the locale environment and
56 # always returns mac-roman. We override this if the environment is
56 # always returns mac-roman. We override this if the environment is
57 # not C (has been customized by the user).
57 # not C (has been customized by the user).
58 locale.setlocale(locale.LC_CTYPE, '')
58 locale.setlocale(locale.LC_CTYPE, '')
59 _encoding = locale.getlocale()[1]
59 _encoding = locale.getlocale()[1]
60 if not _encoding:
60 if not _encoding:
61 _encoding = locale.getpreferredencoding() or 'ascii'
61 _encoding = locale.getpreferredencoding() or 'ascii'
62 except locale.Error:
62 except locale.Error:
63 _encoding = 'ascii'
63 _encoding = 'ascii'
64 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
64 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
65 _fallbackencoding = 'ISO-8859-1'
65 _fallbackencoding = 'ISO-8859-1'
66
66
67 def tolocal(s):
67 def tolocal(s):
68 """
68 """
69 Convert a string from internal UTF-8 to local encoding
69 Convert a string from internal UTF-8 to local encoding
70
70
71 All internal strings should be UTF-8 but some repos before the
71 All internal strings should be UTF-8 but some repos before the
72 implementation of locale support may contain latin1 or possibly
72 implementation of locale support may contain latin1 or possibly
73 other character sets. We attempt to decode everything strictly
73 other character sets. We attempt to decode everything strictly
74 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
74 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
75 replace unknown characters.
75 replace unknown characters.
76 """
76 """
77 for e in ('UTF-8', _fallbackencoding):
77 for e in ('UTF-8', _fallbackencoding):
78 try:
78 try:
79 u = s.decode(e) # attempt strict decoding
79 u = s.decode(e) # attempt strict decoding
80 return u.encode(_encoding, "replace")
80 return u.encode(_encoding, "replace")
81 except LookupError, k:
81 except LookupError, k:
82 raise Abort(_("%s, please check your locale settings") % k)
82 raise Abort(_("%s, please check your locale settings") % k)
83 except UnicodeDecodeError:
83 except UnicodeDecodeError:
84 pass
84 pass
85 u = s.decode("utf-8", "replace") # last ditch
85 u = s.decode("utf-8", "replace") # last ditch
86 return u.encode(_encoding, "replace")
86 return u.encode(_encoding, "replace")
87
87
88 def fromlocal(s):
88 def fromlocal(s):
89 """
89 """
90 Convert a string from the local character encoding to UTF-8
90 Convert a string from the local character encoding to UTF-8
91
91
92 We attempt to decode strings using the encoding mode set by
92 We attempt to decode strings using the encoding mode set by
93 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
93 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
94 characters will cause an error message. Other modes include
94 characters will cause an error message. Other modes include
95 'replace', which replaces unknown characters with a special
95 'replace', which replaces unknown characters with a special
96 Unicode character, and 'ignore', which drops the character.
96 Unicode character, and 'ignore', which drops the character.
97 """
97 """
98 try:
98 try:
99 return s.decode(_encoding, _encodingmode).encode("utf-8")
99 return s.decode(_encoding, _encodingmode).encode("utf-8")
100 except UnicodeDecodeError, inst:
100 except UnicodeDecodeError, inst:
101 sub = s[max(0, inst.start-10):inst.start+10]
101 sub = s[max(0, inst.start-10):inst.start+10]
102 raise Abort("decoding near '%s': %s!" % (sub, inst))
102 raise Abort("decoding near '%s': %s!" % (sub, inst))
103 except LookupError, k:
103 except LookupError, k:
104 raise Abort(_("%s, please check your locale settings") % k)
104 raise Abort(_("%s, please check your locale settings") % k)
105
105
106 def locallen(s):
106 def locallen(s):
107 """Find the length in characters of a local string"""
107 """Find the length in characters of a local string"""
108 return len(s.decode(_encoding, "replace"))
108 return len(s.decode(_encoding, "replace"))
109
109
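# Illustrative sketch (editor's example, not part of the changeset): the
# intended round-trip between the local encoding and Mercurial's internal
# UTF-8, assuming HGENCODING resolves to UTF-8.  The byte strings are made up.
def _example_encoding_roundtrip():
    internal = fromlocal('caf\xc3\xa9')   # local bytes -> internal UTF-8
    displayed = tolocal(internal)         # internal UTF-8 -> local encoding
    # tolocal() also copes with pre-locale-support repos: latin-1 bytes such
    # as 'caf\xe9' fail strict UTF-8 decoding and fall back to
    # _fallbackencoding instead of raising UnicodeDecodeError.
    legacy = tolocal('caf\xe9')
    return displayed, legacy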
110 # used by parsedate
110 # used by parsedate
111 defaultdateformats = (
111 defaultdateformats = (
112 '%Y-%m-%d %H:%M:%S',
112 '%Y-%m-%d %H:%M:%S',
113 '%Y-%m-%d %I:%M:%S%p',
113 '%Y-%m-%d %I:%M:%S%p',
114 '%Y-%m-%d %H:%M',
114 '%Y-%m-%d %H:%M',
115 '%Y-%m-%d %I:%M%p',
115 '%Y-%m-%d %I:%M%p',
116 '%Y-%m-%d',
116 '%Y-%m-%d',
117 '%m-%d',
117 '%m-%d',
118 '%m/%d',
118 '%m/%d',
119 '%m/%d/%y',
119 '%m/%d/%y',
120 '%m/%d/%Y',
120 '%m/%d/%Y',
121 '%a %b %d %H:%M:%S %Y',
121 '%a %b %d %H:%M:%S %Y',
122 '%a %b %d %I:%M:%S%p %Y',
122 '%a %b %d %I:%M:%S%p %Y',
123 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
123 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
124 '%b %d %H:%M:%S %Y',
124 '%b %d %H:%M:%S %Y',
125 '%b %d %I:%M:%S%p %Y',
125 '%b %d %I:%M:%S%p %Y',
126 '%b %d %H:%M:%S',
126 '%b %d %H:%M:%S',
127 '%b %d %I:%M:%S%p',
127 '%b %d %I:%M:%S%p',
128 '%b %d %H:%M',
128 '%b %d %H:%M',
129 '%b %d %I:%M%p',
129 '%b %d %I:%M%p',
130 '%b %d %Y',
130 '%b %d %Y',
131 '%b %d',
131 '%b %d',
132 '%H:%M:%S',
132 '%H:%M:%S',
133 '%I:%M:%SP',
133 '%I:%M:%SP',
134 '%H:%M',
134 '%H:%M',
135 '%I:%M%p',
135 '%I:%M%p',
136 )
136 )
137
137
138 extendeddateformats = defaultdateformats + (
138 extendeddateformats = defaultdateformats + (
139 "%Y",
139 "%Y",
140 "%Y-%m",
140 "%Y-%m",
141 "%b",
141 "%b",
142 "%b %Y",
142 "%b %Y",
143 )
143 )
144
144
145 class SignalInterrupt(Exception):
145 class SignalInterrupt(Exception):
146 """Exception raised on SIGTERM and SIGHUP."""
146 """Exception raised on SIGTERM and SIGHUP."""
147
147
148 # differences from SafeConfigParser:
148 # differences from SafeConfigParser:
149 # - case-sensitive keys
149 # - case-sensitive keys
150 # - allows values that are not strings (this means that you may not
150 # - allows values that are not strings (this means that you may not
151 # be able to save the configuration to a file)
151 # be able to save the configuration to a file)
152 class configparser(ConfigParser.SafeConfigParser):
152 class configparser(ConfigParser.SafeConfigParser):
153 def optionxform(self, optionstr):
153 def optionxform(self, optionstr):
154 return optionstr
154 return optionstr
155
155
156 def set(self, section, option, value):
156 def set(self, section, option, value):
157 return ConfigParser.ConfigParser.set(self, section, option, value)
157 return ConfigParser.ConfigParser.set(self, section, option, value)
158
158
159 def _interpolate(self, section, option, rawval, vars):
159 def _interpolate(self, section, option, rawval, vars):
160 if not isinstance(rawval, basestring):
160 if not isinstance(rawval, basestring):
161 return rawval
161 return rawval
162 return ConfigParser.SafeConfigParser._interpolate(self, section,
162 return ConfigParser.SafeConfigParser._interpolate(self, section,
163 option, rawval, vars)
163 option, rawval, vars)
164
164
165 def cachefunc(func):
165 def cachefunc(func):
166 '''cache the result of function calls'''
166 '''cache the result of function calls'''
167 # XXX doesn't handle keyword args
167 # XXX doesn't handle keyword args
168 cache = {}
168 cache = {}
169 if func.func_code.co_argcount == 1:
169 if func.func_code.co_argcount == 1:
170 # we gain a small amount of time because
170 # we gain a small amount of time because
171 # we don't need to pack/unpack the list
171 # we don't need to pack/unpack the list
172 def f(arg):
172 def f(arg):
173 if arg not in cache:
173 if arg not in cache:
174 cache[arg] = func(arg)
174 cache[arg] = func(arg)
175 return cache[arg]
175 return cache[arg]
176 else:
176 else:
177 def f(*args):
177 def f(*args):
178 if args not in cache:
178 if args not in cache:
179 cache[args] = func(*args)
179 cache[args] = func(*args)
180 return cache[args]
180 return cache[args]
181
181
182 return f
182 return f
183
183
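# Illustrative sketch (editor's example, not part of the changeset): memoizing
# an expensive one-argument function with cachefunc().  The wrapped lambda is
# just an example of a costly lookup.
def _example_cachefunc(repo):
    getchange = cachefunc(lambda rev: repo.changectx(rev).changeset())
    first = getchange(0)       # computed and stored in the cache
    again = getchange(0)       # served from the cache, no second lookup
    return first is again      # True: the very same cached object comes back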
184 def pipefilter(s, cmd):
184 def pipefilter(s, cmd):
185 '''filter string S through command CMD, returning its output'''
185 '''filter string S through command CMD, returning its output'''
186 (pin, pout) = os.popen2(cmd, 'b')
186 (pin, pout) = os.popen2(cmd, 'b')
187 def writer():
187 def writer():
188 try:
188 try:
189 pin.write(s)
189 pin.write(s)
190 pin.close()
190 pin.close()
191 except IOError, inst:
191 except IOError, inst:
192 if inst.errno != errno.EPIPE:
192 if inst.errno != errno.EPIPE:
193 raise
193 raise
194
194
195 # we should use select instead on UNIX, but this will work on most
195 # we should use select instead on UNIX, but this will work on most
196 # systems, including Windows
196 # systems, including Windows
197 w = threading.Thread(target=writer)
197 w = threading.Thread(target=writer)
198 w.start()
198 w.start()
199 f = pout.read()
199 f = pout.read()
200 pout.close()
200 pout.close()
201 w.join()
201 w.join()
202 return f
202 return f
203
203
204 def tempfilter(s, cmd):
204 def tempfilter(s, cmd):
205 '''filter string S through a pair of temporary files with CMD.
205 '''filter string S through a pair of temporary files with CMD.
206 CMD is used as a template to create the real command to be run,
206 CMD is used as a template to create the real command to be run,
207 with the strings INFILE and OUTFILE replaced by the real names of
207 with the strings INFILE and OUTFILE replaced by the real names of
208 the temporary files generated.'''
208 the temporary files generated.'''
209 inname, outname = None, None
209 inname, outname = None, None
210 try:
210 try:
211 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
211 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
212 fp = os.fdopen(infd, 'wb')
212 fp = os.fdopen(infd, 'wb')
213 fp.write(s)
213 fp.write(s)
214 fp.close()
214 fp.close()
215 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
215 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
216 os.close(outfd)
216 os.close(outfd)
217 cmd = cmd.replace('INFILE', inname)
217 cmd = cmd.replace('INFILE', inname)
218 cmd = cmd.replace('OUTFILE', outname)
218 cmd = cmd.replace('OUTFILE', outname)
219 code = os.system(cmd)
219 code = os.system(cmd)
220 if sys.platform == 'OpenVMS' and code & 1:
220 if sys.platform == 'OpenVMS' and code & 1:
221 code = 0
221 code = 0
222 if code: raise Abort(_("command '%s' failed: %s") %
222 if code: raise Abort(_("command '%s' failed: %s") %
223 (cmd, explain_exit(code)))
223 (cmd, explain_exit(code)))
224 return open(outname, 'rb').read()
224 return open(outname, 'rb').read()
225 finally:
225 finally:
226 try:
226 try:
227 if inname: os.unlink(inname)
227 if inname: os.unlink(inname)
228 except: pass
228 except: pass
229 try:
229 try:
230 if outname: os.unlink(outname)
230 if outname: os.unlink(outname)
231 except: pass
231 except: pass
232
232
233 filtertable = {
233 filtertable = {
234 'tempfile:': tempfilter,
234 'tempfile:': tempfilter,
235 'pipe:': pipefilter,
235 'pipe:': pipefilter,
236 }
236 }
237
237
238 def filter(s, cmd):
238 def filter(s, cmd):
239 "filter a string through a command that transforms its input to its output"
239 "filter a string through a command that transforms its input to its output"
240 for name, fn in filtertable.iteritems():
240 for name, fn in filtertable.iteritems():
241 if cmd.startswith(name):
241 if cmd.startswith(name):
242 return fn(s, cmd[len(name):].lstrip())
242 return fn(s, cmd[len(name):].lstrip())
243 return pipefilter(s, cmd)
243 return pipefilter(s, cmd)
244
244
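# Illustrative sketch (editor's example, not part of the changeset): the two
# filter styles understood by filter().  The tr/sed commands assume a POSIX
# shell and are examples only.
def _example_filter(text):
    # 'pipe:' (or no prefix at all): text goes through the command's
    # stdin/stdout via pipefilter()
    upper = filter(text, 'pipe:tr a-z A-Z')
    # 'tempfile:': INFILE and OUTFILE are replaced by real temporary file
    # names via tempfilter(), for tools that cannot work as a pipe
    swapped = filter(text, "tempfile:sed -e 's/foo/bar/g' INFILE > OUTFILE")
    return upper, swapped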
245 def binary(s):
245 def binary(s):
246 """return true if a string is binary data"""
246 """return true if a string is binary data"""
247 if s and '\0' in s:
247 if s and '\0' in s:
248 return True
248 return True
249 return False
249 return False
250
250
251 def unique(g):
251 def unique(g):
252 """return the uniq elements of iterable g"""
252 """return the uniq elements of iterable g"""
253 return dict.fromkeys(g).keys()
253 return dict.fromkeys(g).keys()
254
254
255 class Abort(Exception):
255 class Abort(Exception):
256 """Raised if a command needs to print an error and exit."""
256 """Raised if a command needs to print an error and exit."""
257
257
258 class UnexpectedOutput(Abort):
258 class UnexpectedOutput(Abort):
259 """Raised to print an error with part of output and exit."""
259 """Raised to print an error with part of output and exit."""
260
260
261 def always(fn): return True
261 def always(fn): return True
262 def never(fn): return False
262 def never(fn): return False
263
263
264 def expand_glob(pats):
264 def expand_glob(pats):
265 '''On Windows, expand the implicit globs in a list of patterns'''
265 '''On Windows, expand the implicit globs in a list of patterns'''
266 if os.name != 'nt':
266 if os.name != 'nt':
267 return list(pats)
267 return list(pats)
268 ret = []
268 ret = []
269 for p in pats:
269 for p in pats:
270 kind, name = patkind(p, None)
270 kind, name = patkind(p, None)
271 if kind is None:
271 if kind is None:
272 globbed = glob.glob(name)
272 globbed = glob.glob(name)
273 if globbed:
273 if globbed:
274 ret.extend(globbed)
274 ret.extend(globbed)
275 continue
275 continue
276 # if we couldn't expand the glob, just keep it around
276 # if we couldn't expand the glob, just keep it around
277 ret.append(p)
277 ret.append(p)
278 return ret
278 return ret
279
279
280 def patkind(name, dflt_pat='glob'):
280 def patkind(name, dflt_pat='glob'):
281 """Split a string into an optional pattern kind prefix and the
281 """Split a string into an optional pattern kind prefix and the
282 actual pattern."""
282 actual pattern."""
283 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
283 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
284 if name.startswith(prefix + ':'): return name.split(':', 1)
284 if name.startswith(prefix + ':'): return name.split(':', 1)
285 return dflt_pat, name
285 return dflt_pat, name
286
286
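# Illustrative sketch (editor's example, not part of the changeset): how
# patkind() splits a pattern into its kind prefix and body.
def _example_patkind():
    kind, pat = patkind('re:.*\\.py$')        # -> 're', '.*\\.py$'
    kind, pat = patkind('path:doc/README')    # -> 'path', 'doc/README'
    # no recognised prefix: the caller-supplied default kind applies
    kind, pat = patkind('*.py')               # -> 'glob', '*.py'
    kind, pat = patkind('*.py', 'relpath')    # -> 'relpath', '*.py'
    return kind, pat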
287 def globre(pat, head='^', tail='$'):
287 def globre(pat, head='^', tail='$'):
288 "convert a glob pattern into a regexp"
288 "convert a glob pattern into a regexp"
289 i, n = 0, len(pat)
289 i, n = 0, len(pat)
290 res = ''
290 res = ''
291 group = 0
291 group = 0
292 def peek(): return i < n and pat[i]
292 def peek(): return i < n and pat[i]
293 while i < n:
293 while i < n:
294 c = pat[i]
294 c = pat[i]
295 i = i+1
295 i = i+1
296 if c == '*':
296 if c == '*':
297 if peek() == '*':
297 if peek() == '*':
298 i += 1
298 i += 1
299 res += '.*'
299 res += '.*'
300 else:
300 else:
301 res += '[^/]*'
301 res += '[^/]*'
302 elif c == '?':
302 elif c == '?':
303 res += '.'
303 res += '.'
304 elif c == '[':
304 elif c == '[':
305 j = i
305 j = i
306 if j < n and pat[j] in '!]':
306 if j < n and pat[j] in '!]':
307 j += 1
307 j += 1
308 while j < n and pat[j] != ']':
308 while j < n and pat[j] != ']':
309 j += 1
309 j += 1
310 if j >= n:
310 if j >= n:
311 res += '\\['
311 res += '\\['
312 else:
312 else:
313 stuff = pat[i:j].replace('\\','\\\\')
313 stuff = pat[i:j].replace('\\','\\\\')
314 i = j + 1
314 i = j + 1
315 if stuff[0] == '!':
315 if stuff[0] == '!':
316 stuff = '^' + stuff[1:]
316 stuff = '^' + stuff[1:]
317 elif stuff[0] == '^':
317 elif stuff[0] == '^':
318 stuff = '\\' + stuff
318 stuff = '\\' + stuff
319 res = '%s[%s]' % (res, stuff)
319 res = '%s[%s]' % (res, stuff)
320 elif c == '{':
320 elif c == '{':
321 group += 1
321 group += 1
322 res += '(?:'
322 res += '(?:'
323 elif c == '}' and group:
323 elif c == '}' and group:
324 res += ')'
324 res += ')'
325 group -= 1
325 group -= 1
326 elif c == ',' and group:
326 elif c == ',' and group:
327 res += '|'
327 res += '|'
328 elif c == '\\':
328 elif c == '\\':
329 p = peek()
329 p = peek()
330 if p:
330 if p:
331 i += 1
331 i += 1
332 res += re.escape(p)
332 res += re.escape(p)
333 else:
333 else:
334 res += re.escape(c)
334 res += re.escape(c)
335 else:
335 else:
336 res += re.escape(c)
336 res += re.escape(c)
337 return head + res + tail
337 return head + res + tail
338
338
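# Illustrative sketch (editor's example, not part of the changeset): what the
# glob-to-regexp translation above yields for a few common patterns.
def _example_globre():
    pat = globre('*.py')               # '*' becomes '[^/]*', so no '/' crossed
    assert re.match(pat, 'util.py')
    assert not re.match(pat, 'tests/util.py')
    deep = globre('tests/**/*.out')    # '**' becomes '.*' and crosses dirs
    assert re.match(deep, 'tests/a/b/run.out')
    alt = globre('*.{c,h}')            # '{a,b}' becomes '(?:a|b)'
    assert re.match(alt, 'revlog.c') and re.match(alt, 'revlog.h')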
339 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
339 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
340
340
341 def pathto(root, n1, n2):
341 def pathto(root, n1, n2):
342 '''return the relative path from one place to another.
342 '''return the relative path from one place to another.
343 root should use os.sep to separate directories
343 root should use os.sep to separate directories
344 n1 should use os.sep to separate directories
344 n1 should use os.sep to separate directories
345 n2 should use "/" to separate directories
345 n2 should use "/" to separate directories
346 returns an os.sep-separated path.
346 returns an os.sep-separated path.
347
347
348 If n1 is a relative path, it is assumed to be
348 If n1 is a relative path, it is assumed to be
349 relative to root.
349 relative to root.
350 n2 should always be relative to root.
350 n2 should always be relative to root.
351 '''
351 '''
352 if not n1: return localpath(n2)
352 if not n1: return localpath(n2)
353 if os.path.isabs(n1):
353 if os.path.isabs(n1):
354 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
354 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
355 return os.path.join(root, localpath(n2))
355 return os.path.join(root, localpath(n2))
356 n2 = '/'.join((pconvert(root), n2))
356 n2 = '/'.join((pconvert(root), n2))
357 a, b = splitpath(n1), n2.split('/')
357 a, b = splitpath(n1), n2.split('/')
358 a.reverse()
358 a.reverse()
359 b.reverse()
359 b.reverse()
360 while a and b and a[-1] == b[-1]:
360 while a and b and a[-1] == b[-1]:
361 a.pop()
361 a.pop()
362 b.pop()
362 b.pop()
363 b.reverse()
363 b.reverse()
364 return os.sep.join((['..'] * len(a)) + b) or '.'
364 return os.sep.join((['..'] * len(a)) + b) or '.'
365
365
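# Illustrative sketch (editor's example, not part of the changeset): a worked
# pathto() call, assuming a POSIX platform where os.sep is '/'.
def _example_pathto():
    # starting from the working-directory subdir 'a/b', reach the repo
    # file 'a/c/d'
    assert pathto('/repo', 'a/b', 'a/c/d') == '../c/d'
    # with no starting point, the target only needs localising
    assert pathto('/repo', '', 'a/c/d') == 'a/c/d'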
366 def canonpath(root, cwd, myname):
366 def canonpath(root, cwd, myname):
367 """return the canonical path of myname, given cwd and root"""
367 """return the canonical path of myname, given cwd and root"""
368 if root == os.sep:
368 if root == os.sep:
369 rootsep = os.sep
369 rootsep = os.sep
370 elif endswithsep(root):
370 elif endswithsep(root):
371 rootsep = root
371 rootsep = root
372 else:
372 else:
373 rootsep = root + os.sep
373 rootsep = root + os.sep
374 name = myname
374 name = myname
375 if not os.path.isabs(name):
375 if not os.path.isabs(name):
376 name = os.path.join(root, cwd, name)
376 name = os.path.join(root, cwd, name)
377 name = os.path.normpath(name)
377 name = os.path.normpath(name)
378 audit_path = path_auditor(root)
378 audit_path = path_auditor(root)
379 if name != rootsep and name.startswith(rootsep):
379 if name != rootsep and name.startswith(rootsep):
380 name = name[len(rootsep):]
380 name = name[len(rootsep):]
381 audit_path(name)
381 audit_path(name)
382 return pconvert(name)
382 return pconvert(name)
383 elif name == root:
383 elif name == root:
384 return ''
384 return ''
385 else:
385 else:
386 # Determine whether `name' is in the hierarchy at or beneath `root',
386 # Determine whether `name' is in the hierarchy at or beneath `root',
387 # by iterating name=dirname(name) until that causes no change (can't
387 # by iterating name=dirname(name) until that causes no change (can't
388 # check name == '/', because that doesn't work on windows). For each
388 # check name == '/', because that doesn't work on windows). For each
389 # `name', compare dev/inode numbers. If they match, the list `rel'
389 # `name', compare dev/inode numbers. If they match, the list `rel'
390 # holds the reversed list of components making up the relative file
390 # holds the reversed list of components making up the relative file
391 # name we want.
391 # name we want.
392 root_st = os.stat(root)
392 root_st = os.stat(root)
393 rel = []
393 rel = []
394 while True:
394 while True:
395 try:
395 try:
396 name_st = os.stat(name)
396 name_st = os.stat(name)
397 except OSError:
397 except OSError:
398 break
398 break
399 if samestat(name_st, root_st):
399 if samestat(name_st, root_st):
400 if not rel:
400 if not rel:
401 # name was actually the same as root (maybe a symlink)
401 # name was actually the same as root (maybe a symlink)
402 return ''
402 return ''
403 rel.reverse()
403 rel.reverse()
404 name = os.path.join(*rel)
404 name = os.path.join(*rel)
405 audit_path(name)
405 audit_path(name)
406 return pconvert(name)
406 return pconvert(name)
407 dirname, basename = os.path.split(name)
407 dirname, basename = os.path.split(name)
408 rel.append(basename)
408 rel.append(basename)
409 if dirname == name:
409 if dirname == name:
410 break
410 break
411 name = dirname
411 name = dirname
412
412
413 raise Abort('%s not under root' % myname)
413 raise Abort('%s not under root' % myname)
414
414
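# Illustrative sketch (editor's example, not part of the changeset): canonpath()
# rebasing user-supplied names onto the repository root.  POSIX paths; the
# /repo root is made up.
def _example_canonpath():
    # a name relative to the current subdirectory of the repository
    assert canonpath('/repo', 'src', 'main.c') == 'src/main.c'
    # an absolute name inside the repository is cut down to a relative one
    assert canonpath('/repo', '', '/repo/doc/README') == 'doc/README'
    # names escaping the root (e.g. 'src/../../etc/passwd') raise Abort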
415 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
415 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
416 return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
417
418 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
419 globbed=False, default=None):
420 default = default or 'relpath'
421 if default == 'relpath' and not globbed:
422 names = expand_glob(names)
423 return _matcher(canonroot, cwd, names, inc, exc, default, src)
424
425 def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
426 """build a function to match a set of file patterns
416 """build a function to match a set of file patterns
427
417
428 arguments:
418 arguments:
429 canonroot - the canonical root of the tree you're matching against
419 canonroot - the canonical root of the tree you're matching against
430 cwd - the current working directory, if relevant
420 cwd - the current working directory, if relevant
431 names - patterns to find
421 names - patterns to find
432 inc - patterns to include
422 inc - patterns to include
433 exc - patterns to exclude
423 exc - patterns to exclude
434 dflt_pat - if a pattern in names has no explicit type, assume this one
424 dflt_pat - if a pattern in names has no explicit type, assume this one
435 src - where these patterns came from (e.g. .hgignore)
425 src - where these patterns came from (e.g. .hgignore)
436
426
437 a pattern is one of:
427 a pattern is one of:
438 'glob:<glob>' - a glob relative to cwd
428 'glob:<glob>' - a glob relative to cwd
439 're:<regexp>' - a regular expression
429 're:<regexp>' - a regular expression
440 'path:<path>' - a path relative to canonroot
430 'path:<path>' - a path relative to canonroot
441 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
431 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
442 'relpath:<path>' - a path relative to cwd
432 'relpath:<path>' - a path relative to cwd
443 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
433 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
444 '<something>' - one of the cases above, selected by the dflt_pat argument
434 '<something>' - one of the cases above, selected by the dflt_pat argument
445
435
446 returns:
436 returns:
447 a 3-tuple containing
437 a 3-tuple containing
448 - list of roots (places where one should start a recursive walk of the fs);
438 - list of roots (places where one should start a recursive walk of the fs);
449 this often matches the explicit non-pattern names passed in, but also
439 this often matches the explicit non-pattern names passed in, but also
450 includes the initial part of glob: patterns that has no glob characters
440 includes the initial part of glob: patterns that has no glob characters
451 - a bool match(filename) function
441 - a bool match(filename) function
452 - a bool indicating if any patterns were passed in
442 - a bool indicating if any patterns were passed in
453 """
443 """
454
444
455 # a common case: no patterns at all
445 # a common case: no patterns at all
456 if not names and not inc and not exc:
446 if not names and not inc and not exc:
457 return [], always, False
447 return [], always, False
458
448
459 def contains_glob(name):
449 def contains_glob(name):
460 for c in name:
450 for c in name:
461 if c in _globchars: return True
451 if c in _globchars: return True
462 return False
452 return False
463
453
464 def regex(kind, name, tail):
454 def regex(kind, name, tail):
465 '''convert a pattern into a regular expression'''
455 '''convert a pattern into a regular expression'''
466 if not name:
456 if not name:
467 return ''
457 return ''
468 if kind == 're':
458 if kind == 're':
469 return name
459 return name
470 elif kind == 'path':
460 elif kind == 'path':
471 return '^' + re.escape(name) + '(?:/|$)'
461 return '^' + re.escape(name) + '(?:/|$)'
472 elif kind == 'relglob':
462 elif kind == 'relglob':
473 return globre(name, '(?:|.*/)', tail)
463 return globre(name, '(?:|.*/)', tail)
474 elif kind == 'relpath':
464 elif kind == 'relpath':
475 return re.escape(name) + '(?:/|$)'
465 return re.escape(name) + '(?:/|$)'
476 elif kind == 'relre':
466 elif kind == 'relre':
477 if name.startswith('^'):
467 if name.startswith('^'):
478 return name
468 return name
479 return '.*' + name
469 return '.*' + name
480 return globre(name, '', tail)
470 return globre(name, '', tail)
481
471
482 def matchfn(pats, tail):
472 def matchfn(pats, tail):
483 """build a matching function from a set of patterns"""
473 """build a matching function from a set of patterns"""
484 if not pats:
474 if not pats:
485 return
475 return
486 try:
476 try:
487 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
477 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
488 if len(pat) > 20000:
478 if len(pat) > 20000:
489 raise OverflowError()
479 raise OverflowError()
490 return re.compile(pat).match
480 return re.compile(pat).match
491 except OverflowError:
481 except OverflowError:
492 # We're using a Python with a tiny regex engine and we
482 # We're using a Python with a tiny regex engine and we
493 # made it explode, so we'll divide the pattern list in two
483 # made it explode, so we'll divide the pattern list in two
494 # until it works
484 # until it works
495 l = len(pats)
485 l = len(pats)
496 if l < 2:
486 if l < 2:
497 raise
487 raise
498 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
488 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
499 return lambda s: a(s) or b(s)
489 return lambda s: a(s) or b(s)
500 except re.error:
490 except re.error:
501 for k, p in pats:
491 for k, p in pats:
502 try:
492 try:
503 re.compile('(?:%s)' % regex(k, p, tail))
493 re.compile('(?:%s)' % regex(k, p, tail))
504 except re.error:
494 except re.error:
505 if src:
495 if src:
506 raise Abort("%s: invalid pattern (%s): %s" %
496 raise Abort("%s: invalid pattern (%s): %s" %
507 (src, k, p))
497 (src, k, p))
508 else:
498 else:
509 raise Abort("invalid pattern (%s): %s" % (k, p))
499 raise Abort("invalid pattern (%s): %s" % (k, p))
510 raise Abort("invalid pattern")
500 raise Abort("invalid pattern")
511
501
512 def globprefix(pat):
502 def globprefix(pat):
513 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
503 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
514 root = []
504 root = []
515 for p in pat.split('/'):
505 for p in pat.split('/'):
516 if contains_glob(p): break
506 if contains_glob(p): break
517 root.append(p)
507 root.append(p)
518 return '/'.join(root) or '.'
508 return '/'.join(root) or '.'
519
509
520 def normalizepats(names, default):
510 def normalizepats(names, default):
521 pats = []
511 pats = []
522 roots = []
512 roots = []
523 anypats = False
513 anypats = False
524 for kind, name in [patkind(p, default) for p in names]:
514 for kind, name in [patkind(p, default) for p in names]:
525 if kind in ('glob', 'relpath'):
515 if kind in ('glob', 'relpath'):
526 name = canonpath(canonroot, cwd, name)
516 name = canonpath(canonroot, cwd, name)
527 elif kind in ('relglob', 'path'):
517 elif kind in ('relglob', 'path'):
528 name = normpath(name)
518 name = normpath(name)
529
519
530 pats.append((kind, name))
520 pats.append((kind, name))
531
521
532 if kind in ('glob', 're', 'relglob', 'relre'):
522 if kind in ('glob', 're', 'relglob', 'relre'):
533 anypats = True
523 anypats = True
534
524
535 if kind == 'glob':
525 if kind == 'glob':
536 root = globprefix(name)
526 root = globprefix(name)
537 roots.append(root)
527 roots.append(root)
538 elif kind in ('relpath', 'path'):
528 elif kind in ('relpath', 'path'):
539 roots.append(name or '.')
529 roots.append(name or '.')
540 elif kind == 'relglob':
530 elif kind == 'relglob':
541 roots.append('.')
531 roots.append('.')
542 return roots, pats, anypats
532 return roots, pats, anypats
543
533
544 roots, pats, anypats = normalizepats(names, dflt_pat)
534 roots, pats, anypats = normalizepats(names, dflt_pat)
545
535
546 patmatch = matchfn(pats, '$') or always
536 patmatch = matchfn(pats, '$') or always
547 incmatch = always
537 incmatch = always
548 if inc:
538 if inc:
549 dummy, inckinds, dummy = normalizepats(inc, 'glob')
539 dummy, inckinds, dummy = normalizepats(inc, 'glob')
550 incmatch = matchfn(inckinds, '(?:/|$)')
540 incmatch = matchfn(inckinds, '(?:/|$)')
551 excmatch = lambda fn: False
541 excmatch = lambda fn: False
552 if exc:
542 if exc:
553 dummy, exckinds, dummy = normalizepats(exc, 'glob')
543 dummy, exckinds, dummy = normalizepats(exc, 'glob')
554 excmatch = matchfn(exckinds, '(?:/|$)')
544 excmatch = matchfn(exckinds, '(?:/|$)')
555
545
556 if not names and inc and not exc:
546 if not names and inc and not exc:
557 # common case: hgignore patterns
547 # common case: hgignore patterns
558 match = incmatch
548 match = incmatch
559 else:
549 else:
560 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
550 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
561
551
562 return (roots, match, (inc or exc or anypats) and True)
552 return (roots, match, (inc or exc or anypats) and True)
563
553
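# Illustrative sketch (editor's example, not part of the changeset): using
# matcher(), which after this change absorbs the old _matcher() body and grows
# a dflt_pat argument while cmdmatcher() goes away.  POSIX paths; the /repo
# root and the file names are made up.
def _example_matcher():
    roots, match, anypats = matcher('/repo', cwd='',
                                    names=['glob:mercurial/*.py', 're:^tests/'],
                                    exc=['glob:**/*.orig'])
    assert roots == ['mercurial']                # where a walk would start
    assert match('mercurial/util.py')
    assert match('tests/test-walk')
    assert not match('mercurial/util.py.orig')   # excluded
    assert anypats                               # non-trivial patterns given
    return match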
564 _hgexecutable = None
554 _hgexecutable = None
565
555
566 def main_is_frozen():
556 def main_is_frozen():
567 """return True if we are a frozen executable.
557 """return True if we are a frozen executable.
568
558
569 The code supports py2exe (most common, Windows only) and tools/freeze
559 The code supports py2exe (most common, Windows only) and tools/freeze
570 (portable, not much used).
560 (portable, not much used).
571 """
561 """
572 return (hasattr(sys, "frozen") or # new py2exe
562 return (hasattr(sys, "frozen") or # new py2exe
573 hasattr(sys, "importers") or # old py2exe
563 hasattr(sys, "importers") or # old py2exe
574 imp.is_frozen("__main__")) # tools/freeze
564 imp.is_frozen("__main__")) # tools/freeze
575
565
576 def hgexecutable():
566 def hgexecutable():
577 """return location of the 'hg' executable.
567 """return location of the 'hg' executable.
578
568
579 Defaults to $HG or 'hg' in the search path.
569 Defaults to $HG or 'hg' in the search path.
580 """
570 """
581 if _hgexecutable is None:
571 if _hgexecutable is None:
582 hg = os.environ.get('HG')
572 hg = os.environ.get('HG')
583 if hg:
573 if hg:
584 set_hgexecutable(hg)
574 set_hgexecutable(hg)
585 elif main_is_frozen():
575 elif main_is_frozen():
586 set_hgexecutable(sys.executable)
576 set_hgexecutable(sys.executable)
587 else:
577 else:
588 set_hgexecutable(find_exe('hg', 'hg'))
578 set_hgexecutable(find_exe('hg', 'hg'))
589 return _hgexecutable
579 return _hgexecutable
590
580
591 def set_hgexecutable(path):
581 def set_hgexecutable(path):
592 """set location of the 'hg' executable"""
582 """set location of the 'hg' executable"""
593 global _hgexecutable
583 global _hgexecutable
594 _hgexecutable = path
584 _hgexecutable = path
595
585
596 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
586 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
597 '''enhanced shell command execution.
587 '''enhanced shell command execution.
598 run with environment maybe modified, maybe in different dir.
588 run with environment maybe modified, maybe in different dir.
599
589
600 if command fails and onerr is None, return status. if ui object,
590 if command fails and onerr is None, return status. if ui object,
601 print error message and return status, else raise onerr object as
591 print error message and return status, else raise onerr object as
602 exception.'''
592 exception.'''
603 def py2shell(val):
593 def py2shell(val):
604 'convert python object into string that is useful to shell'
594 'convert python object into string that is useful to shell'
605 if val in (None, False):
595 if val in (None, False):
606 return '0'
596 return '0'
607 if val == True:
597 if val == True:
608 return '1'
598 return '1'
609 return str(val)
599 return str(val)
610 oldenv = {}
600 oldenv = {}
611 for k in environ:
601 for k in environ:
612 oldenv[k] = os.environ.get(k)
602 oldenv[k] = os.environ.get(k)
613 if cwd is not None:
603 if cwd is not None:
614 oldcwd = os.getcwd()
604 oldcwd = os.getcwd()
615 origcmd = cmd
605 origcmd = cmd
616 if os.name == 'nt':
606 if os.name == 'nt':
617 cmd = '"%s"' % cmd
607 cmd = '"%s"' % cmd
618 try:
608 try:
619 for k, v in environ.iteritems():
609 for k, v in environ.iteritems():
620 os.environ[k] = py2shell(v)
610 os.environ[k] = py2shell(v)
621 os.environ['HG'] = hgexecutable()
611 os.environ['HG'] = hgexecutable()
622 if cwd is not None and oldcwd != cwd:
612 if cwd is not None and oldcwd != cwd:
623 os.chdir(cwd)
613 os.chdir(cwd)
624 rc = os.system(cmd)
614 rc = os.system(cmd)
625 if sys.platform == 'OpenVMS' and rc & 1:
615 if sys.platform == 'OpenVMS' and rc & 1:
626 rc = 0
616 rc = 0
627 if rc and onerr:
617 if rc and onerr:
628 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
618 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
629 explain_exit(rc)[0])
619 explain_exit(rc)[0])
630 if errprefix:
620 if errprefix:
631 errmsg = '%s: %s' % (errprefix, errmsg)
621 errmsg = '%s: %s' % (errprefix, errmsg)
632 try:
622 try:
633 onerr.warn(errmsg + '\n')
623 onerr.warn(errmsg + '\n')
634 except AttributeError:
624 except AttributeError:
635 raise onerr(errmsg)
625 raise onerr(errmsg)
636 return rc
626 return rc
637 finally:
627 finally:
638 for k, v in oldenv.iteritems():
628 for k, v in oldenv.iteritems():
639 if v is None:
629 if v is None:
640 del os.environ[k]
630 del os.environ[k]
641 else:
631 else:
642 os.environ[k] = v
632 os.environ[k] = v
643 if cwd is not None and oldcwd != cwd:
633 if cwd is not None and oldcwd != cwd:
644 os.chdir(oldcwd)
634 os.chdir(oldcwd)
645
635
646 # os.path.lexists is not available on python2.3
636 # os.path.lexists is not available on python2.3
647 def lexists(filename):
637 def lexists(filename):
648 "test whether a file with this name exists. does not follow symlinks"
638 "test whether a file with this name exists. does not follow symlinks"
649 try:
639 try:
650 os.lstat(filename)
640 os.lstat(filename)
651 except:
641 except:
652 return False
642 return False
653 return True
643 return True
654
644
655 def rename(src, dst):
645 def rename(src, dst):
656 """forcibly rename a file"""
646 """forcibly rename a file"""
657 try:
647 try:
658 os.rename(src, dst)
648 os.rename(src, dst)
659 except OSError, err: # FIXME: check err (EEXIST ?)
649 except OSError, err: # FIXME: check err (EEXIST ?)
660 # on windows, rename to existing file is not allowed, so we
650 # on windows, rename to existing file is not allowed, so we
661 # must delete destination first. but if file is open, unlink
651 # must delete destination first. but if file is open, unlink
662 # schedules it for delete but does not delete it. rename
652 # schedules it for delete but does not delete it. rename
663 # happens immediately even for open files, so we create
653 # happens immediately even for open files, so we create
664 # temporary file, delete it, rename destination to that name,
654 # temporary file, delete it, rename destination to that name,
665 # then delete that. then rename is safe to do.
655 # then delete that. then rename is safe to do.
666 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
656 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
667 os.close(fd)
657 os.close(fd)
668 os.unlink(temp)
658 os.unlink(temp)
669 os.rename(dst, temp)
659 os.rename(dst, temp)
670 os.unlink(temp)
660 os.unlink(temp)
671 os.rename(src, dst)
661 os.rename(src, dst)
672
662
673 def unlink(f):
663 def unlink(f):
674 """unlink and remove the directory if it is empty"""
664 """unlink and remove the directory if it is empty"""
675 os.unlink(f)
665 os.unlink(f)
676 # try removing directories that might now be empty
666 # try removing directories that might now be empty
677 try:
667 try:
678 os.removedirs(os.path.dirname(f))
668 os.removedirs(os.path.dirname(f))
679 except OSError:
669 except OSError:
680 pass
670 pass
681
671
682 def copyfile(src, dest):
672 def copyfile(src, dest):
683 "copy a file, preserving mode"
673 "copy a file, preserving mode"
684 if os.path.islink(src):
674 if os.path.islink(src):
685 try:
675 try:
686 os.unlink(dest)
676 os.unlink(dest)
687 except:
677 except:
688 pass
678 pass
689 os.symlink(os.readlink(src), dest)
679 os.symlink(os.readlink(src), dest)
690 else:
680 else:
691 try:
681 try:
692 shutil.copyfile(src, dest)
682 shutil.copyfile(src, dest)
693 shutil.copymode(src, dest)
683 shutil.copymode(src, dest)
694 except shutil.Error, inst:
684 except shutil.Error, inst:
695 raise Abort(str(inst))
685 raise Abort(str(inst))
696
686
697 def copyfiles(src, dst, hardlink=None):
687 def copyfiles(src, dst, hardlink=None):
698 """Copy a directory tree using hardlinks if possible"""
688 """Copy a directory tree using hardlinks if possible"""
699
689
700 if hardlink is None:
690 if hardlink is None:
701 hardlink = (os.stat(src).st_dev ==
691 hardlink = (os.stat(src).st_dev ==
702 os.stat(os.path.dirname(dst)).st_dev)
692 os.stat(os.path.dirname(dst)).st_dev)
703
693
704 if os.path.isdir(src):
694 if os.path.isdir(src):
705 os.mkdir(dst)
695 os.mkdir(dst)
706 for name, kind in osutil.listdir(src):
696 for name, kind in osutil.listdir(src):
707 srcname = os.path.join(src, name)
697 srcname = os.path.join(src, name)
708 dstname = os.path.join(dst, name)
698 dstname = os.path.join(dst, name)
709 copyfiles(srcname, dstname, hardlink)
699 copyfiles(srcname, dstname, hardlink)
710 else:
700 else:
711 if hardlink:
701 if hardlink:
712 try:
702 try:
713 os_link(src, dst)
703 os_link(src, dst)
714 except (IOError, OSError):
704 except (IOError, OSError):
715 hardlink = False
705 hardlink = False
716 shutil.copy(src, dst)
706 shutil.copy(src, dst)
717 else:
707 else:
718 shutil.copy(src, dst)
708 shutil.copy(src, dst)
719
709
class path_auditor(object):
    '''ensure that a filesystem path contains no banned components.
    the following properties of a path are checked:

    - under top-level .hg
    - starts at the root of a windows drive
    - contains ".."
    - traverses a symlink (e.g. a/symlink_here/b)
    - inside a nested repository'''

    def __init__(self, root):
        self.audited = set()
        self.auditeddir = set()
        self.root = root

    def __call__(self, path):
        if path in self.audited:
            return
        normpath = os.path.normcase(path)
        parts = splitpath(normpath)
        if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
            or os.pardir in parts):
            raise Abort(_("path contains illegal component: %s") % path)
        def check(prefix):
            curpath = os.path.join(self.root, prefix)
            try:
                st = os.lstat(curpath)
            except OSError, err:
                # EINVAL can be raised as invalid path syntax under win32.
                # Such errors must be ignored because patterns can be
                # checked too.
                if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
                    raise
            else:
                if stat.S_ISLNK(st.st_mode):
                    raise Abort(_('path %r traverses symbolic link %r') %
                                (path, prefix))
                elif (stat.S_ISDIR(st.st_mode) and
                      os.path.isdir(os.path.join(curpath, '.hg'))):
                    raise Abort(_('path %r is inside repo %r') %
                                (path, prefix))
        parts.pop()
        prefixes = []
        for n in range(len(parts)):
            prefix = os.sep.join(parts)
            if prefix in self.auditeddir:
                break
            check(prefix)
            prefixes.append(prefix)
            parts.pop()

        self.audited.add(path)
        # only add prefixes to the cache after checking everything: we don't
        # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
        self.auditeddir.update(prefixes)

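# Illustrative sketch (added; '/repo' is a made-up working directory root):
#
#   audit = path_auditor('/repo')
#   audit('a/b/c.txt')    # ok; 'a' and 'a/b' are remembered as audited
#   audit('.hg/hgrc')     # raises Abort("path contains illegal component: ...")
#   audit('../escape')    # raises Abort, '..' is a banned component
#
# The prefix cache means repeated calls for files under the same directory
# only lstat() each parent once.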
def _makelock_file(info, pathname):
    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)

def _readlock_file(pathname):
    return posixfile(pathname).read()

def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    return os.lstat(pathname).st_nlink

if hasattr(os, 'link'):
    os_link = os.link
else:
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))

def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        return os.stat(fp.name)

posixfile = file

def openhardlinks():
    '''return true if it is safe to hold open file handles to hardlinks'''
    return True

getuser_fallback = None

def getuser():
    '''return name of current user'''
    try:
        return getpass.getuser()
    except ImportError:
        # import of pwd will fail on windows - try fallback
        if getuser_fallback:
            return getuser_fallback()
    # raised if win32api not available
    raise Abort(_('user name not available - set USERNAME '
                  'environment variable'))

def username(uid=None):
    """Return the name of the user with the given uid.

    If uid is None, return the name of the current user."""
    try:
        import pwd
        if uid is None:
            uid = os.getuid()
        try:
            return pwd.getpwuid(uid)[0]
        except KeyError:
            return str(uid)
    except ImportError:
        return None

def groupname(gid=None):
    """Return the name of the group with the given gid.

    If gid is None, return the name of the current group."""
    try:
        import grp
        if gid is None:
            gid = os.getgid()
        try:
            return grp.getgrgid(gid)[0]
        except KeyError:
            return str(gid)
    except ImportError:
        return None

# File system features

def checkfolding(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.stat(path)
    d, b = os.path.split(path)
    p2 = os.path.join(d, b.upper())
    if path == p2:
        p2 = os.path.join(d, b.lower())
    try:
        s2 = os.stat(p2)
        if s2 == s1:
            return False
        return True
    except:
        return True

def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)
    """

    # VFAT on some Linux versions can flip mode but it doesn't persist
    # across a FS remount. Frequently we can detect it if files are created
    # with exec bit on.

    try:
        EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        fh, fn = tempfile.mkstemp("", "", path)
        try:
            os.close(fh)
            m = os.stat(fn).st_mode & 0777
            new_file_has_exec = m & EXECFLAGS
            os.chmod(fn, m ^ EXECFLAGS)
            exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
        finally:
            os.unlink(fn)
    except (IOError, OSError):
        # we don't care, the user probably won't be able to commit anyway
        return False
    return not (new_file_has_exec or exec_flags_cannot_flip)

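# Illustrative sketch (added; the path is made up): both probes are normally
# pointed at a repository's .hg directory.
#
#   if checkexec('/repo/.hg'):
#       pass  # the executable bit on tracked files can be trusted
#
# checkexec() creates a scratch file, flips its exec bits and only returns
# True when a fresh file starts out non-executable and the flip sticks.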
def execfunc(path, fallback):
    '''return an is_exec() function with default to fallback'''
    if checkexec(path):
        return lambda x: is_exec(os.path.join(path, x))
    return fallback

def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy because symlink creation will fail if the
    # file already exists
    name = tempfile.mktemp(dir=path)
    try:
        os.symlink(".", name)
        os.unlink(name)
        return True
    except (OSError, AttributeError):
        return False

def linkfunc(path, fallback):
    '''return an is_link() function with default to fallback'''
    if checklink(path):
        return lambda x: os.path.islink(os.path.join(path, x))
    return fallback

_umask = os.umask(0)
os.umask(_umask)

def needbinarypatch():
    """return True if patches should be applied in binary mode by default."""
    return os.name == 'nt'

def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)

def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative to a simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(os.sep)

def gui():
    '''Are we running in a GUI?'''
    return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")

def lookup_reg(key, name=None, scope=None):
    return None

# Platform specific variants
if os.name == 'nt':
    import msvcrt
    nulldev = 'NUL:'

    class winstdout:
        '''stdout on windows misbehaves if sent through a pipe'''

        def __init__(self, fp):
            self.fp = fp

        def __getattr__(self, key):
            return getattr(self.fp, key)

        def close(self):
            try:
                self.fp.close()
            except: pass

        def write(self, s):
            try:
                # This is a workaround for "Not enough space" errors when
                # writing a large amount of data to the console.
                limit = 16000
                l = len(s)
                start = 0
                while start < l:
                    end = start + limit
                    self.fp.write(s[start:end])
                    start = end
            except IOError, inst:
                if inst.errno != 0: raise
                self.close()
                raise IOError(errno.EPIPE, 'Broken pipe')

        def flush(self):
            try:
                return self.fp.flush()
            except IOError, inst:
                if inst.errno != errno.EINVAL: raise
                self.close()
                raise IOError(errno.EPIPE, 'Broken pipe')

    sys.stdout = winstdout(sys.stdout)

    def _is_win_9x():
        '''return true if run on windows 95, 98 or me.'''
        try:
            return sys.getwindowsversion()[3] == 1
        except AttributeError:
            return 'command' in os.environ.get('comspec', '')

    def openhardlinks():
        return not _is_win_9x and "win32api" in locals()

    def system_rcpath():
        try:
            return system_rcpath_win32()
        except:
            return [r'c:\mercurial\mercurial.ini']

    def user_rcpath():
        '''return os-specific hgrc search path to the user dir'''
        try:
            path = user_rcpath_win32()
        except:
            home = os.path.expanduser('~')
            path = [os.path.join(home, 'mercurial.ini'),
                    os.path.join(home, '.hgrc')]
        userprofile = os.environ.get('USERPROFILE')
        if userprofile:
            path.append(os.path.join(userprofile, 'mercurial.ini'))
            path.append(os.path.join(userprofile, '.hgrc'))
        return path

    def parse_patch_output(output_line):
        """parses the output produced by patch and returns the file name"""
        pf = output_line[14:]
        if pf[0] == '`':
            pf = pf[1:-1] # Remove the quotes
        return pf

    def sshargs(sshcmd, host, user, port):
        '''Build argument list for ssh or Plink'''
        pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
        args = user and ("%s@%s" % (user, host)) or host
        return port and ("%s %s %s" % (args, pflag, port)) or args

    def testpid(pid):
        '''return False if pid dead, True if running or not known'''
        return True

    def set_flags(f, flags):
        pass

    def set_binary(fd):
        # When run without console, pipes may expose invalid
        # fileno(), usually set to -1.
        if hasattr(fd, 'fileno') and fd.fileno() >= 0:
            msvcrt.setmode(fd.fileno(), os.O_BINARY)

    def pconvert(path):
        return '/'.join(splitpath(path))

    def localpath(path):
        return path.replace('/', '\\')

    def normpath(path):
        return pconvert(os.path.normpath(path))

    makelock = _makelock_file
    readlock = _readlock_file

    def samestat(s1, s2):
        return False

    # A sequence of backslashes is special iff it precedes a double quote:
    # - if there's an even number of backslashes, the double quote is not
    #   quoted (i.e. it ends the quoted region)
    # - if there's an odd number of backslashes, the double quote is quoted
    # - in both cases, every pair of backslashes is unquoted into a single
    #   backslash
    # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
    # So, to quote a string, we must surround it in double quotes, double
    # the number of backslashes that precede double quotes and add another
    # backslash before every double quote (being careful with the double
    # quote we've appended to the end)
    _quotere = None
    def shellquote(s):
        global _quotere
        if _quotere is None:
            _quotere = re.compile(r'(\\*)("|\\$)')
        return '"%s"' % _quotere.sub(r'\1\1\\\2', s)

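    # Illustrative example (added; not in the original source): following the
    # rules above, backslashes in front of a double quote are doubled and the
    # quote itself gets one more backslash; other backslashes are untouched.
    #
    #   shellquote() turns   say "hi"   into   "say \"hi\""
    #   and it turns         C:\tmp\    into   "C:\tmp\\"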
    def quotecommand(cmd):
        """Build a command string suitable for os.popen* calls."""
        # The extra quotes are needed because popen* runs the command
        # through the current COMSPEC. cmd.exe suppresses enclosing quotes.
        return '"' + cmd + '"'

    def popen(command, mode='r'):
        # Work around "popen spawned process may not write to stdout
        # under windows"
        # http://bugs.python.org/issue1366
        command += " 2> %s" % nulldev
        return os.popen(quotecommand(command), mode)

    def explain_exit(code):
        return _("exited with status %d") % code, code

    # if you change this stub into a real check, please try to implement the
    # username and groupname functions above, too.
    def isowner(fp, st=None):
        return True

    def find_in_path(name, path, default=None):
        '''find name in search path. path can be string (will be split
        with os.pathsep), or iterable thing that returns strings. if name
        found, return path to name. else return default. name is looked up
        using cmd.exe rules, using PATHEXT.'''
        if isinstance(path, str):
            path = path.split(os.pathsep)

        pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
        pathext = pathext.lower().split(os.pathsep)
        isexec = os.path.splitext(name)[1].lower() in pathext

        for p in path:
            p_name = os.path.join(p, name)

            if isexec and os.path.exists(p_name):
                return p_name

            for ext in pathext:
                p_name_ext = p_name + ext
                if os.path.exists(p_name_ext):
                    return p_name_ext
        return default
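    # Illustrative example (added; paths are made up): with the default
    # PATHEXT, looking up a bare 'hg' tries hg.com, hg.exe, hg.bat and hg.cmd
    # in each PATH entry, while 'hg.exe' is also checked as-is because its
    # extension already appears in PATHEXT.
    #
    #   find_in_path('hg', os.environ.get('PATH', ''))
    #   find_in_path('hg.exe', r'C:\hg;C:\other')   # C:\hg\hg.exe if present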

    def set_signal_handler():
        try:
            set_signal_handler_win32()
        except NameError:
            pass

    try:
        # override functions with win32 versions if possible
        from util_win32 import *
        if not _is_win_9x():
            posixfile = posixfile_nt
    except ImportError:
        pass

else:
    nulldev = '/dev/null'

    def rcfiles(path):
        rcs = [os.path.join(path, 'hgrc')]
        rcdir = os.path.join(path, 'hgrc.d')
        try:
            rcs.extend([os.path.join(rcdir, f)
                        for f, kind in osutil.listdir(rcdir)
                        if f.endswith(".rc")])
        except OSError:
            pass
        return rcs

    def system_rcpath():
        path = []
        # old mod_python does not set sys.argv
        if len(getattr(sys, 'argv', [])) > 0:
            path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
                                '/../etc/mercurial'))
        path.extend(rcfiles('/etc/mercurial'))
        return path

    def user_rcpath():
        return [os.path.expanduser('~/.hgrc')]

    def parse_patch_output(output_line):
        """parses the output produced by patch and returns the file name"""
        pf = output_line[14:]
        if os.sys.platform == 'OpenVMS':
            if pf[0] == '`':
                pf = pf[1:-1] # Remove the quotes
        else:
            if pf.startswith("'") and pf.endswith("'") and " " in pf:
                pf = pf[1:-1] # Remove the quotes
        return pf

    def sshargs(sshcmd, host, user, port):
        '''Build argument list for ssh'''
        args = user and ("%s@%s" % (user, host)) or host
        return port and ("%s -p %s" % (args, port)) or args

    def is_exec(f):
        """check whether a file is executable"""
        return (os.lstat(f).st_mode & 0100 != 0)

    def set_flags(f, flags):
        s = os.lstat(f).st_mode
        x = "x" in flags
        l = "l" in flags
        if l:
            if not stat.S_ISLNK(s):
                # switch file to link
                data = file(f).read()
                os.unlink(f)
                os.symlink(data, f)
            # no chmod needed at this point
            return
        if stat.S_ISLNK(s):
            # switch link to file
            data = os.readlink(f)
            os.unlink(f)
            file(f, "w").write(data)
            s = 0666 & ~_umask # avoid restatting for chmod

        sx = s & 0100
        if x and not sx:
            # Turn on +x for every +r bit when making a file executable
            # and obey umask.
            os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
        elif not x and sx:
            # Turn off all +x bits
            os.chmod(f, s & 0666)
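    # Illustrative example (added; file names are made up): flags are the
    # manifest flag strings Mercurial uses: "" (regular), "x" (executable)
    # and "l" (symlink).
    #
    #   set_flags('script.sh', 'x')   # add +x wherever +r is set, minus umask
    #   set_flags('plain.txt', '')    # drop any +x bits
    #   set_flags('link', 'l')        # rewrite a regular file as a symlink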

    def set_binary(fd):
        pass

    def pconvert(path):
        return path

    def localpath(path):
        return path

    normpath = os.path.normpath
    samestat = os.path.samestat

    def makelock(info, pathname):
        try:
            os.symlink(info, pathname)
        except OSError, why:
            if why.errno == errno.EEXIST:
                raise
            else:
                _makelock_file(info, pathname)

    def readlock(pathname):
        try:
            return os.readlink(pathname)
        except OSError, why:
            if why.errno in (errno.EINVAL, errno.ENOSYS):
                return _readlock_file(pathname)
            else:
                raise

    def shellquote(s):
        if os.sys.platform == 'OpenVMS':
            return '"%s"' % s
        else:
            return "'%s'" % s.replace("'", "'\\''")
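    # Illustrative example (added): POSIX quoting wraps the string in single
    # quotes and splices each embedded single quote out of the quoted region,
    # e.g. shellquote("it's") returns  'it'\''s'  (OpenVMS above just uses
    # double quotes).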

    def quotecommand(cmd):
        return cmd

    def popen(command, mode='r'):
        return os.popen(command, mode)

    def testpid(pid):
        '''return False if pid dead, True if running or not sure'''
        if os.sys.platform == 'OpenVMS':
            return True
        try:
            os.kill(pid, 0)
            return True
        except OSError, inst:
            return inst.errno != errno.ESRCH

    def explain_exit(code):
        """return a 2-tuple (desc, code) describing a process's status"""
        if os.WIFEXITED(code):
            val = os.WEXITSTATUS(code)
            return _("exited with status %d") % val, val
        elif os.WIFSIGNALED(code):
            val = os.WTERMSIG(code)
            return _("killed by signal %d") % val, val
        elif os.WIFSTOPPED(code):
            val = os.WSTOPSIG(code)
            return _("stopped by signal %d") % val, val
        raise ValueError(_("invalid exit code"))

    def isowner(fp, st=None):
        """Return True if the file object f belongs to the current user.

        The return value of a util.fstat(f) may be passed as the st argument.
        """
        if st is None:
            st = fstat(fp)
        return st.st_uid == os.getuid()

    def find_in_path(name, path, default=None):
        '''find name in search path. path can be string (will be split
        with os.pathsep), or iterable thing that returns strings. if name
        found, return path to name. else return default.'''
        if isinstance(path, str):
            path = path.split(os.pathsep)
        for p in path:
            p_name = os.path.join(p, name)
            if os.path.exists(p_name):
                return p_name
        return default

    def set_signal_handler():
        pass

    def find_exe(name, default=None):
        '''find path of an executable.
        if name contains a path component, return it as is. otherwise,
        use normal executable search path.'''

        if os.sep in name or sys.platform == 'OpenVMS':
            # don't check the executable bit. if the file isn't
            # executable, whoever tries to actually run it will give a
            # much more useful error message.
            return name
        return find_in_path(name, os.environ.get('PATH', ''), default=default)

def _buildencodefun():
    e = '_'
    win_reserved = [ord(x) for x in '\\:*?"<>|']
    cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
    for x in (range(32) + range(126, 256) + win_reserved):
        cmap[chr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
        cmap[chr(x)] = e + chr(x).lower()
    dmap = {}
    for k, v in cmap.iteritems():
        dmap[v] = k
    def decode(s):
        i = 0
        while i < len(s):
            for l in xrange(1, 4):
                try:
                    yield dmap[s[i:i+l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError
    return (lambda s: "".join([cmap[c] for c in s]),
            lambda s: "".join(list(decode(s))))

encodefilename, decodefilename = _buildencodefun()

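# Illustrative example (added; not in the original source): the table maps
# uppercase letters and '_' to an '_'-escaped lowercase form so the store
# survives case-folding filesystems, while control bytes, bytes >= 126 and
# the Windows-reserved punctuation become '~%02x' escapes.
#
#   encodefilename('data/FOO_bar.i')      ->  'data/_f_o_o__bar.i'
#   decodefilename('data/_f_o_o__bar.i')  ->  'data/FOO_bar.i'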
def encodedopener(openerfn, fn):
    def o(path, *args, **kw):
        return openerfn(fn(path), *args, **kw)
    return o

def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents as name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    try:
        st_mode = os.lstat(name).st_mode & 0777
    except OSError, inst:
        if inst.errno != errno.ENOENT:
            raise
        st_mode = createmode
        if st_mode is None:
            st_mode = ~_umask
        st_mode &= 0666
    os.chmod(temp, st_mode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except:
        try: os.unlink(temp)
        except: pass
        raise
    return temp

class atomictempfile(posixfile):
    """file-like object that atomically updates a file

    All writes will be redirected to a temporary copy of the original
    file. When rename is called, the copy is renamed to the original
    name, making the changes visible.
    """
    def __init__(self, name, mode, createmode):
        self.__name = name
        self.temp = mktempcopy(name, emptyok=('w' in mode),
                               createmode=createmode)
        posixfile.__init__(self, self.temp, mode)

    def rename(self):
        if not self.closed:
            posixfile.close(self)
            rename(self.temp, localpath(self.__name))

    def __del__(self):
        if not self.closed:
            try:
                os.unlink(self.temp)
            except: pass
            posixfile.close(self)

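# Illustrative sketch (added; the file name is made up): writes become visible
# only when rename() is called, so a crash in between leaves the original
# file untouched.
#
#   f = atomictempfile('.hg/branch', 'w', createmode=None)
#   f.write('default\n')
#   f.rename()            # atomically replaces .hg/branch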
def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    try:
        os.mkdir(name)
        if mode is not None:
            os.chmod(name, mode)
        return
    except OSError, err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT:
            raise
    parent = os.path.abspath(os.path.dirname(name))
    makedirs(parent, mode)
    makedirs(name, mode)

class opener(object):
    """Open files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    def __init__(self, base, audit=True):
        self.base = base
        if audit:
            self.audit_path = path_auditor(base)
        else:
            self.audit_path = always
        self.createmode = None

    def __getattr__(self, name):
        if name == '_can_symlink':
            self._can_symlink = checklink(self.base)
            return self._can_symlink
        raise AttributeError(name)

    def _fixfilemode(self, name):
        if self.createmode is None:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        self.audit_path(path)
        f = os.path.join(self.base, path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode[0] != "r":
            try:
                nlink = nlinks(f)
            except OSError:
                nlink = 0
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    makedirs(d, self.createmode)
            if atomictemp:
                return atomictempfile(f, mode, self.createmode)
            if nlink > 1:
                rename(mktempcopy(f), f)
        fp = posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        self.audit_path(dst)
        linkname = os.path.join(self.base, dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            makedirs(dirname, self.createmode)

        if self._can_symlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            f = self(dst, "w")
            f.write(src)
            f.close()
            self._fixfilemode(dst)

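# Illustrative sketch (added; the base directory and file name are made up):
#
#   op = opener('/repo/.hg')
#   f = op('undo.branch', 'w', atomictemp=True)
#   f.write('default')
#   f.rename()
#
# Every path goes through path_auditor first, missing parent directories are
# created with createmode, and a link count > 1 makes __call__ break the
# hardlink via mktempcopy() before handing out a writable file.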
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.iter = iter(in_iter)
        self.buf = ''
        self.targetsize = 2**16

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and self.iter:
            # Clamp to a multiple of self.targetsize
            targetsize = max(l, self.targetsize)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                self.iter = False
            self.buf = collector.getvalue()
        if len(self.buf) == l:
            s, self.buf = str(self.buf), ''
        else:
            s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s

def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None: nbytes = size
        else: nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s: break
        if limit: limit -= len(s)
        yield s

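# Illustrative example (added; not in the original source): filechunkiter()
# slices a file into chunks, chunkbuffer() turns an iterator of arbitrary
# chunks back into a read()-able object.
#
#   cb = chunkbuffer(['abc', 'defg', 'h'])
#   cb.read(5)   ->  'abcde'
#   cb.read(5)   ->  'fgh'
#   cb.read(5)   ->  ''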
def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if "%1" and "%2" are not present
    in the format, the time zone is not appended to the string."""
    t, tz = date or makedate()
    if "%1" in format or "%2" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) / 60
        format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    s = time.strftime(format, time.gmtime(float(t) - tz))
    return s

def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')

def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    '-{days}' on or after the given number of days before today

    '{date} to {date}' a date range, inclusive
    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                # parsedate aborts on impossible dates (e.g. Feb 31);
                # fall through and try the next-shorter month length
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

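A hedged sketch of how the specifiers above compose with parsedate (illustrative only; the exact cutoff timestamps depend on extendeddateformats and the local timezone):

# m matches any unixtime on or after the start of Feb 1 2006, local time
m = matchdate('>2006-02-01')
m(parsedate('2006-03-15')[0])        # expected: True
m(parsedate('2005-12-31')[0])        # expected: False

# '-7' matches anything within the last seven days of "now"
recent = matchdate('-7')
recent(makedate()[0])                # expected: True
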
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f+1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

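The successive find/slice steps above reduce a full "Name <login.name@host>" string to the bare login; a doctest-style sketch (not part of the original source):

>>> shortuser('John Doe <john.doe@example.com>')
'john'
>>> shortuser('foo@bar.com')
'foo'
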
def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1: r = None
    return author[author.find('<')+1:r]

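For contrast with shortuser(), email() keeps the full address; when there are no angle brackets the whole string is returned unchanged (illustrative sketch):

>>> email('John Doe <john.doe@example.com>')
'john.doe@example.com'
>>> email('john.doe@example.com')
'john.doe@example.com'
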
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength-3])

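Note that the three-dot suffix is counted inside maxlength, so the result never exceeds maxlength characters (illustrative sketch):

>>> ellipsis('0123456789', 8)
'01234...'
>>> ellipsis('short', 8)
'short'
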
def walkrepos(path, followsym=False, seen_dirs=None):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        if '.hg' in dirs:
            dirs[:] = [] # don't descend further
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

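A hedged usage sketch (the directory name is hypothetical; output order follows os.walk, and symlinks are only followed on platforms that provide os.path.samestat for cycle detection):

# Print every repository (and any patch-queue repository) under ~/src.
for repo in walkrepos(os.path.expanduser('~/src'), followsym=True):
    print repo
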
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

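A hedged illustration of the HGRCPATH handling above (the paths are hypothetical):

# HGRCPATH='/etc/mercurial:/home/user/.hgrc'  (POSIX os.pathsep)
#   -> every '*.rc' file inside /etc/mercurial, then /home/user/.hgrc itself
# HGRCPATH=''  (set but empty)
#   -> [], so only the repository's own .hg/hgrc is consulted
# HGRCPATH unset
#   -> os_rcpath(), the os-specific default
# The result is computed once and cached in the module-level _rcpath.
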
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes
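The (multiplier, divisor, format) triples are tried largest-first, so the precision of the mantissa is matched to its magnitude (illustrative sketch, assuming an untranslated message catalog):

>>> bytecount(10000)
'9.77 KB'
>>> bytecount(150 * (1 << 20))
'150 MB'
>>> bytecount(0)
'0 bytes'
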
def drop_scheme(scheme, path):
    '''strip a leading "scheme:" (and any following "//") from path'''
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path

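A short sketch of the stripping above; paths that do not start with the given scheme pass through untouched (illustrative only):

>>> drop_scheme('file', 'file:///tmp/repo')
'/tmp/repo'
>>> drop_scheme('http', 'file:///tmp/repo')
'file:///tmp/repo'
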
def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

def hidepassword(url):
    '''hide user credential in a url string'''
    scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
    netloc = re.sub('([^:]*):([^@]*)@(.*)', r'\1:***@\3', netloc)
    return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))

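Only the password half of the userinfo is masked; the user name stays visible and URLs without credentials are returned unchanged (illustrative sketch):

>>> hidepassword('http://user:secret@example.com/repo')
'http://user:***@example.com/repo'
>>> hidepassword('http://example.com/repo')
'http://example.com/repo'
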
def removeauth(url):
    '''remove all authentication information from a url string'''
    scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
    netloc = netloc[netloc.find('@')+1:]
    return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
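
removeauth() is the stronger variant of hidepassword(): everything up to and including the '@' is dropped from the network location (illustrative sketch):

>>> removeauth('http://user:secret@example.com/repo')
'http://example.com/repo'
>>> removeauth('http://example.com/repo')
'http://example.com/repo'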