cmdutil: hide child window created by win32 spawndetached()...
Patrick Mezard
r10240:3af4b39a default
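The hunk below adds a single util.hidewindow() call to the daemon child branch of cmdutil.service(), right after the startup lock file is removed, so the extra console window that win32 spawndetached() creates for the detached child gets hidden once the child is running. For context, here is a minimal, hedged sketch of what such a Windows helper can look like, assuming the pywin32 bindings (win32gui, win32process, win32con) are available; it illustrates the approach and is not a verbatim copy of Mercurial's win32.py.

# Hypothetical sketch of the win32 "hide my console window" helper that
# util.hidewindow() is expected to resolve to on Windows; assumes pywin32.
import win32con
import win32gui
import win32process

def hidewindow():
    """Hide every top-level window owned by the current process."""
    pid = win32process.GetCurrentProcessId()

    def callback(hwnd, curpid):
        # EnumWindows invokes this for each top-level window; hide the ones
        # whose owning process is ours (e.g. the console of a detached child).
        wpid = win32process.GetWindowThreadProcessId(hwnd)[1]
        if wpid == curpid:
            win32gui.ShowWindow(hwnd, win32con.SW_HIDE)

    win32gui.EnumWindows(callback, pid)

On non-Windows platforms the same name is presumably a no-op, since the os.setsid() call in service() already detaches the child from its controlling terminal.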
@@ -1,1172 +1,1173 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, errno, re, glob, tempfile, time
10 import os, sys, errno, re, glob, tempfile, time
11 import mdiff, bdiff, util, templater, patch, error, encoding, templatekw
11 import mdiff, bdiff, util, templater, patch, error, encoding, templatekw
12 import match as _match
12 import match as _match
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 def findpossible(cmd, table, strict=False):
16 def findpossible(cmd, table, strict=False):
17 """
17 """
18 Return cmd -> (aliases, command table entry)
18 Return cmd -> (aliases, command table entry)
19 for each matching command.
19 for each matching command.
20 Return debug commands (or their aliases) only if no normal command matches.
20 Return debug commands (or their aliases) only if no normal command matches.
21 """
21 """
22 choice = {}
22 choice = {}
23 debugchoice = {}
23 debugchoice = {}
24 for e in table.keys():
24 for e in table.keys():
25 aliases = e.lstrip("^").split("|")
25 aliases = e.lstrip("^").split("|")
26 found = None
26 found = None
27 if cmd in aliases:
27 if cmd in aliases:
28 found = cmd
28 found = cmd
29 elif not strict:
29 elif not strict:
30 for a in aliases:
30 for a in aliases:
31 if a.startswith(cmd):
31 if a.startswith(cmd):
32 found = a
32 found = a
33 break
33 break
34 if found is not None:
34 if found is not None:
35 if aliases[0].startswith("debug") or found.startswith("debug"):
35 if aliases[0].startswith("debug") or found.startswith("debug"):
36 debugchoice[found] = (aliases, table[e])
36 debugchoice[found] = (aliases, table[e])
37 else:
37 else:
38 choice[found] = (aliases, table[e])
38 choice[found] = (aliases, table[e])
39
39
40 if not choice and debugchoice:
40 if not choice and debugchoice:
41 choice = debugchoice
41 choice = debugchoice
42
42
43 return choice
43 return choice
44
44
45 def findcmd(cmd, table, strict=True):
45 def findcmd(cmd, table, strict=True):
46 """Return (aliases, command table entry) for command string."""
46 """Return (aliases, command table entry) for command string."""
47 choice = findpossible(cmd, table, strict)
47 choice = findpossible(cmd, table, strict)
48
48
49 if cmd in choice:
49 if cmd in choice:
50 return choice[cmd]
50 return choice[cmd]
51
51
52 if len(choice) > 1:
52 if len(choice) > 1:
53 clist = choice.keys()
53 clist = choice.keys()
54 clist.sort()
54 clist.sort()
55 raise error.AmbiguousCommand(cmd, clist)
55 raise error.AmbiguousCommand(cmd, clist)
56
56
57 if choice:
57 if choice:
58 return choice.values()[0]
58 return choice.values()[0]
59
59
60 raise error.UnknownCommand(cmd)
60 raise error.UnknownCommand(cmd)
61
61
62 def bail_if_changed(repo):
62 def bail_if_changed(repo):
63 if repo.dirstate.parents()[1] != nullid:
63 if repo.dirstate.parents()[1] != nullid:
64 raise util.Abort(_('outstanding uncommitted merge'))
64 raise util.Abort(_('outstanding uncommitted merge'))
65 modified, added, removed, deleted = repo.status()[:4]
65 modified, added, removed, deleted = repo.status()[:4]
66 if modified or added or removed or deleted:
66 if modified or added or removed or deleted:
67 raise util.Abort(_("outstanding uncommitted changes"))
67 raise util.Abort(_("outstanding uncommitted changes"))
68
68
69 def logmessage(opts):
69 def logmessage(opts):
70 """ get the log message according to -m and -l option """
70 """ get the log message according to -m and -l option """
71 message = opts.get('message')
71 message = opts.get('message')
72 logfile = opts.get('logfile')
72 logfile = opts.get('logfile')
73
73
74 if message and logfile:
74 if message and logfile:
75 raise util.Abort(_('options --message and --logfile are mutually '
75 raise util.Abort(_('options --message and --logfile are mutually '
76 'exclusive'))
76 'exclusive'))
77 if not message and logfile:
77 if not message and logfile:
78 try:
78 try:
79 if logfile == '-':
79 if logfile == '-':
80 message = sys.stdin.read()
80 message = sys.stdin.read()
81 else:
81 else:
82 message = open(logfile).read()
82 message = open(logfile).read()
83 except IOError, inst:
83 except IOError, inst:
84 raise util.Abort(_("can't read commit message '%s': %s") %
84 raise util.Abort(_("can't read commit message '%s': %s") %
85 (logfile, inst.strerror))
85 (logfile, inst.strerror))
86 return message
86 return message
87
87
88 def loglimit(opts):
88 def loglimit(opts):
89 """get the log limit according to option -l/--limit"""
89 """get the log limit according to option -l/--limit"""
90 limit = opts.get('limit')
90 limit = opts.get('limit')
91 if limit:
91 if limit:
92 try:
92 try:
93 limit = int(limit)
93 limit = int(limit)
94 except ValueError:
94 except ValueError:
95 raise util.Abort(_('limit must be a positive integer'))
95 raise util.Abort(_('limit must be a positive integer'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
97 else:
97 else:
98 limit = None
98 limit = None
99 return limit
99 return limit
100
100
101 def remoteui(src, opts):
101 def remoteui(src, opts):
102 'build a remote ui from ui or repo and opts'
102 'build a remote ui from ui or repo and opts'
103 if hasattr(src, 'baseui'): # looks like a repository
103 if hasattr(src, 'baseui'): # looks like a repository
104 dst = src.baseui.copy() # drop repo-specific config
104 dst = src.baseui.copy() # drop repo-specific config
105 src = src.ui # copy target options from repo
105 src = src.ui # copy target options from repo
106 else: # assume it's a global ui object
106 else: # assume it's a global ui object
107 dst = src.copy() # keep all global options
107 dst = src.copy() # keep all global options
108
108
109 # copy ssh-specific options
109 # copy ssh-specific options
110 for o in 'ssh', 'remotecmd':
110 for o in 'ssh', 'remotecmd':
111 v = opts.get(o) or src.config('ui', o)
111 v = opts.get(o) or src.config('ui', o)
112 if v:
112 if v:
113 dst.setconfig("ui", o, v)
113 dst.setconfig("ui", o, v)
114
114
115 # copy bundle-specific options
115 # copy bundle-specific options
116 r = src.config('bundle', 'mainreporoot')
116 r = src.config('bundle', 'mainreporoot')
117 if r:
117 if r:
118 dst.setconfig('bundle', 'mainreporoot', r)
118 dst.setconfig('bundle', 'mainreporoot', r)
119
119
120 # copy auth section settings
120 # copy auth section settings
121 for key, val in src.configitems('auth'):
121 for key, val in src.configitems('auth'):
122 dst.setconfig('auth', key, val)
122 dst.setconfig('auth', key, val)
123
123
124 return dst
124 return dst
125
125
126 def revpair(repo, revs):
126 def revpair(repo, revs):
127 '''return pair of nodes, given list of revisions. second item can
127 '''return pair of nodes, given list of revisions. second item can
128 be None, meaning use working dir.'''
128 be None, meaning use working dir.'''
129
129
130 def revfix(repo, val, defval):
130 def revfix(repo, val, defval):
131 if not val and val != 0 and defval is not None:
131 if not val and val != 0 and defval is not None:
132 val = defval
132 val = defval
133 return repo.lookup(val)
133 return repo.lookup(val)
134
134
135 if not revs:
135 if not revs:
136 return repo.dirstate.parents()[0], None
136 return repo.dirstate.parents()[0], None
137 end = None
137 end = None
138 if len(revs) == 1:
138 if len(revs) == 1:
139 if revrangesep in revs[0]:
139 if revrangesep in revs[0]:
140 start, end = revs[0].split(revrangesep, 1)
140 start, end = revs[0].split(revrangesep, 1)
141 start = revfix(repo, start, 0)
141 start = revfix(repo, start, 0)
142 end = revfix(repo, end, len(repo) - 1)
142 end = revfix(repo, end, len(repo) - 1)
143 else:
143 else:
144 start = revfix(repo, revs[0], None)
144 start = revfix(repo, revs[0], None)
145 elif len(revs) == 2:
145 elif len(revs) == 2:
146 if revrangesep in revs[0] or revrangesep in revs[1]:
146 if revrangesep in revs[0] or revrangesep in revs[1]:
147 raise util.Abort(_('too many revisions specified'))
147 raise util.Abort(_('too many revisions specified'))
148 start = revfix(repo, revs[0], None)
148 start = revfix(repo, revs[0], None)
149 end = revfix(repo, revs[1], None)
149 end = revfix(repo, revs[1], None)
150 else:
150 else:
151 raise util.Abort(_('too many revisions specified'))
151 raise util.Abort(_('too many revisions specified'))
152 return start, end
152 return start, end
153
153
154 def revrange(repo, revs):
154 def revrange(repo, revs):
155 """Yield revision as strings from a list of revision specifications."""
155 """Yield revision as strings from a list of revision specifications."""
156
156
157 def revfix(repo, val, defval):
157 def revfix(repo, val, defval):
158 if not val and val != 0 and defval is not None:
158 if not val and val != 0 and defval is not None:
159 return defval
159 return defval
160 return repo.changelog.rev(repo.lookup(val))
160 return repo.changelog.rev(repo.lookup(val))
161
161
162 seen, l = set(), []
162 seen, l = set(), []
163 for spec in revs:
163 for spec in revs:
164 if revrangesep in spec:
164 if revrangesep in spec:
165 start, end = spec.split(revrangesep, 1)
165 start, end = spec.split(revrangesep, 1)
166 start = revfix(repo, start, 0)
166 start = revfix(repo, start, 0)
167 end = revfix(repo, end, len(repo) - 1)
167 end = revfix(repo, end, len(repo) - 1)
168 step = start > end and -1 or 1
168 step = start > end and -1 or 1
169 for rev in xrange(start, end+step, step):
169 for rev in xrange(start, end+step, step):
170 if rev in seen:
170 if rev in seen:
171 continue
171 continue
172 seen.add(rev)
172 seen.add(rev)
173 l.append(rev)
173 l.append(rev)
174 else:
174 else:
175 rev = revfix(repo, spec, None)
175 rev = revfix(repo, spec, None)
176 if rev in seen:
176 if rev in seen:
177 continue
177 continue
178 seen.add(rev)
178 seen.add(rev)
179 l.append(rev)
179 l.append(rev)
180
180
181 return l
181 return l
182
182
183 def make_filename(repo, pat, node,
183 def make_filename(repo, pat, node,
184 total=None, seqno=None, revwidth=None, pathname=None):
184 total=None, seqno=None, revwidth=None, pathname=None):
185 node_expander = {
185 node_expander = {
186 'H': lambda: hex(node),
186 'H': lambda: hex(node),
187 'R': lambda: str(repo.changelog.rev(node)),
187 'R': lambda: str(repo.changelog.rev(node)),
188 'h': lambda: short(node),
188 'h': lambda: short(node),
189 }
189 }
190 expander = {
190 expander = {
191 '%': lambda: '%',
191 '%': lambda: '%',
192 'b': lambda: os.path.basename(repo.root),
192 'b': lambda: os.path.basename(repo.root),
193 }
193 }
194
194
195 try:
195 try:
196 if node:
196 if node:
197 expander.update(node_expander)
197 expander.update(node_expander)
198 if node:
198 if node:
199 expander['r'] = (lambda:
199 expander['r'] = (lambda:
200 str(repo.changelog.rev(node)).zfill(revwidth or 0))
200 str(repo.changelog.rev(node)).zfill(revwidth or 0))
201 if total is not None:
201 if total is not None:
202 expander['N'] = lambda: str(total)
202 expander['N'] = lambda: str(total)
203 if seqno is not None:
203 if seqno is not None:
204 expander['n'] = lambda: str(seqno)
204 expander['n'] = lambda: str(seqno)
205 if total is not None and seqno is not None:
205 if total is not None and seqno is not None:
206 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
206 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
207 if pathname is not None:
207 if pathname is not None:
208 expander['s'] = lambda: os.path.basename(pathname)
208 expander['s'] = lambda: os.path.basename(pathname)
209 expander['d'] = lambda: os.path.dirname(pathname) or '.'
209 expander['d'] = lambda: os.path.dirname(pathname) or '.'
210 expander['p'] = lambda: pathname
210 expander['p'] = lambda: pathname
211
211
212 newname = []
212 newname = []
213 patlen = len(pat)
213 patlen = len(pat)
214 i = 0
214 i = 0
215 while i < patlen:
215 while i < patlen:
216 c = pat[i]
216 c = pat[i]
217 if c == '%':
217 if c == '%':
218 i += 1
218 i += 1
219 c = pat[i]
219 c = pat[i]
220 c = expander[c]()
220 c = expander[c]()
221 newname.append(c)
221 newname.append(c)
222 i += 1
222 i += 1
223 return ''.join(newname)
223 return ''.join(newname)
224 except KeyError, inst:
224 except KeyError, inst:
225 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
225 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
226 inst.args[0])
226 inst.args[0])
227
227
228 def make_file(repo, pat, node=None,
228 def make_file(repo, pat, node=None,
229 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
229 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
230
230
231 writable = 'w' in mode or 'a' in mode
231 writable = 'w' in mode or 'a' in mode
232
232
233 if not pat or pat == '-':
233 if not pat or pat == '-':
234 return writable and sys.stdout or sys.stdin
234 return writable and sys.stdout or sys.stdin
235 if hasattr(pat, 'write') and writable:
235 if hasattr(pat, 'write') and writable:
236 return pat
236 return pat
237 if hasattr(pat, 'read') and 'r' in mode:
237 if hasattr(pat, 'read') and 'r' in mode:
238 return pat
238 return pat
239 return open(make_filename(repo, pat, node, total, seqno, revwidth,
239 return open(make_filename(repo, pat, node, total, seqno, revwidth,
240 pathname),
240 pathname),
241 mode)
241 mode)
242
242
243 def expandpats(pats):
243 def expandpats(pats):
244 if not util.expandglobs:
244 if not util.expandglobs:
245 return list(pats)
245 return list(pats)
246 ret = []
246 ret = []
247 for p in pats:
247 for p in pats:
248 kind, name = _match._patsplit(p, None)
248 kind, name = _match._patsplit(p, None)
249 if kind is None:
249 if kind is None:
250 try:
250 try:
251 globbed = glob.glob(name)
251 globbed = glob.glob(name)
252 except re.error:
252 except re.error:
253 globbed = [name]
253 globbed = [name]
254 if globbed:
254 if globbed:
255 ret.extend(globbed)
255 ret.extend(globbed)
256 continue
256 continue
257 ret.append(p)
257 ret.append(p)
258 return ret
258 return ret
259
259
260 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
260 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
261 if not globbed and default == 'relpath':
261 if not globbed and default == 'relpath':
262 pats = expandpats(pats or [])
262 pats = expandpats(pats or [])
263 m = _match.match(repo.root, repo.getcwd(), pats,
263 m = _match.match(repo.root, repo.getcwd(), pats,
264 opts.get('include'), opts.get('exclude'), default)
264 opts.get('include'), opts.get('exclude'), default)
265 def badfn(f, msg):
265 def badfn(f, msg):
266 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
266 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
267 m.bad = badfn
267 m.bad = badfn
268 return m
268 return m
269
269
270 def matchall(repo):
270 def matchall(repo):
271 return _match.always(repo.root, repo.getcwd())
271 return _match.always(repo.root, repo.getcwd())
272
272
273 def matchfiles(repo, files):
273 def matchfiles(repo, files):
274 return _match.exact(repo.root, repo.getcwd(), files)
274 return _match.exact(repo.root, repo.getcwd(), files)
275
275
276 def findrenames(repo, added, removed, threshold):
276 def findrenames(repo, added, removed, threshold):
277 '''find renamed files -- yields (before, after, score) tuples'''
277 '''find renamed files -- yields (before, after, score) tuples'''
278 copies = {}
278 copies = {}
279 ctx = repo['.']
279 ctx = repo['.']
280 for r in removed:
280 for r in removed:
281 if r not in ctx:
281 if r not in ctx:
282 continue
282 continue
283 fctx = ctx.filectx(r)
283 fctx = ctx.filectx(r)
284
284
285 def score(text):
285 def score(text):
286 if not len(text):
286 if not len(text):
287 return 0.0
287 return 0.0
288 if not fctx.cmp(text):
288 if not fctx.cmp(text):
289 return 1.0
289 return 1.0
290 if threshold == 1.0:
290 if threshold == 1.0:
291 return 0.0
291 return 0.0
292 orig = fctx.data()
292 orig = fctx.data()
293 # bdiff.blocks() returns blocks of matching lines
293 # bdiff.blocks() returns blocks of matching lines
294 # count the number of bytes in each
294 # count the number of bytes in each
295 equal = 0
295 equal = 0
296 alines = mdiff.splitnewlines(text)
296 alines = mdiff.splitnewlines(text)
297 matches = bdiff.blocks(text, orig)
297 matches = bdiff.blocks(text, orig)
298 for x1, x2, y1, y2 in matches:
298 for x1, x2, y1, y2 in matches:
299 for line in alines[x1:x2]:
299 for line in alines[x1:x2]:
300 equal += len(line)
300 equal += len(line)
301
301
302 lengths = len(text) + len(orig)
302 lengths = len(text) + len(orig)
303 return equal * 2.0 / lengths
303 return equal * 2.0 / lengths
304
304
305 for a in added:
305 for a in added:
306 bestscore = copies.get(a, (None, threshold))[1]
306 bestscore = copies.get(a, (None, threshold))[1]
307 myscore = score(repo.wread(a))
307 myscore = score(repo.wread(a))
308 if myscore >= bestscore:
308 if myscore >= bestscore:
309 copies[a] = (r, myscore)
309 copies[a] = (r, myscore)
310
310
311 for dest, v in copies.iteritems():
311 for dest, v in copies.iteritems():
312 source, score = v
312 source, score = v
313 yield source, dest, score
313 yield source, dest, score
314
314
315 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
315 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
316 if dry_run is None:
316 if dry_run is None:
317 dry_run = opts.get('dry_run')
317 dry_run = opts.get('dry_run')
318 if similarity is None:
318 if similarity is None:
319 similarity = float(opts.get('similarity') or 0)
319 similarity = float(opts.get('similarity') or 0)
320 # we'd use status here, except handling of symlinks and ignore is tricky
320 # we'd use status here, except handling of symlinks and ignore is tricky
321 added, unknown, deleted, removed = [], [], [], []
321 added, unknown, deleted, removed = [], [], [], []
322 audit_path = util.path_auditor(repo.root)
322 audit_path = util.path_auditor(repo.root)
323 m = match(repo, pats, opts)
323 m = match(repo, pats, opts)
324 for abs in repo.walk(m):
324 for abs in repo.walk(m):
325 target = repo.wjoin(abs)
325 target = repo.wjoin(abs)
326 good = True
326 good = True
327 try:
327 try:
328 audit_path(abs)
328 audit_path(abs)
329 except:
329 except:
330 good = False
330 good = False
331 rel = m.rel(abs)
331 rel = m.rel(abs)
332 exact = m.exact(abs)
332 exact = m.exact(abs)
333 if good and abs not in repo.dirstate:
333 if good and abs not in repo.dirstate:
334 unknown.append(abs)
334 unknown.append(abs)
335 if repo.ui.verbose or not exact:
335 if repo.ui.verbose or not exact:
336 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
336 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
337 elif repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
337 elif repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
338 or (os.path.isdir(target) and not os.path.islink(target))):
338 or (os.path.isdir(target) and not os.path.islink(target))):
339 deleted.append(abs)
339 deleted.append(abs)
340 if repo.ui.verbose or not exact:
340 if repo.ui.verbose or not exact:
341 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
341 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
342 # for finding renames
342 # for finding renames
343 elif repo.dirstate[abs] == 'r':
343 elif repo.dirstate[abs] == 'r':
344 removed.append(abs)
344 removed.append(abs)
345 elif repo.dirstate[abs] == 'a':
345 elif repo.dirstate[abs] == 'a':
346 added.append(abs)
346 added.append(abs)
347 if not dry_run:
347 if not dry_run:
348 repo.remove(deleted)
348 repo.remove(deleted)
349 repo.add(unknown)
349 repo.add(unknown)
350 if similarity > 0:
350 if similarity > 0:
351 for old, new, score in findrenames(repo, added + unknown,
351 for old, new, score in findrenames(repo, added + unknown,
352 removed + deleted, similarity):
352 removed + deleted, similarity):
353 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
353 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
354 repo.ui.status(_('recording removal of %s as rename to %s '
354 repo.ui.status(_('recording removal of %s as rename to %s '
355 '(%d%% similar)\n') %
355 '(%d%% similar)\n') %
356 (m.rel(old), m.rel(new), score * 100))
356 (m.rel(old), m.rel(new), score * 100))
357 if not dry_run:
357 if not dry_run:
358 repo.copy(old, new)
358 repo.copy(old, new)
359
359
360 def copy(ui, repo, pats, opts, rename=False):
360 def copy(ui, repo, pats, opts, rename=False):
361 # called with the repo lock held
361 # called with the repo lock held
362 #
362 #
363 # hgsep => pathname that uses "/" to separate directories
363 # hgsep => pathname that uses "/" to separate directories
364 # ossep => pathname that uses os.sep to separate directories
364 # ossep => pathname that uses os.sep to separate directories
365 cwd = repo.getcwd()
365 cwd = repo.getcwd()
366 targets = {}
366 targets = {}
367 after = opts.get("after")
367 after = opts.get("after")
368 dryrun = opts.get("dry_run")
368 dryrun = opts.get("dry_run")
369
369
370 def walkpat(pat):
370 def walkpat(pat):
371 srcs = []
371 srcs = []
372 m = match(repo, [pat], opts, globbed=True)
372 m = match(repo, [pat], opts, globbed=True)
373 for abs in repo.walk(m):
373 for abs in repo.walk(m):
374 state = repo.dirstate[abs]
374 state = repo.dirstate[abs]
375 rel = m.rel(abs)
375 rel = m.rel(abs)
376 exact = m.exact(abs)
376 exact = m.exact(abs)
377 if state in '?r':
377 if state in '?r':
378 if exact and state == '?':
378 if exact and state == '?':
379 ui.warn(_('%s: not copying - file is not managed\n') % rel)
379 ui.warn(_('%s: not copying - file is not managed\n') % rel)
380 if exact and state == 'r':
380 if exact and state == 'r':
381 ui.warn(_('%s: not copying - file has been marked for'
381 ui.warn(_('%s: not copying - file has been marked for'
382 ' remove\n') % rel)
382 ' remove\n') % rel)
383 continue
383 continue
384 # abs: hgsep
384 # abs: hgsep
385 # rel: ossep
385 # rel: ossep
386 srcs.append((abs, rel, exact))
386 srcs.append((abs, rel, exact))
387 return srcs
387 return srcs
388
388
389 # abssrc: hgsep
389 # abssrc: hgsep
390 # relsrc: ossep
390 # relsrc: ossep
391 # otarget: ossep
391 # otarget: ossep
392 def copyfile(abssrc, relsrc, otarget, exact):
392 def copyfile(abssrc, relsrc, otarget, exact):
393 abstarget = util.canonpath(repo.root, cwd, otarget)
393 abstarget = util.canonpath(repo.root, cwd, otarget)
394 reltarget = repo.pathto(abstarget, cwd)
394 reltarget = repo.pathto(abstarget, cwd)
395 target = repo.wjoin(abstarget)
395 target = repo.wjoin(abstarget)
396 src = repo.wjoin(abssrc)
396 src = repo.wjoin(abssrc)
397 state = repo.dirstate[abstarget]
397 state = repo.dirstate[abstarget]
398
398
399 # check for collisions
399 # check for collisions
400 prevsrc = targets.get(abstarget)
400 prevsrc = targets.get(abstarget)
401 if prevsrc is not None:
401 if prevsrc is not None:
402 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
402 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
403 (reltarget, repo.pathto(abssrc, cwd),
403 (reltarget, repo.pathto(abssrc, cwd),
404 repo.pathto(prevsrc, cwd)))
404 repo.pathto(prevsrc, cwd)))
405 return
405 return
406
406
407 # check for overwrites
407 # check for overwrites
408 exists = os.path.exists(target)
408 exists = os.path.exists(target)
409 if not after and exists or after and state in 'mn':
409 if not after and exists or after and state in 'mn':
410 if not opts['force']:
410 if not opts['force']:
411 ui.warn(_('%s: not overwriting - file exists\n') %
411 ui.warn(_('%s: not overwriting - file exists\n') %
412 reltarget)
412 reltarget)
413 return
413 return
414
414
415 if after:
415 if after:
416 if not exists:
416 if not exists:
417 return
417 return
418 elif not dryrun:
418 elif not dryrun:
419 try:
419 try:
420 if exists:
420 if exists:
421 os.unlink(target)
421 os.unlink(target)
422 targetdir = os.path.dirname(target) or '.'
422 targetdir = os.path.dirname(target) or '.'
423 if not os.path.isdir(targetdir):
423 if not os.path.isdir(targetdir):
424 os.makedirs(targetdir)
424 os.makedirs(targetdir)
425 util.copyfile(src, target)
425 util.copyfile(src, target)
426 except IOError, inst:
426 except IOError, inst:
427 if inst.errno == errno.ENOENT:
427 if inst.errno == errno.ENOENT:
428 ui.warn(_('%s: deleted in working copy\n') % relsrc)
428 ui.warn(_('%s: deleted in working copy\n') % relsrc)
429 else:
429 else:
430 ui.warn(_('%s: cannot copy - %s\n') %
430 ui.warn(_('%s: cannot copy - %s\n') %
431 (relsrc, inst.strerror))
431 (relsrc, inst.strerror))
432 return True # report a failure
432 return True # report a failure
433
433
434 if ui.verbose or not exact:
434 if ui.verbose or not exact:
435 if rename:
435 if rename:
436 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
436 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
437 else:
437 else:
438 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
438 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
439
439
440 targets[abstarget] = abssrc
440 targets[abstarget] = abssrc
441
441
442 # fix up dirstate
442 # fix up dirstate
443 origsrc = repo.dirstate.copied(abssrc) or abssrc
443 origsrc = repo.dirstate.copied(abssrc) or abssrc
444 if abstarget == origsrc: # copying back a copy?
444 if abstarget == origsrc: # copying back a copy?
445 if state not in 'mn' and not dryrun:
445 if state not in 'mn' and not dryrun:
446 repo.dirstate.normallookup(abstarget)
446 repo.dirstate.normallookup(abstarget)
447 else:
447 else:
448 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
448 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
449 if not ui.quiet:
449 if not ui.quiet:
450 ui.warn(_("%s has not been committed yet, so no copy "
450 ui.warn(_("%s has not been committed yet, so no copy "
451 "data will be stored for %s.\n")
451 "data will be stored for %s.\n")
452 % (repo.pathto(origsrc, cwd), reltarget))
452 % (repo.pathto(origsrc, cwd), reltarget))
453 if repo.dirstate[abstarget] in '?r' and not dryrun:
453 if repo.dirstate[abstarget] in '?r' and not dryrun:
454 repo.add([abstarget])
454 repo.add([abstarget])
455 elif not dryrun:
455 elif not dryrun:
456 repo.copy(origsrc, abstarget)
456 repo.copy(origsrc, abstarget)
457
457
458 if rename and not dryrun:
458 if rename and not dryrun:
459 repo.remove([abssrc], not after)
459 repo.remove([abssrc], not after)
460
460
461 # pat: ossep
461 # pat: ossep
462 # dest ossep
462 # dest ossep
463 # srcs: list of (hgsep, hgsep, ossep, bool)
463 # srcs: list of (hgsep, hgsep, ossep, bool)
464 # return: function that takes hgsep and returns ossep
464 # return: function that takes hgsep and returns ossep
465 def targetpathfn(pat, dest, srcs):
465 def targetpathfn(pat, dest, srcs):
466 if os.path.isdir(pat):
466 if os.path.isdir(pat):
467 abspfx = util.canonpath(repo.root, cwd, pat)
467 abspfx = util.canonpath(repo.root, cwd, pat)
468 abspfx = util.localpath(abspfx)
468 abspfx = util.localpath(abspfx)
469 if destdirexists:
469 if destdirexists:
470 striplen = len(os.path.split(abspfx)[0])
470 striplen = len(os.path.split(abspfx)[0])
471 else:
471 else:
472 striplen = len(abspfx)
472 striplen = len(abspfx)
473 if striplen:
473 if striplen:
474 striplen += len(os.sep)
474 striplen += len(os.sep)
475 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
475 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
476 elif destdirexists:
476 elif destdirexists:
477 res = lambda p: os.path.join(dest,
477 res = lambda p: os.path.join(dest,
478 os.path.basename(util.localpath(p)))
478 os.path.basename(util.localpath(p)))
479 else:
479 else:
480 res = lambda p: dest
480 res = lambda p: dest
481 return res
481 return res
482
482
483 # pat: ossep
483 # pat: ossep
484 # dest ossep
484 # dest ossep
485 # srcs: list of (hgsep, hgsep, ossep, bool)
485 # srcs: list of (hgsep, hgsep, ossep, bool)
486 # return: function that takes hgsep and returns ossep
486 # return: function that takes hgsep and returns ossep
487 def targetpathafterfn(pat, dest, srcs):
487 def targetpathafterfn(pat, dest, srcs):
488 if _match.patkind(pat):
488 if _match.patkind(pat):
489 # a mercurial pattern
489 # a mercurial pattern
490 res = lambda p: os.path.join(dest,
490 res = lambda p: os.path.join(dest,
491 os.path.basename(util.localpath(p)))
491 os.path.basename(util.localpath(p)))
492 else:
492 else:
493 abspfx = util.canonpath(repo.root, cwd, pat)
493 abspfx = util.canonpath(repo.root, cwd, pat)
494 if len(abspfx) < len(srcs[0][0]):
494 if len(abspfx) < len(srcs[0][0]):
495 # A directory. Either the target path contains the last
495 # A directory. Either the target path contains the last
496 # component of the source path or it does not.
496 # component of the source path or it does not.
497 def evalpath(striplen):
497 def evalpath(striplen):
498 score = 0
498 score = 0
499 for s in srcs:
499 for s in srcs:
500 t = os.path.join(dest, util.localpath(s[0])[striplen:])
500 t = os.path.join(dest, util.localpath(s[0])[striplen:])
501 if os.path.exists(t):
501 if os.path.exists(t):
502 score += 1
502 score += 1
503 return score
503 return score
504
504
505 abspfx = util.localpath(abspfx)
505 abspfx = util.localpath(abspfx)
506 striplen = len(abspfx)
506 striplen = len(abspfx)
507 if striplen:
507 if striplen:
508 striplen += len(os.sep)
508 striplen += len(os.sep)
509 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
509 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
510 score = evalpath(striplen)
510 score = evalpath(striplen)
511 striplen1 = len(os.path.split(abspfx)[0])
511 striplen1 = len(os.path.split(abspfx)[0])
512 if striplen1:
512 if striplen1:
513 striplen1 += len(os.sep)
513 striplen1 += len(os.sep)
514 if evalpath(striplen1) > score:
514 if evalpath(striplen1) > score:
515 striplen = striplen1
515 striplen = striplen1
516 res = lambda p: os.path.join(dest,
516 res = lambda p: os.path.join(dest,
517 util.localpath(p)[striplen:])
517 util.localpath(p)[striplen:])
518 else:
518 else:
519 # a file
519 # a file
520 if destdirexists:
520 if destdirexists:
521 res = lambda p: os.path.join(dest,
521 res = lambda p: os.path.join(dest,
522 os.path.basename(util.localpath(p)))
522 os.path.basename(util.localpath(p)))
523 else:
523 else:
524 res = lambda p: dest
524 res = lambda p: dest
525 return res
525 return res
526
526
527
527
528 pats = expandpats(pats)
528 pats = expandpats(pats)
529 if not pats:
529 if not pats:
530 raise util.Abort(_('no source or destination specified'))
530 raise util.Abort(_('no source or destination specified'))
531 if len(pats) == 1:
531 if len(pats) == 1:
532 raise util.Abort(_('no destination specified'))
532 raise util.Abort(_('no destination specified'))
533 dest = pats.pop()
533 dest = pats.pop()
534 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
534 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
535 if not destdirexists:
535 if not destdirexists:
536 if len(pats) > 1 or _match.patkind(pats[0]):
536 if len(pats) > 1 or _match.patkind(pats[0]):
537 raise util.Abort(_('with multiple sources, destination must be an '
537 raise util.Abort(_('with multiple sources, destination must be an '
538 'existing directory'))
538 'existing directory'))
539 if util.endswithsep(dest):
539 if util.endswithsep(dest):
540 raise util.Abort(_('destination %s is not a directory') % dest)
540 raise util.Abort(_('destination %s is not a directory') % dest)
541
541
542 tfn = targetpathfn
542 tfn = targetpathfn
543 if after:
543 if after:
544 tfn = targetpathafterfn
544 tfn = targetpathafterfn
545 copylist = []
545 copylist = []
546 for pat in pats:
546 for pat in pats:
547 srcs = walkpat(pat)
547 srcs = walkpat(pat)
548 if not srcs:
548 if not srcs:
549 continue
549 continue
550 copylist.append((tfn(pat, dest, srcs), srcs))
550 copylist.append((tfn(pat, dest, srcs), srcs))
551 if not copylist:
551 if not copylist:
552 raise util.Abort(_('no files to copy'))
552 raise util.Abort(_('no files to copy'))
553
553
554 errors = 0
554 errors = 0
555 for targetpath, srcs in copylist:
555 for targetpath, srcs in copylist:
556 for abssrc, relsrc, exact in srcs:
556 for abssrc, relsrc, exact in srcs:
557 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
557 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
558 errors += 1
558 errors += 1
559
559
560 if errors:
560 if errors:
561 ui.warn(_('(consider using --after)\n'))
561 ui.warn(_('(consider using --after)\n'))
562
562
563 return errors
563 return errors
564
564
565 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
565 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
566 runargs=None, appendpid=False):
566 runargs=None, appendpid=False):
567 '''Run a command as a service.'''
567 '''Run a command as a service.'''
568
568
569 if opts['daemon'] and not opts['daemon_pipefds']:
569 if opts['daemon'] and not opts['daemon_pipefds']:
570 # Signal child process startup with file removal
570 # Signal child process startup with file removal
571 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
571 lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
572 os.close(lockfd)
572 os.close(lockfd)
573 try:
573 try:
574 if not runargs:
574 if not runargs:
575 runargs = util.hgcmd() + sys.argv[1:]
575 runargs = util.hgcmd() + sys.argv[1:]
576 runargs.append('--daemon-pipefds=%s' % lockpath)
576 runargs.append('--daemon-pipefds=%s' % lockpath)
577 # Don't pass --cwd to the child process, because we've already
577 # Don't pass --cwd to the child process, because we've already
578 # changed directory.
578 # changed directory.
579 for i in xrange(1,len(runargs)):
579 for i in xrange(1,len(runargs)):
580 if runargs[i].startswith('--cwd='):
580 if runargs[i].startswith('--cwd='):
581 del runargs[i]
581 del runargs[i]
582 break
582 break
583 elif runargs[i].startswith('--cwd'):
583 elif runargs[i].startswith('--cwd'):
584 del runargs[i:i+2]
584 del runargs[i:i+2]
585 break
585 break
586 pid = util.spawndetached(runargs)
586 pid = util.spawndetached(runargs)
587 while os.path.exists(lockpath):
587 while os.path.exists(lockpath):
588 time.sleep(0.1)
588 time.sleep(0.1)
589 finally:
589 finally:
590 try:
590 try:
591 os.unlink(lockpath)
591 os.unlink(lockpath)
592 except OSError, e:
592 except OSError, e:
593 if e.errno != errno.ENOENT:
593 if e.errno != errno.ENOENT:
594 raise
594 raise
595 if parentfn:
595 if parentfn:
596 return parentfn(pid)
596 return parentfn(pid)
597 else:
597 else:
598 return
598 return
599
599
600 if initfn:
600 if initfn:
601 initfn()
601 initfn()
602
602
603 if opts['pid_file']:
603 if opts['pid_file']:
604 mode = appendpid and 'a' or 'w'
604 mode = appendpid and 'a' or 'w'
605 fp = open(opts['pid_file'], mode)
605 fp = open(opts['pid_file'], mode)
606 fp.write(str(os.getpid()) + '\n')
606 fp.write(str(os.getpid()) + '\n')
607 fp.close()
607 fp.close()
608
608
609 if opts['daemon_pipefds']:
609 if opts['daemon_pipefds']:
610 lockpath = opts['daemon_pipefds']
610 lockpath = opts['daemon_pipefds']
611 try:
611 try:
612 os.setsid()
612 os.setsid()
613 except AttributeError:
613 except AttributeError:
614 pass
614 pass
615 os.unlink(lockpath)
615 os.unlink(lockpath)
616 util.hidewindow()
616 sys.stdout.flush()
617 sys.stdout.flush()
617 sys.stderr.flush()
618 sys.stderr.flush()
618
619
619 nullfd = os.open(util.nulldev, os.O_RDWR)
620 nullfd = os.open(util.nulldev, os.O_RDWR)
620 logfilefd = nullfd
621 logfilefd = nullfd
621 if logfile:
622 if logfile:
622 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
623 logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
623 os.dup2(nullfd, 0)
624 os.dup2(nullfd, 0)
624 os.dup2(logfilefd, 1)
625 os.dup2(logfilefd, 1)
625 os.dup2(logfilefd, 2)
626 os.dup2(logfilefd, 2)
626 if nullfd not in (0, 1, 2):
627 if nullfd not in (0, 1, 2):
627 os.close(nullfd)
628 os.close(nullfd)
628 if logfile and logfilefd not in (0, 1, 2):
629 if logfile and logfilefd not in (0, 1, 2):
629 os.close(logfilefd)
630 os.close(logfilefd)
630
631
631 if runfn:
632 if runfn:
632 return runfn()
633 return runfn()
633
634
634 class changeset_printer(object):
635 class changeset_printer(object):
635 '''show changeset information when templating not requested.'''
636 '''show changeset information when templating not requested.'''
636
637
637 def __init__(self, ui, repo, patch, diffopts, buffered):
638 def __init__(self, ui, repo, patch, diffopts, buffered):
638 self.ui = ui
639 self.ui = ui
639 self.repo = repo
640 self.repo = repo
640 self.buffered = buffered
641 self.buffered = buffered
641 self.patch = patch
642 self.patch = patch
642 self.diffopts = diffopts
643 self.diffopts = diffopts
643 self.header = {}
644 self.header = {}
644 self.hunk = {}
645 self.hunk = {}
645 self.lastheader = None
646 self.lastheader = None
646 self.footer = None
647 self.footer = None
647
648
648 def flush(self, rev):
649 def flush(self, rev):
649 if rev in self.header:
650 if rev in self.header:
650 h = self.header[rev]
651 h = self.header[rev]
651 if h != self.lastheader:
652 if h != self.lastheader:
652 self.lastheader = h
653 self.lastheader = h
653 self.ui.write(h)
654 self.ui.write(h)
654 del self.header[rev]
655 del self.header[rev]
655 if rev in self.hunk:
656 if rev in self.hunk:
656 self.ui.write(self.hunk[rev])
657 self.ui.write(self.hunk[rev])
657 del self.hunk[rev]
658 del self.hunk[rev]
658 return 1
659 return 1
659 return 0
660 return 0
660
661
661 def close(self):
662 def close(self):
662 if self.footer:
663 if self.footer:
663 self.ui.write(self.footer)
664 self.ui.write(self.footer)
664
665
665 def show(self, ctx, copies=None, **props):
666 def show(self, ctx, copies=None, **props):
666 if self.buffered:
667 if self.buffered:
667 self.ui.pushbuffer()
668 self.ui.pushbuffer()
668 self._show(ctx, copies, props)
669 self._show(ctx, copies, props)
669 self.hunk[ctx.rev()] = self.ui.popbuffer()
670 self.hunk[ctx.rev()] = self.ui.popbuffer()
670 else:
671 else:
671 self._show(ctx, copies, props)
672 self._show(ctx, copies, props)
672
673
673 def _show(self, ctx, copies, props):
674 def _show(self, ctx, copies, props):
674 '''show a single changeset or file revision'''
675 '''show a single changeset or file revision'''
675 changenode = ctx.node()
676 changenode = ctx.node()
676 rev = ctx.rev()
677 rev = ctx.rev()
677
678
678 if self.ui.quiet:
679 if self.ui.quiet:
679 self.ui.write("%d:%s\n" % (rev, short(changenode)))
680 self.ui.write("%d:%s\n" % (rev, short(changenode)))
680 return
681 return
681
682
682 log = self.repo.changelog
683 log = self.repo.changelog
683 date = util.datestr(ctx.date())
684 date = util.datestr(ctx.date())
684
685
685 hexfunc = self.ui.debugflag and hex or short
686 hexfunc = self.ui.debugflag and hex or short
686
687
687 parents = [(p, hexfunc(log.node(p)))
688 parents = [(p, hexfunc(log.node(p)))
688 for p in self._meaningful_parentrevs(log, rev)]
689 for p in self._meaningful_parentrevs(log, rev)]
689
690
690 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
691 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
691
692
692 branch = ctx.branch()
693 branch = ctx.branch()
693 # don't show the default branch name
694 # don't show the default branch name
694 if branch != 'default':
695 if branch != 'default':
695 branch = encoding.tolocal(branch)
696 branch = encoding.tolocal(branch)
696 self.ui.write(_("branch: %s\n") % branch)
697 self.ui.write(_("branch: %s\n") % branch)
697 for tag in self.repo.nodetags(changenode):
698 for tag in self.repo.nodetags(changenode):
698 self.ui.write(_("tag: %s\n") % tag)
699 self.ui.write(_("tag: %s\n") % tag)
699 for parent in parents:
700 for parent in parents:
700 self.ui.write(_("parent: %d:%s\n") % parent)
701 self.ui.write(_("parent: %d:%s\n") % parent)
701
702
702 if self.ui.debugflag:
703 if self.ui.debugflag:
703 mnode = ctx.manifestnode()
704 mnode = ctx.manifestnode()
704 self.ui.write(_("manifest: %d:%s\n") %
705 self.ui.write(_("manifest: %d:%s\n") %
705 (self.repo.manifest.rev(mnode), hex(mnode)))
706 (self.repo.manifest.rev(mnode), hex(mnode)))
706 self.ui.write(_("user: %s\n") % ctx.user())
707 self.ui.write(_("user: %s\n") % ctx.user())
707 self.ui.write(_("date: %s\n") % date)
708 self.ui.write(_("date: %s\n") % date)
708
709
709 if self.ui.debugflag:
710 if self.ui.debugflag:
710 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
711 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
711 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
712 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
712 files):
713 files):
713 if value:
714 if value:
714 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
715 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
715 elif ctx.files() and self.ui.verbose:
716 elif ctx.files() and self.ui.verbose:
716 self.ui.write(_("files: %s\n") % " ".join(ctx.files()))
717 self.ui.write(_("files: %s\n") % " ".join(ctx.files()))
717 if copies and self.ui.verbose:
718 if copies and self.ui.verbose:
718 copies = ['%s (%s)' % c for c in copies]
719 copies = ['%s (%s)' % c for c in copies]
719 self.ui.write(_("copies: %s\n") % ' '.join(copies))
720 self.ui.write(_("copies: %s\n") % ' '.join(copies))
720
721
721 extra = ctx.extra()
722 extra = ctx.extra()
722 if extra and self.ui.debugflag:
723 if extra and self.ui.debugflag:
723 for key, value in sorted(extra.items()):
724 for key, value in sorted(extra.items()):
724 self.ui.write(_("extra: %s=%s\n")
725 self.ui.write(_("extra: %s=%s\n")
725 % (key, value.encode('string_escape')))
726 % (key, value.encode('string_escape')))
726
727
727 description = ctx.description().strip()
728 description = ctx.description().strip()
728 if description:
729 if description:
729 if self.ui.verbose:
730 if self.ui.verbose:
730 self.ui.write(_("description:\n"))
731 self.ui.write(_("description:\n"))
731 self.ui.write(description)
732 self.ui.write(description)
732 self.ui.write("\n\n")
733 self.ui.write("\n\n")
733 else:
734 else:
734 self.ui.write(_("summary: %s\n") %
735 self.ui.write(_("summary: %s\n") %
735 description.splitlines()[0])
736 description.splitlines()[0])
736 self.ui.write("\n")
737 self.ui.write("\n")
737
738
738 self.showpatch(changenode)
739 self.showpatch(changenode)
739
740
740 def showpatch(self, node):
741 def showpatch(self, node):
741 if self.patch:
742 if self.patch:
742 prev = self.repo.changelog.parents(node)[0]
743 prev = self.repo.changelog.parents(node)[0]
743 chunks = patch.diff(self.repo, prev, node, match=self.patch,
744 chunks = patch.diff(self.repo, prev, node, match=self.patch,
744 opts=patch.diffopts(self.ui, self.diffopts))
745 opts=patch.diffopts(self.ui, self.diffopts))
745 for chunk in chunks:
746 for chunk in chunks:
746 self.ui.write(chunk)
747 self.ui.write(chunk)
747 self.ui.write("\n")
748 self.ui.write("\n")
748
749
749 def _meaningful_parentrevs(self, log, rev):
750 def _meaningful_parentrevs(self, log, rev):
750 """Return list of meaningful (or all if debug) parentrevs for rev.
751 """Return list of meaningful (or all if debug) parentrevs for rev.
751
752
752 For merges (two non-nullrev revisions) both parents are meaningful.
753 For merges (two non-nullrev revisions) both parents are meaningful.
753 Otherwise the first parent revision is considered meaningful if it
754 Otherwise the first parent revision is considered meaningful if it
754 is not the preceding revision.
755 is not the preceding revision.
755 """
756 """
756 parents = log.parentrevs(rev)
757 parents = log.parentrevs(rev)
757 if not self.ui.debugflag and parents[1] == nullrev:
758 if not self.ui.debugflag and parents[1] == nullrev:
758 if parents[0] >= rev - 1:
759 if parents[0] >= rev - 1:
759 parents = []
760 parents = []
760 else:
761 else:
761 parents = [parents[0]]
762 parents = [parents[0]]
762 return parents
763 return parents
763
764
764
765
765 class changeset_templater(changeset_printer):
766 class changeset_templater(changeset_printer):
766 '''format changeset information.'''
767 '''format changeset information.'''
767
768
768 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
769 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
769 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
770 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
770 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
771 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
771 defaulttempl = {
772 defaulttempl = {
772 'parent': '{rev}:{node|formatnode} ',
773 'parent': '{rev}:{node|formatnode} ',
773 'manifest': '{rev}:{node|formatnode}',
774 'manifest': '{rev}:{node|formatnode}',
774 'file_copy': '{name} ({source})',
775 'file_copy': '{name} ({source})',
775 'extra': '{key}={value|stringescape}'
776 'extra': '{key}={value|stringescape}'
776 }
777 }
777 # filecopy is preserved for compatibility reasons
778 # filecopy is preserved for compatibility reasons
778 defaulttempl['filecopy'] = defaulttempl['file_copy']
779 defaulttempl['filecopy'] = defaulttempl['file_copy']
779 self.t = templater.templater(mapfile, {'formatnode': formatnode},
780 self.t = templater.templater(mapfile, {'formatnode': formatnode},
780 cache=defaulttempl)
781 cache=defaulttempl)
781 self.cache = {}
782 self.cache = {}
782
783
783 def use_template(self, t):
784 def use_template(self, t):
784 '''set template string to use'''
785 '''set template string to use'''
785 self.t.cache['changeset'] = t
786 self.t.cache['changeset'] = t
786
787
787 def _meaningful_parentrevs(self, ctx):
788 def _meaningful_parentrevs(self, ctx):
788 """Return list of meaningful (or all if debug) parentrevs for rev.
789 """Return list of meaningful (or all if debug) parentrevs for rev.
789 """
790 """
790 parents = ctx.parents()
791 parents = ctx.parents()
791 if len(parents) > 1:
792 if len(parents) > 1:
792 return parents
793 return parents
793 if self.ui.debugflag:
794 if self.ui.debugflag:
794 return [parents[0], self.repo['null']]
795 return [parents[0], self.repo['null']]
795 if parents[0].rev() >= ctx.rev() - 1:
796 if parents[0].rev() >= ctx.rev() - 1:
796 return []
797 return []
797 return parents
798 return parents
798
799
799 def _show(self, ctx, copies, props):
800 def _show(self, ctx, copies, props):
800 '''show a single changeset or file revision'''
801 '''show a single changeset or file revision'''
801
802
802 showlist = templatekw.showlist
803 showlist = templatekw.showlist
803
804
804 # showparents() behaviour depends on ui trace level which
805 # showparents() behaviour depends on ui trace level which
805 # causes unexpected behaviours at templating level and makes
806 # causes unexpected behaviours at templating level and makes
806 # it harder to extract it in a standalone function. Its
807 # it harder to extract it in a standalone function. Its
807 # behaviour cannot be changed so leave it here for now.
808 # behaviour cannot be changed so leave it here for now.
808 def showparents(repo, ctx, templ, **args):
809 def showparents(repo, ctx, templ, **args):
809 parents = [[('rev', p.rev()), ('node', p.hex())]
810 parents = [[('rev', p.rev()), ('node', p.hex())]
810 for p in self._meaningful_parentrevs(ctx)]
811 for p in self._meaningful_parentrevs(ctx)]
811 return showlist(templ, 'parent', parents, **args)
812 return showlist(templ, 'parent', parents, **args)
812
813
813 props = props.copy()
814 props = props.copy()
814 props.update(templatekw.keywords)
815 props.update(templatekw.keywords)
815 props['parents'] = showparents
816 props['parents'] = showparents
816 props['templ'] = self.t
817 props['templ'] = self.t
817 props['ctx'] = ctx
818 props['ctx'] = ctx
818 props['repo'] = self.repo
819 props['repo'] = self.repo
819 props['revcache'] = {'copies': copies}
820 props['revcache'] = {'copies': copies}
820 props['cache'] = self.cache
821 props['cache'] = self.cache
821
822
822 # find correct templates for current mode
823 # find correct templates for current mode
823
824
824 tmplmodes = [
825 tmplmodes = [
825 (True, None),
826 (True, None),
826 (self.ui.verbose, 'verbose'),
827 (self.ui.verbose, 'verbose'),
827 (self.ui.quiet, 'quiet'),
828 (self.ui.quiet, 'quiet'),
828 (self.ui.debugflag, 'debug'),
829 (self.ui.debugflag, 'debug'),
829 ]
830 ]
830
831
831 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
832 types = {'header': '', 'footer':'', 'changeset': 'changeset'}
832 for mode, postfix in tmplmodes:
833 for mode, postfix in tmplmodes:
833 for type in types:
834 for type in types:
834 cur = postfix and ('%s_%s' % (type, postfix)) or type
835 cur = postfix and ('%s_%s' % (type, postfix)) or type
835 if mode and cur in self.t:
836 if mode and cur in self.t:
836 types[type] = cur
837 types[type] = cur
837
838
838 try:
839 try:
839
840
840 # write header
841 # write header
841 if types['header']:
842 if types['header']:
842 h = templater.stringify(self.t(types['header'], **props))
843 h = templater.stringify(self.t(types['header'], **props))
843 if self.buffered:
844 if self.buffered:
844 self.header[ctx.rev()] = h
845 self.header[ctx.rev()] = h
845 else:
846 else:
846 self.ui.write(h)
847 self.ui.write(h)
847
848
848 # write changeset metadata, then patch if requested
849 # write changeset metadata, then patch if requested
849 key = types['changeset']
850 key = types['changeset']
850 self.ui.write(templater.stringify(self.t(key, **props)))
851 self.ui.write(templater.stringify(self.t(key, **props)))
851 self.showpatch(ctx.node())
852 self.showpatch(ctx.node())
852
853
853 if types['footer']:
854 if types['footer']:
854 if not self.footer:
855 if not self.footer:
855 self.footer = templater.stringify(self.t(types['footer'],
856 self.footer = templater.stringify(self.t(types['footer'],
856 **props))
857 **props))
857
858
858 except KeyError, inst:
859 except KeyError, inst:
859 msg = _("%s: no key named '%s'")
860 msg = _("%s: no key named '%s'")
860 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
861 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
861 except SyntaxError, inst:
862 except SyntaxError, inst:
862 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
863 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
863
864
864 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
865 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
865 """show one changeset using template or regular display.
866 """show one changeset using template or regular display.
866
867
867 Display format will be the first non-empty hit of:
868 Display format will be the first non-empty hit of:
868 1. option 'template'
869 1. option 'template'
869 2. option 'style'
870 2. option 'style'
870 3. [ui] setting 'logtemplate'
871 3. [ui] setting 'logtemplate'
871 4. [ui] setting 'style'
872 4. [ui] setting 'style'
872 If all of these values are either the unset or the empty string,
873 If all of these values are either the unset or the empty string,
873 regular display via changeset_printer() is done.
874 regular display via changeset_printer() is done.
874 """
875 """
875 # options
876 # options
876 patch = False
877 patch = False
877 if opts.get('patch'):
878 if opts.get('patch'):
878 patch = matchfn or matchall(repo)
879 patch = matchfn or matchall(repo)
879
880
880 tmpl = opts.get('template')
881 tmpl = opts.get('template')
881 style = None
882 style = None
882 if tmpl:
883 if tmpl:
883 tmpl = templater.parsestring(tmpl, quoted=False)
884 tmpl = templater.parsestring(tmpl, quoted=False)
884 else:
885 else:
885 style = opts.get('style')
886 style = opts.get('style')
886
887
887 # ui settings
888 # ui settings
888 if not (tmpl or style):
889 if not (tmpl or style):
889 tmpl = ui.config('ui', 'logtemplate')
890 tmpl = ui.config('ui', 'logtemplate')
890 if tmpl:
891 if tmpl:
891 tmpl = templater.parsestring(tmpl)
892 tmpl = templater.parsestring(tmpl)
892 else:
893 else:
893 style = ui.config('ui', 'style')
894 style = ui.config('ui', 'style')
894
895
895 if not (tmpl or style):
896 if not (tmpl or style):
896 return changeset_printer(ui, repo, patch, opts, buffered)
897 return changeset_printer(ui, repo, patch, opts, buffered)
897
898
898 mapfile = None
899 mapfile = None
899 if style and not tmpl:
900 if style and not tmpl:
900 mapfile = style
901 mapfile = style
901 if not os.path.split(mapfile)[0]:
902 if not os.path.split(mapfile)[0]:
902 mapname = (templater.templatepath('map-cmdline.' + mapfile)
903 mapname = (templater.templatepath('map-cmdline.' + mapfile)
903 or templater.templatepath(mapfile))
904 or templater.templatepath(mapfile))
904 if mapname: mapfile = mapname
905 if mapname: mapfile = mapname
905
906
906 try:
907 try:
907 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
908 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
908 except SyntaxError, inst:
909 except SyntaxError, inst:
909 raise util.Abort(inst.args[0])
910 raise util.Abort(inst.args[0])
910 if tmpl: t.use_template(tmpl)
911 if tmpl: t.use_template(tmpl)
911 return t
912 return t
912
913
913 def finddate(ui, repo, date):
914 def finddate(ui, repo, date):
914 """Find the tipmost changeset that matches the given date spec"""
915 """Find the tipmost changeset that matches the given date spec"""
915
916
916 df = util.matchdate(date)
917 df = util.matchdate(date)
917 m = matchall(repo)
918 m = matchall(repo)
918 results = {}
919 results = {}
919
920
920 def prep(ctx, fns):
921 def prep(ctx, fns):
921 d = ctx.date()
922 d = ctx.date()
922 if df(d[0]):
923 if df(d[0]):
923 results[ctx.rev()] = d
924 results[ctx.rev()] = d
924
925
925 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
926 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
926 rev = ctx.rev()
927 rev = ctx.rev()
927 if rev in results:
928 if rev in results:
928 ui.status(_("Found revision %s from %s\n") %
929 ui.status(_("Found revision %s from %s\n") %
929 (rev, util.datestr(results[rev])))
930 (rev, util.datestr(results[rev])))
930 return str(rev)
931 return str(rev)
931
932
932 raise util.Abort(_("revision matching date not found"))
933 raise util.Abort(_("revision matching date not found"))
933
934
934 def walkchangerevs(repo, match, opts, prepare):
935 def walkchangerevs(repo, match, opts, prepare):
935 '''Iterate over files and the revs in which they changed.
936 '''Iterate over files and the revs in which they changed.
936
937
937 Callers most commonly need to iterate backwards over the history
938 Callers most commonly need to iterate backwards over the history
938 in which they are interested. Doing so has awful (quadratic-looking)
939 in which they are interested. Doing so has awful (quadratic-looking)
939 performance, so we use iterators in a "windowed" way.
940 performance, so we use iterators in a "windowed" way.
940
941
941 We walk a window of revisions in the desired order. Within the
942 We walk a window of revisions in the desired order. Within the
942 window, we first walk forwards to gather data, then in the desired
943 window, we first walk forwards to gather data, then in the desired
943 order (usually backwards) to display it.
944 order (usually backwards) to display it.
944
945
945 This function returns an iterator yielding contexts. Before
946 This function returns an iterator yielding contexts. Before
946 yielding each context, the iterator will first call the prepare
947 yielding each context, the iterator will first call the prepare
947 function on each context in the window in forward order.'''
948 function on each context in the window in forward order.'''
948
949
949 def increasing_windows(start, end, windowsize=8, sizelimit=512):
950 def increasing_windows(start, end, windowsize=8, sizelimit=512):
950 if start < end:
951 if start < end:
951 while start < end:
952 while start < end:
952 yield start, min(windowsize, end-start)
953 yield start, min(windowsize, end-start)
953 start += windowsize
954 start += windowsize
954 if windowsize < sizelimit:
955 if windowsize < sizelimit:
955 windowsize *= 2
956 windowsize *= 2
956 else:
957 else:
957 while start > end:
958 while start > end:
958 yield start, min(windowsize, start-end-1)
959 yield start, min(windowsize, start-end-1)
959 start -= windowsize
960 start -= windowsize
960 if windowsize < sizelimit:
961 if windowsize < sizelimit:
961 windowsize *= 2
962 windowsize *= 2
962
963
963 follow = opts.get('follow') or opts.get('follow_first')
964 follow = opts.get('follow') or opts.get('follow_first')
964
965
965 if not len(repo):
966 if not len(repo):
966 return []
967 return []
967
968
968 if follow:
969 if follow:
969 defrange = '%s:0' % repo['.'].rev()
970 defrange = '%s:0' % repo['.'].rev()
970 else:
971 else:
971 defrange = '-1:0'
972 defrange = '-1:0'
972 revs = revrange(repo, opts['rev'] or [defrange])
973 revs = revrange(repo, opts['rev'] or [defrange])
973 wanted = set()
974 wanted = set()
974 slowpath = match.anypats() or (match.files() and opts.get('removed'))
975 slowpath = match.anypats() or (match.files() and opts.get('removed'))
975 fncache = {}
976 fncache = {}
976 change = util.cachefunc(repo.changectx)
977 change = util.cachefunc(repo.changectx)
977
978
978 if not slowpath and not match.files():
979 if not slowpath and not match.files():
979 # No files, no patterns. Display all revs.
980 # No files, no patterns. Display all revs.
980 wanted = set(revs)
981 wanted = set(revs)
981 copies = []
982 copies = []
982
983
983 if not slowpath:
984 if not slowpath:
984 # Only files, no patterns. Check the history of each file.
985 # Only files, no patterns. Check the history of each file.
985 def filerevgen(filelog, node):
986 def filerevgen(filelog, node):
986 cl_count = len(repo)
987 cl_count = len(repo)
987 if node is None:
988 if node is None:
988 last = len(filelog) - 1
989 last = len(filelog) - 1
989 else:
990 else:
990 last = filelog.rev(node)
991 last = filelog.rev(node)
991 for i, window in increasing_windows(last, nullrev):
992 for i, window in increasing_windows(last, nullrev):
992 revs = []
993 revs = []
993 for j in xrange(i - window, i + 1):
994 for j in xrange(i - window, i + 1):
994 n = filelog.node(j)
995 n = filelog.node(j)
995 revs.append((filelog.linkrev(j),
996 revs.append((filelog.linkrev(j),
996 follow and filelog.renamed(n)))
997 follow and filelog.renamed(n)))
997 for rev in reversed(revs):
998 for rev in reversed(revs):
998 # only yield revs for which we have the changelog; missing ones can
999 # only yield revs for which we have the changelog; missing ones can
999 # appear while doing "hg log" during a pull or commit
1000 # appear while doing "hg log" during a pull or commit
1000 if rev[0] < cl_count:
1001 if rev[0] < cl_count:
1001 yield rev
1002 yield rev
1002 def iterfiles():
1003 def iterfiles():
1003 for filename in match.files():
1004 for filename in match.files():
1004 yield filename, None
1005 yield filename, None
1005 for filename_node in copies:
1006 for filename_node in copies:
1006 yield filename_node
1007 yield filename_node
1007 minrev, maxrev = min(revs), max(revs)
1008 minrev, maxrev = min(revs), max(revs)
1008 for file_, node in iterfiles():
1009 for file_, node in iterfiles():
1009 filelog = repo.file(file_)
1010 filelog = repo.file(file_)
1010 if not len(filelog):
1011 if not len(filelog):
1011 if node is None:
1012 if node is None:
1012 # A zero count may be a directory or deleted file, so
1013 # A zero count may be a directory or deleted file, so
1013 # try to find matching entries on the slow path.
1014 # try to find matching entries on the slow path.
1014 if follow:
1015 if follow:
1015 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1016 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1016 slowpath = True
1017 slowpath = True
1017 break
1018 break
1018 else:
1019 else:
1019 continue
1020 continue
1020 for rev, copied in filerevgen(filelog, node):
1021 for rev, copied in filerevgen(filelog, node):
1021 if rev <= maxrev:
1022 if rev <= maxrev:
1022 if rev < minrev:
1023 if rev < minrev:
1023 break
1024 break
1024 fncache.setdefault(rev, [])
1025 fncache.setdefault(rev, [])
1025 fncache[rev].append(file_)
1026 fncache[rev].append(file_)
1026 wanted.add(rev)
1027 wanted.add(rev)
1027 if follow and copied:
1028 if follow and copied:
1028 copies.append(copied)
1029 copies.append(copied)
1029 if slowpath:
1030 if slowpath:
1030 if follow:
1031 if follow:
1031 raise util.Abort(_('can only follow copies/renames for explicit '
1032 raise util.Abort(_('can only follow copies/renames for explicit '
1032 'filenames'))
1033 'filenames'))
1033
1034
1034 # The slow path checks files modified in every changeset.
1035 # The slow path checks files modified in every changeset.
1035 def changerevgen():
1036 def changerevgen():
1036 for i, window in increasing_windows(len(repo) - 1, nullrev):
1037 for i, window in increasing_windows(len(repo) - 1, nullrev):
1037 for j in xrange(i - window, i + 1):
1038 for j in xrange(i - window, i + 1):
1038 yield change(j)
1039 yield change(j)
1039
1040
1040 for ctx in changerevgen():
1041 for ctx in changerevgen():
1041 matches = filter(match, ctx.files())
1042 matches = filter(match, ctx.files())
1042 if matches:
1043 if matches:
1043 fncache[ctx.rev()] = matches
1044 fncache[ctx.rev()] = matches
1044 wanted.add(ctx.rev())
1045 wanted.add(ctx.rev())
1045
1046
1046 class followfilter(object):
1047 class followfilter(object):
1047 def __init__(self, onlyfirst=False):
1048 def __init__(self, onlyfirst=False):
1048 self.startrev = nullrev
1049 self.startrev = nullrev
1049 self.roots = set()
1050 self.roots = set()
1050 self.onlyfirst = onlyfirst
1051 self.onlyfirst = onlyfirst
1051
1052
1052 def match(self, rev):
1053 def match(self, rev):
1053 def realparents(rev):
1054 def realparents(rev):
1054 if self.onlyfirst:
1055 if self.onlyfirst:
1055 return repo.changelog.parentrevs(rev)[0:1]
1056 return repo.changelog.parentrevs(rev)[0:1]
1056 else:
1057 else:
1057 return filter(lambda x: x != nullrev,
1058 return filter(lambda x: x != nullrev,
1058 repo.changelog.parentrevs(rev))
1059 repo.changelog.parentrevs(rev))
1059
1060
1060 if self.startrev == nullrev:
1061 if self.startrev == nullrev:
1061 self.startrev = rev
1062 self.startrev = rev
1062 return True
1063 return True
1063
1064
1064 if rev > self.startrev:
1065 if rev > self.startrev:
1065 # forward: all descendants
1066 # forward: all descendants
1066 if not self.roots:
1067 if not self.roots:
1067 self.roots.add(self.startrev)
1068 self.roots.add(self.startrev)
1068 for parent in realparents(rev):
1069 for parent in realparents(rev):
1069 if parent in self.roots:
1070 if parent in self.roots:
1070 self.roots.add(rev)
1071 self.roots.add(rev)
1071 return True
1072 return True
1072 else:
1073 else:
1073 # backwards: all parents
1074 # backwards: all parents
1074 if not self.roots:
1075 if not self.roots:
1075 self.roots.update(realparents(self.startrev))
1076 self.roots.update(realparents(self.startrev))
1076 if rev in self.roots:
1077 if rev in self.roots:
1077 self.roots.remove(rev)
1078 self.roots.remove(rev)
1078 self.roots.update(realparents(rev))
1079 self.roots.update(realparents(rev))
1079 return True
1080 return True
1080
1081
1081 return False
1082 return False
1082
1083
1083 # it might be worthwhile to do this in the iterator if the rev range
1084 # it might be worthwhile to do this in the iterator if the rev range
1084 # is descending and the prune args are all within that range
1085 # is descending and the prune args are all within that range
1085 for rev in opts.get('prune', ()):
1086 for rev in opts.get('prune', ()):
1086 rev = repo.changelog.rev(repo.lookup(rev))
1087 rev = repo.changelog.rev(repo.lookup(rev))
1087 ff = followfilter()
1088 ff = followfilter()
1088 stop = min(revs[0], revs[-1])
1089 stop = min(revs[0], revs[-1])
1089 for x in xrange(rev, stop-1, -1):
1090 for x in xrange(rev, stop-1, -1):
1090 if ff.match(x):
1091 if ff.match(x):
1091 wanted.discard(x)
1092 wanted.discard(x)
1092
1093
1093 def iterate():
1094 def iterate():
1094 if follow and not match.files():
1095 if follow and not match.files():
1095 ff = followfilter(onlyfirst=opts.get('follow_first'))
1096 ff = followfilter(onlyfirst=opts.get('follow_first'))
1096 def want(rev):
1097 def want(rev):
1097 return ff.match(rev) and rev in wanted
1098 return ff.match(rev) and rev in wanted
1098 else:
1099 else:
1099 def want(rev):
1100 def want(rev):
1100 return rev in wanted
1101 return rev in wanted
1101
1102
1102 for i, window in increasing_windows(0, len(revs)):
1103 for i, window in increasing_windows(0, len(revs)):
1103 change = util.cachefunc(repo.changectx)
1104 change = util.cachefunc(repo.changectx)
1104 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1105 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1105 for rev in sorted(nrevs):
1106 for rev in sorted(nrevs):
1106 fns = fncache.get(rev)
1107 fns = fncache.get(rev)
1107 ctx = change(rev)
1108 ctx = change(rev)
1108 if not fns:
1109 if not fns:
1109 def fns_generator():
1110 def fns_generator():
1110 for f in ctx.files():
1111 for f in ctx.files():
1111 if match(f):
1112 if match(f):
1112 yield f
1113 yield f
1113 fns = fns_generator()
1114 fns = fns_generator()
1114 prepare(ctx, fns)
1115 prepare(ctx, fns)
1115 for rev in nrevs:
1116 for rev in nrevs:
1116 yield change(rev)
1117 yield change(rev)
1117 return iterate()
1118 return iterate()
1118
1119
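# The walkchangerevs() docstring above describes the windowed walk: revisions
# are consumed in windows that start small and double in size, so the first
# results appear quickly while long histories still need only a few passes.
# A standalone sketch of that doubling-window generator (forward case only;
# the name, defaults and numbers are illustrative, not Mercurial API):

def _doubling_windows(start, end, windowsize=8, sizelimit=512):
    while start < end:
        yield start, min(windowsize, end - start)
        start += windowsize
        if windowsize < sizelimit:
            windowsize *= 2

# Over 100 revisions the windows are (0, 8), (8, 16), (24, 32), (56, 44):
# each window begins where the previous one stopped and doubles until the
# remaining span is smaller than the window itself.
assert list(_doubling_windows(0, 100)) == [(0, 8), (8, 16), (24, 32), (56, 44)]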
1119 def commit(ui, repo, commitfunc, pats, opts):
1120 def commit(ui, repo, commitfunc, pats, opts):
1120 '''commit the specified files or all outstanding changes'''
1121 '''commit the specified files or all outstanding changes'''
1121 date = opts.get('date')
1122 date = opts.get('date')
1122 if date:
1123 if date:
1123 opts['date'] = util.parsedate(date)
1124 opts['date'] = util.parsedate(date)
1124 message = logmessage(opts)
1125 message = logmessage(opts)
1125
1126
1126 # extract addremove carefully -- this function can be called from a command
1127 # extract addremove carefully -- this function can be called from a command
1127 # that doesn't support addremove
1128 # that doesn't support addremove
1128 if opts.get('addremove'):
1129 if opts.get('addremove'):
1129 addremove(repo, pats, opts)
1130 addremove(repo, pats, opts)
1130
1131
1131 return commitfunc(ui, repo, message, match(repo, pats, opts), opts)
1132 return commitfunc(ui, repo, message, match(repo, pats, opts), opts)
1132
1133
1133 def commiteditor(repo, ctx, subs):
1134 def commiteditor(repo, ctx, subs):
1134 if ctx.description():
1135 if ctx.description():
1135 return ctx.description()
1136 return ctx.description()
1136 return commitforceeditor(repo, ctx, subs)
1137 return commitforceeditor(repo, ctx, subs)
1137
1138
1138 def commitforceeditor(repo, ctx, subs):
1139 def commitforceeditor(repo, ctx, subs):
1139 edittext = []
1140 edittext = []
1140 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
1141 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
1141 if ctx.description():
1142 if ctx.description():
1142 edittext.append(ctx.description())
1143 edittext.append(ctx.description())
1143 edittext.append("")
1144 edittext.append("")
1144 edittext.append("") # Empty line between message and comments.
1145 edittext.append("") # Empty line between message and comments.
1145 edittext.append(_("HG: Enter commit message."
1146 edittext.append(_("HG: Enter commit message."
1146 " Lines beginning with 'HG:' are removed."))
1147 " Lines beginning with 'HG:' are removed."))
1147 edittext.append(_("HG: Leave message empty to abort commit."))
1148 edittext.append(_("HG: Leave message empty to abort commit."))
1148 edittext.append("HG: --")
1149 edittext.append("HG: --")
1149 edittext.append(_("HG: user: %s") % ctx.user())
1150 edittext.append(_("HG: user: %s") % ctx.user())
1150 if ctx.p2():
1151 if ctx.p2():
1151 edittext.append(_("HG: branch merge"))
1152 edittext.append(_("HG: branch merge"))
1152 if ctx.branch():
1153 if ctx.branch():
1153 edittext.append(_("HG: branch '%s'")
1154 edittext.append(_("HG: branch '%s'")
1154 % encoding.tolocal(ctx.branch()))
1155 % encoding.tolocal(ctx.branch()))
1155 edittext.extend([_("HG: subrepo %s") % s for s in subs])
1156 edittext.extend([_("HG: subrepo %s") % s for s in subs])
1156 edittext.extend([_("HG: added %s") % f for f in added])
1157 edittext.extend([_("HG: added %s") % f for f in added])
1157 edittext.extend([_("HG: changed %s") % f for f in modified])
1158 edittext.extend([_("HG: changed %s") % f for f in modified])
1158 edittext.extend([_("HG: removed %s") % f for f in removed])
1159 edittext.extend([_("HG: removed %s") % f for f in removed])
1159 if not added and not modified and not removed:
1160 if not added and not modified and not removed:
1160 edittext.append(_("HG: no files changed"))
1161 edittext.append(_("HG: no files changed"))
1161 edittext.append("")
1162 edittext.append("")
1162 # run editor in the repository root
1163 # run editor in the repository root
1163 olddir = os.getcwd()
1164 olddir = os.getcwd()
1164 os.chdir(repo.root)
1165 os.chdir(repo.root)
1165 text = repo.ui.edit("\n".join(edittext), ctx.user())
1166 text = repo.ui.edit("\n".join(edittext), ctx.user())
1166 text = re.sub("(?m)^HG:.*\n", "", text)
1167 text = re.sub("(?m)^HG:.*\n", "", text)
1167 os.chdir(olddir)
1168 os.chdir(olddir)
1168
1169
1169 if not text.strip():
1170 if not text.strip():
1170 raise util.Abort(_("empty commit message"))
1171 raise util.Abort(_("empty commit message"))
1171
1172
1172 return text
1173 return text
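# commitforceeditor() strips its helper lines with a single multiline regex,
# re.sub("(?m)^HG:.*\n", "", text).  A standalone check of that exact pattern
# (the sample message is made up for illustration):

import re

_sample = "fix bug\n\nHG: Enter commit message.\nHG: user: test\n"
assert re.sub("(?m)^HG:.*\n", "", _sample) == "fix bug\n\n"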
@@ -1,1287 +1,1295 @@
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2, incorporated herein by reference.
8 # GNU General Public License version 2, incorporated herein by reference.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, textwrap
19 import os, stat, time, calendar, textwrap
20 import imp
20 import imp
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 try:
31 try:
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 except ImportError:
33 except ImportError:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
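# _fastsha1() rebinds itself (and the sha1 alias) on the first call, so the
# hashlib-versus-sha probe runs exactly once and later calls go straight to
# the chosen implementation.  A minimal standalone sketch of that
# "replace yourself on first use" pattern, with a made-up setup step:

def _lazyupper(s):
    upper = str.upper            # pretend this needed an expensive import
    global _lazyupper
    _lazyupper = upper           # subsequent calls bypass this wrapper
    return upper(s)

assert _lazyupper('abc') == 'ABC'   # first call: set up, then delegate
assert _lazyupper is str.upper      # the wrapper has replaced itself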
39 import subprocess
39 import subprocess
40 closefds = os.name == 'posix'
40 closefds = os.name == 'posix'
41
41
42 def popen2(cmd, env=None, newlines=False):
42 def popen2(cmd, env=None, newlines=False):
43 # Setting bufsize to -1 lets the system decide the buffer size.
43 # Setting bufsize to -1 lets the system decide the buffer size.
44 # The default for bufsize is 0, meaning unbuffered. This leads to
44 # The default for bufsize is 0, meaning unbuffered. This leads to
45 # poor performance on Mac OS X: http://bugs.python.org/issue4194
45 # poor performance on Mac OS X: http://bugs.python.org/issue4194
46 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
46 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
47 close_fds=closefds,
47 close_fds=closefds,
48 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
48 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
49 universal_newlines=newlines,
49 universal_newlines=newlines,
50 env=env)
50 env=env)
51 return p.stdin, p.stdout
51 return p.stdin, p.stdout
52
52
53 def popen3(cmd, env=None, newlines=False):
53 def popen3(cmd, env=None, newlines=False):
54 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
54 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
55 close_fds=closefds,
55 close_fds=closefds,
56 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
56 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
57 stderr=subprocess.PIPE,
57 stderr=subprocess.PIPE,
58 universal_newlines=newlines,
58 universal_newlines=newlines,
59 env=env)
59 env=env)
60 return p.stdin, p.stdout, p.stderr
60 return p.stdin, p.stdout, p.stderr
61
61
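# popen2()/popen3() are thin wrappers around subprocess.Popen with
# system-chosen buffering (bufsize=-1) and close_fds on POSIX.  The same call
# pattern used directly, with a portable child command built from
# sys.executable instead of a shell string (illustration only):

import subprocess, sys

_child = [sys.executable, '-c',
          'import sys; sys.stdout.write(sys.stdin.read().upper())']
_p = subprocess.Popen(_child, bufsize=-1, close_fds=closefds,
                      stdin=subprocess.PIPE, stdout=subprocess.PIPE)
_out, _err = _p.communicate('echo me')
assert _out == 'ECHO ME'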
62 def version():
62 def version():
63 """Return version information if available."""
63 """Return version information if available."""
64 try:
64 try:
65 import __version__
65 import __version__
66 return __version__.version
66 return __version__.version
67 except ImportError:
67 except ImportError:
68 return 'unknown'
68 return 'unknown'
69
69
70 # used by parsedate
70 # used by parsedate
71 defaultdateformats = (
71 defaultdateformats = (
72 '%Y-%m-%d %H:%M:%S',
72 '%Y-%m-%d %H:%M:%S',
73 '%Y-%m-%d %I:%M:%S%p',
73 '%Y-%m-%d %I:%M:%S%p',
74 '%Y-%m-%d %H:%M',
74 '%Y-%m-%d %H:%M',
75 '%Y-%m-%d %I:%M%p',
75 '%Y-%m-%d %I:%M%p',
76 '%Y-%m-%d',
76 '%Y-%m-%d',
77 '%m-%d',
77 '%m-%d',
78 '%m/%d',
78 '%m/%d',
79 '%m/%d/%y',
79 '%m/%d/%y',
80 '%m/%d/%Y',
80 '%m/%d/%Y',
81 '%a %b %d %H:%M:%S %Y',
81 '%a %b %d %H:%M:%S %Y',
82 '%a %b %d %I:%M:%S%p %Y',
82 '%a %b %d %I:%M:%S%p %Y',
83 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
83 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
84 '%b %d %H:%M:%S %Y',
84 '%b %d %H:%M:%S %Y',
85 '%b %d %I:%M:%S%p %Y',
85 '%b %d %I:%M:%S%p %Y',
86 '%b %d %H:%M:%S',
86 '%b %d %H:%M:%S',
87 '%b %d %I:%M:%S%p',
87 '%b %d %I:%M:%S%p',
88 '%b %d %H:%M',
88 '%b %d %H:%M',
89 '%b %d %I:%M%p',
89 '%b %d %I:%M%p',
90 '%b %d %Y',
90 '%b %d %Y',
91 '%b %d',
91 '%b %d',
92 '%H:%M:%S',
92 '%H:%M:%S',
93 '%I:%M:%S%p',
93 '%I:%M:%S%p',
94 '%H:%M',
94 '%H:%M',
95 '%I:%M%p',
95 '%I:%M%p',
96 )
96 )
97
97
98 extendeddateformats = defaultdateformats + (
98 extendeddateformats = defaultdateformats + (
99 "%Y",
99 "%Y",
100 "%Y-%m",
100 "%Y-%m",
101 "%b",
101 "%b",
102 "%b %Y",
102 "%b %Y",
103 )
103 )
104
104
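# These format tuples are consumed by parsedate() further down in this file,
# which simply tries each format until one fits.  A standalone sketch of that
# "first format that parses wins" loop using time.strptime (the helper name
# is illustrative):

import time

def _trydate(string, formats):
    for fmt in formats:
        try:
            return time.strptime(string, fmt)
        except ValueError:
            continue
    return None

assert _trydate('2010-01-24 15:30', defaultdateformats) is not None
assert _trydate('not a date', defaultdateformats) is None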
105 def cachefunc(func):
105 def cachefunc(func):
106 '''cache the result of function calls'''
106 '''cache the result of function calls'''
107 # XXX doesn't handle keyword args
107 # XXX doesn't handle keyword args
108 cache = {}
108 cache = {}
109 if func.func_code.co_argcount == 1:
109 if func.func_code.co_argcount == 1:
110 # we gain a small amount of time because
110 # we gain a small amount of time because
111 # we don't need to pack/unpack the list
111 # we don't need to pack/unpack the list
112 def f(arg):
112 def f(arg):
113 if arg not in cache:
113 if arg not in cache:
114 cache[arg] = func(arg)
114 cache[arg] = func(arg)
115 return cache[arg]
115 return cache[arg]
116 else:
116 else:
117 def f(*args):
117 def f(*args):
118 if args not in cache:
118 if args not in cache:
119 cache[args] = func(*args)
119 cache[args] = func(*args)
120 return cache[args]
120 return cache[args]
121
121
122 return f
122 return f
123
123
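# cachefunc() is a plain unbounded memoizer keyed on the positional
# arguments.  A quick demonstration with the function defined above (the
# counter only exists for this example):

_calls = [0]
def _square(x):
    _calls[0] += 1
    return x * x

_fastsquare = cachefunc(_square)
assert _fastsquare(4) == 16 and _fastsquare(4) == 16
assert _calls[0] == 1        # the second call was answered from the cache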
124 def lrucachefunc(func):
124 def lrucachefunc(func):
125 '''cache most recent results of function calls'''
125 '''cache most recent results of function calls'''
126 cache = {}
126 cache = {}
127 order = []
127 order = []
128 if func.func_code.co_argcount == 1:
128 if func.func_code.co_argcount == 1:
129 def f(arg):
129 def f(arg):
130 if arg not in cache:
130 if arg not in cache:
131 if len(cache) > 20:
131 if len(cache) > 20:
132 del cache[order.pop(0)]
132 del cache[order.pop(0)]
133 cache[arg] = func(arg)
133 cache[arg] = func(arg)
134 else:
134 else:
135 order.remove(arg)
135 order.remove(arg)
136 order.append(arg)
136 order.append(arg)
137 return cache[arg]
137 return cache[arg]
138 else:
138 else:
139 def f(*args):
139 def f(*args):
140 if args not in cache:
140 if args not in cache:
141 if len(cache) > 20:
141 if len(cache) > 20:
142 del cache[order.pop(0)]
142 del cache[order.pop(0)]
143 cache[args] = func(*args)
143 cache[args] = func(*args)
144 else:
144 else:
145 order.remove(args)
145 order.remove(args)
146 order.append(args)
146 order.append(args)
147 return cache[args]
147 return cache[args]
148
148
149 return f
149 return f
150
150
151 class propertycache(object):
151 class propertycache(object):
152 def __init__(self, func):
152 def __init__(self, func):
153 self.func = func
153 self.func = func
154 self.name = func.__name__
154 self.name = func.__name__
155 def __get__(self, obj, type=None):
155 def __get__(self, obj, type=None):
156 result = self.func(obj)
156 result = self.func(obj)
157 setattr(obj, self.name, result)
157 setattr(obj, self.name, result)
158 return result
158 return result
159
159
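# propertycache is a non-data descriptor: the first attribute access runs the
# wrapped function and then stores the result on the instance, shadowing the
# descriptor, so every later access is an ordinary attribute lookup.  A small
# demonstration with the class defined above (names are illustrative):

class _demo(object):
    runs = [0]
    @propertycache
    def expensive(self):
        self.runs[0] += 1
        return 42

_d = _demo()
assert _d.expensive == 42 and _d.expensive == 42
assert _demo.runs[0] == 1    # the function body ran only once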
160 def pipefilter(s, cmd):
160 def pipefilter(s, cmd):
161 '''filter string S through command CMD, returning its output'''
161 '''filter string S through command CMD, returning its output'''
162 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
162 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
163 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
163 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
164 pout, perr = p.communicate(s)
164 pout, perr = p.communicate(s)
165 return pout
165 return pout
166
166
167 def tempfilter(s, cmd):
167 def tempfilter(s, cmd):
168 '''filter string S through a pair of temporary files with CMD.
168 '''filter string S through a pair of temporary files with CMD.
169 CMD is used as a template to create the real command to be run,
169 CMD is used as a template to create the real command to be run,
170 with the strings INFILE and OUTFILE replaced by the real names of
170 with the strings INFILE and OUTFILE replaced by the real names of
171 the temporary files generated.'''
171 the temporary files generated.'''
172 inname, outname = None, None
172 inname, outname = None, None
173 try:
173 try:
174 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
174 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
175 fp = os.fdopen(infd, 'wb')
175 fp = os.fdopen(infd, 'wb')
176 fp.write(s)
176 fp.write(s)
177 fp.close()
177 fp.close()
178 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
178 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
179 os.close(outfd)
179 os.close(outfd)
180 cmd = cmd.replace('INFILE', inname)
180 cmd = cmd.replace('INFILE', inname)
181 cmd = cmd.replace('OUTFILE', outname)
181 cmd = cmd.replace('OUTFILE', outname)
182 code = os.system(cmd)
182 code = os.system(cmd)
183 if sys.platform == 'OpenVMS' and code & 1:
183 if sys.platform == 'OpenVMS' and code & 1:
184 code = 0
184 code = 0
185 if code: raise Abort(_("command '%s' failed: %s") %
185 if code: raise Abort(_("command '%s' failed: %s") %
186 (cmd, explain_exit(code)))
186 (cmd, explain_exit(code)))
187 return open(outname, 'rb').read()
187 return open(outname, 'rb').read()
188 finally:
188 finally:
189 try:
189 try:
190 if inname: os.unlink(inname)
190 if inname: os.unlink(inname)
191 except: pass
191 except: pass
192 try:
192 try:
193 if outname: os.unlink(outname)
193 if outname: os.unlink(outname)
194 except: pass
194 except: pass
195
195
196 filtertable = {
196 filtertable = {
197 'tempfile:': tempfilter,
197 'tempfile:': tempfilter,
198 'pipe:': pipefilter,
198 'pipe:': pipefilter,
199 }
199 }
200
200
201 def filter(s, cmd):
201 def filter(s, cmd):
202 "filter a string through a command that transforms its input to its output"
202 "filter a string through a command that transforms its input to its output"
203 for name, fn in filtertable.iteritems():
203 for name, fn in filtertable.iteritems():
204 if cmd.startswith(name):
204 if cmd.startswith(name):
205 return fn(s, cmd[len(name):].lstrip())
205 return fn(s, cmd[len(name):].lstrip())
206 return pipefilter(s, cmd)
206 return pipefilter(s, cmd)
207
207
208 def binary(s):
208 def binary(s):
209 """return true if a string is binary data"""
209 """return true if a string is binary data"""
210 return bool(s and '\0' in s)
210 return bool(s and '\0' in s)
211
211
212 def increasingchunks(source, min=1024, max=65536):
212 def increasingchunks(source, min=1024, max=65536):
213 '''return no less than min bytes per chunk while data remains,
213 '''return no less than min bytes per chunk while data remains,
214 doubling min after each chunk until it reaches max'''
214 doubling min after each chunk until it reaches max'''
215 def log2(x):
215 def log2(x):
216 if not x:
216 if not x:
217 return 0
217 return 0
218 i = 0
218 i = 0
219 while x:
219 while x:
220 x >>= 1
220 x >>= 1
221 i += 1
221 i += 1
222 return i - 1
222 return i - 1
223
223
224 buf = []
224 buf = []
225 blen = 0
225 blen = 0
226 for chunk in source:
226 for chunk in source:
227 buf.append(chunk)
227 buf.append(chunk)
228 blen += len(chunk)
228 blen += len(chunk)
229 if blen >= min:
229 if blen >= min:
230 if min < max:
230 if min < max:
231 min = min << 1
231 min = min << 1
232 nmin = 1 << log2(blen)
232 nmin = 1 << log2(blen)
233 if nmin > min:
233 if nmin > min:
234 min = nmin
234 min = nmin
235 if min > max:
235 if min > max:
236 min = max
236 min = max
237 yield ''.join(buf)
237 yield ''.join(buf)
238 blen = 0
238 blen = 0
239 buf = []
239 buf = []
240 if buf:
240 if buf:
241 yield ''.join(buf)
241 yield ''.join(buf)
242
242
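# increasingchunks() regroups a stream of small strings into progressively
# larger chunks, which keeps the number of write() calls low for big streams.
# A standalone check with one-byte inputs and tiny thresholds so the doubling
# is easy to see:

_sizes = [len(c) for c in increasingchunks(['x'] * 7, min=1, max=4)]
assert _sizes == [1, 2, 4]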
243 Abort = error.Abort
243 Abort = error.Abort
244
244
245 def always(fn): return True
245 def always(fn): return True
246 def never(fn): return False
246 def never(fn): return False
247
247
248 def pathto(root, n1, n2):
248 def pathto(root, n1, n2):
249 '''return the relative path from one place to another.
249 '''return the relative path from one place to another.
250 root should use os.sep to separate directories
250 root should use os.sep to separate directories
251 n1 should use os.sep to separate directories
251 n1 should use os.sep to separate directories
252 n2 should use "/" to separate directories
252 n2 should use "/" to separate directories
253 returns an os.sep-separated path.
253 returns an os.sep-separated path.
254
254
255 If n1 is a relative path, it's assumed it's
255 If n1 is a relative path, it's assumed it's
256 relative to root.
256 relative to root.
257 n2 should always be relative to root.
257 n2 should always be relative to root.
258 '''
258 '''
259 if not n1: return localpath(n2)
259 if not n1: return localpath(n2)
260 if os.path.isabs(n1):
260 if os.path.isabs(n1):
261 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
261 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
262 return os.path.join(root, localpath(n2))
262 return os.path.join(root, localpath(n2))
263 n2 = '/'.join((pconvert(root), n2))
263 n2 = '/'.join((pconvert(root), n2))
264 a, b = splitpath(n1), n2.split('/')
264 a, b = splitpath(n1), n2.split('/')
265 a.reverse()
265 a.reverse()
266 b.reverse()
266 b.reverse()
267 while a and b and a[-1] == b[-1]:
267 while a and b and a[-1] == b[-1]:
268 a.pop()
268 a.pop()
269 b.pop()
269 b.pop()
270 b.reverse()
270 b.reverse()
271 return os.sep.join((['..'] * len(a)) + b) or '.'
271 return os.sep.join((['..'] * len(a)) + b) or '.'
272
272
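# pathto() builds the relative hop between two locations under root by
# stripping their common prefix and backing out of whatever is left of n1.
# One concrete case, assuming POSIX separators (so this line is illustration
# rather than something to run on Windows):

assert pathto('/repo', '/repo/a/b', 'c/d') == '../../c/d'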
273 def canonpath(root, cwd, myname):
273 def canonpath(root, cwd, myname):
274 """return the canonical path of myname, given cwd and root"""
274 """return the canonical path of myname, given cwd and root"""
275 if endswithsep(root):
275 if endswithsep(root):
276 rootsep = root
276 rootsep = root
277 else:
277 else:
278 rootsep = root + os.sep
278 rootsep = root + os.sep
279 name = myname
279 name = myname
280 if not os.path.isabs(name):
280 if not os.path.isabs(name):
281 name = os.path.join(root, cwd, name)
281 name = os.path.join(root, cwd, name)
282 name = os.path.normpath(name)
282 name = os.path.normpath(name)
283 audit_path = path_auditor(root)
283 audit_path = path_auditor(root)
284 if name != rootsep and name.startswith(rootsep):
284 if name != rootsep and name.startswith(rootsep):
285 name = name[len(rootsep):]
285 name = name[len(rootsep):]
286 audit_path(name)
286 audit_path(name)
287 return pconvert(name)
287 return pconvert(name)
288 elif name == root:
288 elif name == root:
289 return ''
289 return ''
290 else:
290 else:
291 # Determine whether `name' is in the hierarchy at or beneath `root',
291 # Determine whether `name' is in the hierarchy at or beneath `root',
292 # by iterating name=dirname(name) until that causes no change (can't
292 # by iterating name=dirname(name) until that causes no change (can't
293 # check name == '/', because that doesn't work on windows). For each
293 # check name == '/', because that doesn't work on windows). For each
294 # `name', compare dev/inode numbers. If they match, the list `rel'
294 # `name', compare dev/inode numbers. If they match, the list `rel'
295 # holds the reversed list of components making up the relative file
295 # holds the reversed list of components making up the relative file
296 # name we want.
296 # name we want.
297 root_st = os.stat(root)
297 root_st = os.stat(root)
298 rel = []
298 rel = []
299 while True:
299 while True:
300 try:
300 try:
301 name_st = os.stat(name)
301 name_st = os.stat(name)
302 except OSError:
302 except OSError:
303 break
303 break
304 if samestat(name_st, root_st):
304 if samestat(name_st, root_st):
305 if not rel:
305 if not rel:
306 # name was actually the same as root (maybe a symlink)
306 # name was actually the same as root (maybe a symlink)
307 return ''
307 return ''
308 rel.reverse()
308 rel.reverse()
309 name = os.path.join(*rel)
309 name = os.path.join(*rel)
310 audit_path(name)
310 audit_path(name)
311 return pconvert(name)
311 return pconvert(name)
312 dirname, basename = os.path.split(name)
312 dirname, basename = os.path.split(name)
313 rel.append(basename)
313 rel.append(basename)
314 if dirname == name:
314 if dirname == name:
315 break
315 break
316 name = dirname
316 name = dirname
317
317
318 raise Abort('%s not under root' % myname)
318 raise Abort('%s not under root' % myname)
319
319
320 _hgexecutable = None
320 _hgexecutable = None
321
321
322 def main_is_frozen():
322 def main_is_frozen():
323 """return True if we are a frozen executable.
323 """return True if we are a frozen executable.
324
324
325 The code supports py2exe (most common, Windows only) and tools/freeze
325 The code supports py2exe (most common, Windows only) and tools/freeze
326 (portable, not much used).
326 (portable, not much used).
327 """
327 """
328 return (hasattr(sys, "frozen") or # new py2exe
328 return (hasattr(sys, "frozen") or # new py2exe
329 hasattr(sys, "importers") or # old py2exe
329 hasattr(sys, "importers") or # old py2exe
330 imp.is_frozen("__main__")) # tools/freeze
330 imp.is_frozen("__main__")) # tools/freeze
331
331
332 def hgexecutable():
332 def hgexecutable():
333 """return location of the 'hg' executable.
333 """return location of the 'hg' executable.
334
334
335 Defaults to $HG or 'hg' in the search path.
335 Defaults to $HG or 'hg' in the search path.
336 """
336 """
337 if _hgexecutable is None:
337 if _hgexecutable is None:
338 hg = os.environ.get('HG')
338 hg = os.environ.get('HG')
339 if hg:
339 if hg:
340 set_hgexecutable(hg)
340 set_hgexecutable(hg)
341 elif main_is_frozen():
341 elif main_is_frozen():
342 set_hgexecutable(sys.executable)
342 set_hgexecutable(sys.executable)
343 else:
343 else:
344 exe = find_exe('hg') or os.path.basename(sys.argv[0])
344 exe = find_exe('hg') or os.path.basename(sys.argv[0])
345 set_hgexecutable(exe)
345 set_hgexecutable(exe)
346 return _hgexecutable
346 return _hgexecutable
347
347
348 def set_hgexecutable(path):
348 def set_hgexecutable(path):
349 """set location of the 'hg' executable"""
349 """set location of the 'hg' executable"""
350 global _hgexecutable
350 global _hgexecutable
351 _hgexecutable = path
351 _hgexecutable = path
352
352
353 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
353 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
354 '''enhanced shell command execution.
354 '''enhanced shell command execution.
355 run with environment maybe modified, maybe in different dir.
355 run with environment maybe modified, maybe in different dir.
356
356
357 if command fails and onerr is None, return status. if ui object,
357 if command fails and onerr is None, return status. if ui object,
358 print error message and return status, else raise onerr object as
358 print error message and return status, else raise onerr object as
359 exception.'''
359 exception.'''
360 def py2shell(val):
360 def py2shell(val):
361 'convert python object into string that is useful to shell'
361 'convert python object into string that is useful to shell'
362 if val is None or val is False:
362 if val is None or val is False:
363 return '0'
363 return '0'
364 if val is True:
364 if val is True:
365 return '1'
365 return '1'
366 return str(val)
366 return str(val)
367 origcmd = cmd
367 origcmd = cmd
368 if os.name == 'nt':
368 if os.name == 'nt':
369 cmd = '"%s"' % cmd
369 cmd = '"%s"' % cmd
370 env = dict(os.environ)
370 env = dict(os.environ)
371 env.update((k, py2shell(v)) for k, v in environ.iteritems())
371 env.update((k, py2shell(v)) for k, v in environ.iteritems())
372 env['HG'] = hgexecutable()
372 env['HG'] = hgexecutable()
373 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
373 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
374 env=env, cwd=cwd)
374 env=env, cwd=cwd)
375 if sys.platform == 'OpenVMS' and rc & 1:
375 if sys.platform == 'OpenVMS' and rc & 1:
376 rc = 0
376 rc = 0
377 if rc and onerr:
377 if rc and onerr:
378 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
378 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
379 explain_exit(rc)[0])
379 explain_exit(rc)[0])
380 if errprefix:
380 if errprefix:
381 errmsg = '%s: %s' % (errprefix, errmsg)
381 errmsg = '%s: %s' % (errprefix, errmsg)
382 try:
382 try:
383 onerr.warn(errmsg + '\n')
383 onerr.warn(errmsg + '\n')
384 except AttributeError:
384 except AttributeError:
385 raise onerr(errmsg)
385 raise onerr(errmsg)
386 return rc
386 return rc
387
387
388 def checksignature(func):
388 def checksignature(func):
389 '''wrap a function with code to check for calling errors'''
389 '''wrap a function with code to check for calling errors'''
390 def check(*args, **kwargs):
390 def check(*args, **kwargs):
391 try:
391 try:
392 return func(*args, **kwargs)
392 return func(*args, **kwargs)
393 except TypeError:
393 except TypeError:
394 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
394 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
395 raise error.SignatureError
395 raise error.SignatureError
396 raise
396 raise
397
397
398 return check
398 return check
399
399
400 # os.path.lexists is not available on python2.3
400 # os.path.lexists is not available on python2.3
401 def lexists(filename):
401 def lexists(filename):
402 "test whether a file with this name exists. does not follow symlinks"
402 "test whether a file with this name exists. does not follow symlinks"
403 try:
403 try:
404 os.lstat(filename)
404 os.lstat(filename)
405 except:
405 except:
406 return False
406 return False
407 return True
407 return True
408
408
409 def unlink(f):
409 def unlink(f):
410 """unlink and remove the directory if it is empty"""
410 """unlink and remove the directory if it is empty"""
411 os.unlink(f)
411 os.unlink(f)
412 # try removing directories that might now be empty
412 # try removing directories that might now be empty
413 try:
413 try:
414 os.removedirs(os.path.dirname(f))
414 os.removedirs(os.path.dirname(f))
415 except OSError:
415 except OSError:
416 pass
416 pass
417
417
418 def copyfile(src, dest):
418 def copyfile(src, dest):
419 "copy a file, preserving mode and atime/mtime"
419 "copy a file, preserving mode and atime/mtime"
420 if os.path.islink(src):
420 if os.path.islink(src):
421 try:
421 try:
422 os.unlink(dest)
422 os.unlink(dest)
423 except:
423 except:
424 pass
424 pass
425 os.symlink(os.readlink(src), dest)
425 os.symlink(os.readlink(src), dest)
426 else:
426 else:
427 try:
427 try:
428 shutil.copyfile(src, dest)
428 shutil.copyfile(src, dest)
429 shutil.copystat(src, dest)
429 shutil.copystat(src, dest)
430 except shutil.Error, inst:
430 except shutil.Error, inst:
431 raise Abort(str(inst))
431 raise Abort(str(inst))
432
432
433 def copyfiles(src, dst, hardlink=None):
433 def copyfiles(src, dst, hardlink=None):
434 """Copy a directory tree using hardlinks if possible"""
434 """Copy a directory tree using hardlinks if possible"""
435
435
436 if hardlink is None:
436 if hardlink is None:
437 hardlink = (os.stat(src).st_dev ==
437 hardlink = (os.stat(src).st_dev ==
438 os.stat(os.path.dirname(dst)).st_dev)
438 os.stat(os.path.dirname(dst)).st_dev)
439
439
440 if os.path.isdir(src):
440 if os.path.isdir(src):
441 os.mkdir(dst)
441 os.mkdir(dst)
442 for name, kind in osutil.listdir(src):
442 for name, kind in osutil.listdir(src):
443 srcname = os.path.join(src, name)
443 srcname = os.path.join(src, name)
444 dstname = os.path.join(dst, name)
444 dstname = os.path.join(dst, name)
445 copyfiles(srcname, dstname, hardlink)
445 copyfiles(srcname, dstname, hardlink)
446 else:
446 else:
447 if hardlink:
447 if hardlink:
448 try:
448 try:
449 os_link(src, dst)
449 os_link(src, dst)
450 except (IOError, OSError):
450 except (IOError, OSError):
451 hardlink = False
451 hardlink = False
452 shutil.copy(src, dst)
452 shutil.copy(src, dst)
453 else:
453 else:
454 shutil.copy(src, dst)
454 shutil.copy(src, dst)
455
455
456 class path_auditor(object):
456 class path_auditor(object):
457 '''ensure that a filesystem path contains no banned components.
457 '''ensure that a filesystem path contains no banned components.
458 the following properties of a path are checked:
458 the following properties of a path are checked:
459
459
460 - under top-level .hg
460 - under top-level .hg
461 - starts at the root of a windows drive
461 - starts at the root of a windows drive
462 - contains ".."
462 - contains ".."
463 - traverses a symlink (e.g. a/symlink_here/b)
463 - traverses a symlink (e.g. a/symlink_here/b)
464 - inside a nested repository'''
464 - inside a nested repository'''
465
465
466 def __init__(self, root):
466 def __init__(self, root):
467 self.audited = set()
467 self.audited = set()
468 self.auditeddir = set()
468 self.auditeddir = set()
469 self.root = root
469 self.root = root
470
470
471 def __call__(self, path):
471 def __call__(self, path):
472 if path in self.audited:
472 if path in self.audited:
473 return
473 return
474 normpath = os.path.normcase(path)
474 normpath = os.path.normcase(path)
475 parts = splitpath(normpath)
475 parts = splitpath(normpath)
476 if (os.path.splitdrive(path)[0]
476 if (os.path.splitdrive(path)[0]
477 or parts[0].lower() in ('.hg', '.hg.', '')
477 or parts[0].lower() in ('.hg', '.hg.', '')
478 or os.pardir in parts):
478 or os.pardir in parts):
479 raise Abort(_("path contains illegal component: %s") % path)
479 raise Abort(_("path contains illegal component: %s") % path)
480 if '.hg' in path.lower():
480 if '.hg' in path.lower():
481 lparts = [p.lower() for p in parts]
481 lparts = [p.lower() for p in parts]
482 for p in '.hg', '.hg.':
482 for p in '.hg', '.hg.':
483 if p in lparts[1:]:
483 if p in lparts[1:]:
484 pos = lparts.index(p)
484 pos = lparts.index(p)
485 base = os.path.join(*parts[:pos])
485 base = os.path.join(*parts[:pos])
486 raise Abort(_('path %r is inside repo %r') % (path, base))
486 raise Abort(_('path %r is inside repo %r') % (path, base))
487 def check(prefix):
487 def check(prefix):
488 curpath = os.path.join(self.root, prefix)
488 curpath = os.path.join(self.root, prefix)
489 try:
489 try:
490 st = os.lstat(curpath)
490 st = os.lstat(curpath)
491 except OSError, err:
491 except OSError, err:
492 # EINVAL can be raised as invalid path syntax under win32.
492 # EINVAL can be raised as invalid path syntax under win32.
493 # They must be ignored because patterns can be checked too.
493 # They must be ignored because patterns can be checked too.
494 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
494 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
495 raise
495 raise
496 else:
496 else:
497 if stat.S_ISLNK(st.st_mode):
497 if stat.S_ISLNK(st.st_mode):
498 raise Abort(_('path %r traverses symbolic link %r') %
498 raise Abort(_('path %r traverses symbolic link %r') %
499 (path, prefix))
499 (path, prefix))
500 elif (stat.S_ISDIR(st.st_mode) and
500 elif (stat.S_ISDIR(st.st_mode) and
501 os.path.isdir(os.path.join(curpath, '.hg'))):
501 os.path.isdir(os.path.join(curpath, '.hg'))):
502 raise Abort(_('path %r is inside repo %r') %
502 raise Abort(_('path %r is inside repo %r') %
503 (path, prefix))
503 (path, prefix))
504 parts.pop()
504 parts.pop()
505 prefixes = []
505 prefixes = []
506 while parts:
506 while parts:
507 prefix = os.sep.join(parts)
507 prefix = os.sep.join(parts)
508 if prefix in self.auditeddir:
508 if prefix in self.auditeddir:
509 break
509 break
510 check(prefix)
510 check(prefix)
511 prefixes.append(prefix)
511 prefixes.append(prefix)
512 parts.pop()
512 parts.pop()
513
513
514 self.audited.add(path)
514 self.audited.add(path)
515 # only add prefixes to the cache after checking everything: we don't
515 # only add prefixes to the cache after checking everything: we don't
516 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
516 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
517 self.auditeddir.update(prefixes)
517 self.auditeddir.update(prefixes)
518
518
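# The cheap, filesystem-free part of the audit above is a plain look at the
# path components: anything starting inside ".hg", containing "..", or
# carrying a drive prefix is rejected before any lstat() happens.  A
# standalone sketch of just that first barrier (the symlink and nested-repo
# checks need a real filesystem and are left out):

import os

def _quickaudit(path):
    parts = os.path.normcase(path).split(os.sep)
    return not (os.path.splitdrive(path)[0]
                or parts[0].lower() in ('.hg', '.hg.', '')
                or os.pardir in parts)

assert _quickaudit(os.sep.join(['foo', 'bar']))
assert not _quickaudit(os.sep.join(['..', 'etc', 'passwd']))
assert not _quickaudit(os.sep.join(['.hg', 'hgrc']))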
519 def nlinks(pathname):
519 def nlinks(pathname):
520 """Return number of hardlinks for the given file."""
520 """Return number of hardlinks for the given file."""
521 return os.lstat(pathname).st_nlink
521 return os.lstat(pathname).st_nlink
522
522
523 if hasattr(os, 'link'):
523 if hasattr(os, 'link'):
524 os_link = os.link
524 os_link = os.link
525 else:
525 else:
526 def os_link(src, dst):
526 def os_link(src, dst):
527 raise OSError(0, _("Hardlinks not supported"))
527 raise OSError(0, _("Hardlinks not supported"))
528
528
529 def lookup_reg(key, name=None, scope=None):
529 def lookup_reg(key, name=None, scope=None):
530 return None
530 return None
531
531
532 def hidewindow():
533 """Hide current shell window.
534
535 Used to hide the window opened when starting an asynchronous
536 child process under Windows; it is unneeded on other systems.
537 """
538 pass
539
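# This default is a no-op; when os.name == 'nt' the "from windows import *"
# just below replaces it with a real win32 implementation.  The intended
# calling pattern, sketched as comments because spawndetached() lives in the
# platform modules and is not part of this hunk:
#
#     pid = spawndetached(args)   # may briefly open a console window on win32
#     hidewindow()                # hide that window again right away
#
# so background processes can be started without leaving a console visible.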
532 if os.name == 'nt':
540 if os.name == 'nt':
533 from windows import *
541 from windows import *
534 else:
542 else:
535 from posix import *
543 from posix import *
536
544
537 def makelock(info, pathname):
545 def makelock(info, pathname):
538 try:
546 try:
539 return os.symlink(info, pathname)
547 return os.symlink(info, pathname)
540 except OSError, why:
548 except OSError, why:
541 if why.errno == errno.EEXIST:
549 if why.errno == errno.EEXIST:
542 raise
550 raise
543 except AttributeError: # no symlink in os
551 except AttributeError: # no symlink in os
544 pass
552 pass
545
553
546 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
554 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
547 os.write(ld, info)
555 os.write(ld, info)
548 os.close(ld)
556 os.close(ld)
549
557
550 def readlock(pathname):
558 def readlock(pathname):
551 try:
559 try:
552 return os.readlink(pathname)
560 return os.readlink(pathname)
553 except OSError, why:
561 except OSError, why:
554 if why.errno not in (errno.EINVAL, errno.ENOSYS):
562 if why.errno not in (errno.EINVAL, errno.ENOSYS):
555 raise
563 raise
556 except AttributeError: # no symlink in os
564 except AttributeError: # no symlink in os
557 pass
565 pass
558 return posixfile(pathname).read()
566 return posixfile(pathname).read()
559
567
560 def fstat(fp):
568 def fstat(fp):
561 '''stat file object that may not have fileno method.'''
569 '''stat file object that may not have fileno method.'''
562 try:
570 try:
563 return os.fstat(fp.fileno())
571 return os.fstat(fp.fileno())
564 except AttributeError:
572 except AttributeError:
565 return os.stat(fp.name)
573 return os.stat(fp.name)
566
574
567 # File system features
575 # File system features
568
576
569 def checkcase(path):
577 def checkcase(path):
570 """
578 """
571 Check whether the given path is on a case-sensitive filesystem
579 Check whether the given path is on a case-sensitive filesystem
572
580
573 Requires a path (like /foo/.hg) ending with a foldable final
581 Requires a path (like /foo/.hg) ending with a foldable final
574 directory component.
582 directory component.
575 """
583 """
576 s1 = os.stat(path)
584 s1 = os.stat(path)
577 d, b = os.path.split(path)
585 d, b = os.path.split(path)
578 p2 = os.path.join(d, b.upper())
586 p2 = os.path.join(d, b.upper())
579 if path == p2:
587 if path == p2:
580 p2 = os.path.join(d, b.lower())
588 p2 = os.path.join(d, b.lower())
581 try:
589 try:
582 s2 = os.stat(p2)
590 s2 = os.stat(p2)
583 if s2 == s1:
591 if s2 == s1:
584 return False
592 return False
585 return True
593 return True
586 except:
594 except:
587 return True
595 return True
588
596
589 _fspathcache = {}
597 _fspathcache = {}
590 def fspath(name, root):
598 def fspath(name, root):
591 '''Get name in the case stored in the filesystem
599 '''Get name in the case stored in the filesystem
592
600
593 The name is either relative to root, or it is an absolute path starting
601 The name is either relative to root, or it is an absolute path starting
594 with root. Note that this function is unnecessary, and should not be
602 with root. Note that this function is unnecessary, and should not be
595 called, for case-sensitive filesystems (simply because it's expensive).
603 called, for case-sensitive filesystems (simply because it's expensive).
596 '''
604 '''
597 # If name is absolute, make it relative
605 # If name is absolute, make it relative
598 if name.lower().startswith(root.lower()):
606 if name.lower().startswith(root.lower()):
599 l = len(root)
607 l = len(root)
600 if name[l] == os.sep or name[l] == os.altsep:
608 if name[l] == os.sep or name[l] == os.altsep:
601 l = l + 1
609 l = l + 1
602 name = name[l:]
610 name = name[l:]
603
611
604 if not os.path.exists(os.path.join(root, name)):
612 if not os.path.exists(os.path.join(root, name)):
605 return None
613 return None
606
614
607 seps = os.sep
615 seps = os.sep
608 if os.altsep:
616 if os.altsep:
609 seps = seps + os.altsep
617 seps = seps + os.altsep
610 # Protect backslashes. This gets silly very quickly.
618 # Protect backslashes. This gets silly very quickly.
611 seps.replace('\\','\\\\')
619 seps.replace('\\','\\\\')
612 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
620 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
613 dir = os.path.normcase(os.path.normpath(root))
621 dir = os.path.normcase(os.path.normpath(root))
614 result = []
622 result = []
615 for part, sep in pattern.findall(name):
623 for part, sep in pattern.findall(name):
616 if sep:
624 if sep:
617 result.append(sep)
625 result.append(sep)
618 continue
626 continue
619
627
620 if dir not in _fspathcache:
628 if dir not in _fspathcache:
621 _fspathcache[dir] = os.listdir(dir)
629 _fspathcache[dir] = os.listdir(dir)
622 contents = _fspathcache[dir]
630 contents = _fspathcache[dir]
623
631
624 lpart = part.lower()
632 lpart = part.lower()
625 lenp = len(part)
633 lenp = len(part)
626 for n in contents:
634 for n in contents:
627 if lenp == len(n) and n.lower() == lpart:
635 if lenp == len(n) and n.lower() == lpart:
628 result.append(n)
636 result.append(n)
629 break
637 break
630 else:
638 else:
631 # Cannot happen, as the file exists!
639 # Cannot happen, as the file exists!
632 result.append(part)
640 result.append(part)
633 dir = os.path.join(dir, lpart)
641 dir = os.path.join(dir, lpart)
634
642
635 return ''.join(result)
643 return ''.join(result)
636
644
637 def checkexec(path):
645 def checkexec(path):
638 """
646 """
639 Check whether the given path is on a filesystem with UNIX-like exec flags
647 Check whether the given path is on a filesystem with UNIX-like exec flags
640
648
641 Requires a directory (like /foo/.hg)
649 Requires a directory (like /foo/.hg)
642 """
650 """
643
651
644 # VFAT on some Linux versions can flip mode but it doesn't persist
652 # VFAT on some Linux versions can flip mode but it doesn't persist
645 # a FS remount. Frequently we can detect it if files are created
653 # a FS remount. Frequently we can detect it if files are created
646 # with exec bit on.
654 # with exec bit on.
647
655
648 try:
656 try:
649 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
657 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
650 fh, fn = tempfile.mkstemp("", "", path)
658 fh, fn = tempfile.mkstemp("", "", path)
651 try:
659 try:
652 os.close(fh)
660 os.close(fh)
653 m = os.stat(fn).st_mode & 0777
661 m = os.stat(fn).st_mode & 0777
654 new_file_has_exec = m & EXECFLAGS
662 new_file_has_exec = m & EXECFLAGS
655 os.chmod(fn, m ^ EXECFLAGS)
663 os.chmod(fn, m ^ EXECFLAGS)
656 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
664 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
657 finally:
665 finally:
658 os.unlink(fn)
666 os.unlink(fn)
659 except (IOError, OSError):
667 except (IOError, OSError):
660 # we don't care, the user probably won't be able to commit anyway
668 # we don't care, the user probably won't be able to commit anyway
661 return False
669 return False
662 return not (new_file_has_exec or exec_flags_cannot_flip)
670 return not (new_file_has_exec or exec_flags_cannot_flip)
663
671
664 def checklink(path):
672 def checklink(path):
665 """check whether the given path is on a symlink-capable filesystem"""
673 """check whether the given path is on a symlink-capable filesystem"""
666 # mktemp is not racy because symlink creation will fail if the
674 # mktemp is not racy because symlink creation will fail if the
667 # file already exists
675 # file already exists
668 name = tempfile.mktemp(dir=path)
676 name = tempfile.mktemp(dir=path)
669 try:
677 try:
670 os.symlink(".", name)
678 os.symlink(".", name)
671 os.unlink(name)
679 os.unlink(name)
672 return True
680 return True
673 except (OSError, AttributeError):
681 except (OSError, AttributeError):
674 return False
682 return False
675
683
676 def needbinarypatch():
684 def needbinarypatch():
677 """return True if patches should be applied in binary mode by default."""
685 """return True if patches should be applied in binary mode by default."""
678 return os.name == 'nt'
686 return os.name == 'nt'
679
687
680 def endswithsep(path):
688 def endswithsep(path):
681 '''Check whether path ends with os.sep or os.altsep.'''
689 '''Check whether path ends with os.sep or os.altsep.'''
682 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
690 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
683
691
684 def splitpath(path):
692 def splitpath(path):
685 '''Split path by os.sep.
693 '''Split path by os.sep.
686 Note that this function does not use os.altsep because it is
694 Note that this function does not use os.altsep because it is
687 meant as a simple alternative to "xxx.split(os.sep)".
695 meant as a simple alternative to "xxx.split(os.sep)".
688 It is recommended to run os.path.normpath() on the path first
696 It is recommended to run os.path.normpath() on the path first
689 if needed.'''
697 if needed.'''
690 return path.split(os.sep)
698 return path.split(os.sep)
691
699
692 def gui():
700 def gui():
693 '''Are we running in a GUI?'''
701 '''Are we running in a GUI?'''
694 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
702 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
695
703
696 def mktempcopy(name, emptyok=False, createmode=None):
704 def mktempcopy(name, emptyok=False, createmode=None):
697 """Create a temporary file with the same contents from name
705 """Create a temporary file with the same contents from name
698
706
699 The permission bits are copied from the original file.
707 The permission bits are copied from the original file.
700
708
701 If the temporary file is going to be truncated immediately, you
709 If the temporary file is going to be truncated immediately, you
702 can use emptyok=True as an optimization.
710 can use emptyok=True as an optimization.
703
711
704 Returns the name of the temporary file.
712 Returns the name of the temporary file.
705 """
713 """
706 d, fn = os.path.split(name)
714 d, fn = os.path.split(name)
707 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
715 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
708 os.close(fd)
716 os.close(fd)
709 # Temporary files are created with mode 0600, which is usually not
717 # Temporary files are created with mode 0600, which is usually not
710 # what we want. If the original file already exists, just copy
718 # what we want. If the original file already exists, just copy
711 # its mode. Otherwise, manually obey umask.
719 # its mode. Otherwise, manually obey umask.
712 try:
720 try:
713 st_mode = os.lstat(name).st_mode & 0777
721 st_mode = os.lstat(name).st_mode & 0777
714 except OSError, inst:
722 except OSError, inst:
715 if inst.errno != errno.ENOENT:
723 if inst.errno != errno.ENOENT:
716 raise
724 raise
717 st_mode = createmode
725 st_mode = createmode
718 if st_mode is None:
726 if st_mode is None:
719 st_mode = ~umask
727 st_mode = ~umask
720 st_mode &= 0666
728 st_mode &= 0666
721 os.chmod(temp, st_mode)
729 os.chmod(temp, st_mode)
722 if emptyok:
730 if emptyok:
723 return temp
731 return temp
724 try:
732 try:
725 try:
733 try:
726 ifp = posixfile(name, "rb")
734 ifp = posixfile(name, "rb")
727 except IOError, inst:
735 except IOError, inst:
728 if inst.errno == errno.ENOENT:
736 if inst.errno == errno.ENOENT:
729 return temp
737 return temp
730 if not getattr(inst, 'filename', None):
738 if not getattr(inst, 'filename', None):
731 inst.filename = name
739 inst.filename = name
732 raise
740 raise
733 ofp = posixfile(temp, "wb")
741 ofp = posixfile(temp, "wb")
734 for chunk in filechunkiter(ifp):
742 for chunk in filechunkiter(ifp):
735 ofp.write(chunk)
743 ofp.write(chunk)
736 ifp.close()
744 ifp.close()
737 ofp.close()
745 ofp.close()
738 except:
746 except:
739 try: os.unlink(temp)
747 try: os.unlink(temp)
740 except: pass
748 except: pass
741 raise
749 raise
742 return temp
750 return temp
743
751
744 class atomictempfile(object):
752 class atomictempfile(object):
745 """file-like object that atomically updates a file
753 """file-like object that atomically updates a file
746
754
747 All writes will be redirected to a temporary copy of the original
755 All writes will be redirected to a temporary copy of the original
748 file. When rename is called, the copy is renamed to the original
756 file. When rename is called, the copy is renamed to the original
749 name, making the changes visible.
757 name, making the changes visible.
750 """
758 """
751 def __init__(self, name, mode, createmode):
759 def __init__(self, name, mode, createmode):
752 self.__name = name
760 self.__name = name
753 self._fp = None
761 self._fp = None
754 self.temp = mktempcopy(name, emptyok=('w' in mode),
762 self.temp = mktempcopy(name, emptyok=('w' in mode),
755 createmode=createmode)
763 createmode=createmode)
756 self._fp = posixfile(self.temp, mode)
764 self._fp = posixfile(self.temp, mode)
757
765
758 def __getattr__(self, name):
766 def __getattr__(self, name):
759 return getattr(self._fp, name)
767 return getattr(self._fp, name)
760
768
761 def rename(self):
769 def rename(self):
762 if not self._fp.closed:
770 if not self._fp.closed:
763 self._fp.close()
771 self._fp.close()
764 rename(self.temp, localpath(self.__name))
772 rename(self.temp, localpath(self.__name))
765
773
766 def __del__(self):
774 def __del__(self):
767 if not self._fp:
775 if not self._fp:
768 return
776 return
769 if not self._fp.closed:
777 if not self._fp.closed:
770 try:
778 try:
771 os.unlink(self.temp)
779 os.unlink(self.temp)
772 except: pass
780 except: pass
773 self._fp.close()
781 self._fp.close()
774
782
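A usage sketch for atomictempfile (the helper name is hypothetical); opening with 'w' passes emptyok=True to mktempcopy() because the contents are rewritten from scratch:

def _example_atomic_write(name, data):
    f = atomictempfile(name, 'w', createmode=None)
    f.write(data)
    # until rename() is called, readers still see the old contents;
    # dropping the object without rename() discards the temp copy
    f.rename()
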
775 def makedirs(name, mode=None):
783 def makedirs(name, mode=None):
776 """recursive directory creation with parent mode inheritance"""
784 """recursive directory creation with parent mode inheritance"""
777 try:
785 try:
778 os.mkdir(name)
786 os.mkdir(name)
779 if mode is not None:
787 if mode is not None:
780 os.chmod(name, mode)
788 os.chmod(name, mode)
781 return
789 return
782 except OSError, err:
790 except OSError, err:
783 if err.errno == errno.EEXIST:
791 if err.errno == errno.EEXIST:
784 return
792 return
785 if err.errno != errno.ENOENT:
793 if err.errno != errno.ENOENT:
786 raise
794 raise
787 parent = os.path.abspath(os.path.dirname(name))
795 parent = os.path.abspath(os.path.dirname(name))
788 makedirs(parent, mode)
796 makedirs(parent, mode)
789 makedirs(name, mode)
797 makedirs(name, mode)
790
798
791 class opener(object):
799 class opener(object):
792 """Open files relative to a base directory
800 """Open files relative to a base directory
793
801
794 This class is used to hide the details of COW semantics and
802 This class is used to hide the details of COW semantics and
795 remote file access from higher level code.
803 remote file access from higher level code.
796 """
804 """
797 def __init__(self, base, audit=True):
805 def __init__(self, base, audit=True):
798 self.base = base
806 self.base = base
799 if audit:
807 if audit:
800 self.audit_path = path_auditor(base)
808 self.audit_path = path_auditor(base)
801 else:
809 else:
802 self.audit_path = always
810 self.audit_path = always
803 self.createmode = None
811 self.createmode = None
804
812
805 @propertycache
813 @propertycache
806 def _can_symlink(self):
814 def _can_symlink(self):
807 return checklink(self.base)
815 return checklink(self.base)
808
816
809 def _fixfilemode(self, name):
817 def _fixfilemode(self, name):
810 if self.createmode is None:
818 if self.createmode is None:
811 return
819 return
812 os.chmod(name, self.createmode & 0666)
820 os.chmod(name, self.createmode & 0666)
813
821
814 def __call__(self, path, mode="r", text=False, atomictemp=False):
822 def __call__(self, path, mode="r", text=False, atomictemp=False):
815 self.audit_path(path)
823 self.audit_path(path)
816 f = os.path.join(self.base, path)
824 f = os.path.join(self.base, path)
817
825
818 if not text and "b" not in mode:
826 if not text and "b" not in mode:
819 mode += "b" # for that other OS
827 mode += "b" # for that other OS
820
828
821 nlink = -1
829 nlink = -1
822 if mode not in ("r", "rb"):
830 if mode not in ("r", "rb"):
823 try:
831 try:
824 nlink = nlinks(f)
832 nlink = nlinks(f)
825 except OSError:
833 except OSError:
826 nlink = 0
834 nlink = 0
827 d = os.path.dirname(f)
835 d = os.path.dirname(f)
828 if not os.path.isdir(d):
836 if not os.path.isdir(d):
829 makedirs(d, self.createmode)
837 makedirs(d, self.createmode)
830 if atomictemp:
838 if atomictemp:
831 return atomictempfile(f, mode, self.createmode)
839 return atomictempfile(f, mode, self.createmode)
832 if nlink > 1:
840 if nlink > 1:
833 rename(mktempcopy(f), f)
841 rename(mktempcopy(f), f)
834 fp = posixfile(f, mode)
842 fp = posixfile(f, mode)
835 if nlink == 0:
843 if nlink == 0:
836 self._fixfilemode(f)
844 self._fixfilemode(f)
837 return fp
845 return fp
838
846
839 def symlink(self, src, dst):
847 def symlink(self, src, dst):
840 self.audit_path(dst)
848 self.audit_path(dst)
841 linkname = os.path.join(self.base, dst)
849 linkname = os.path.join(self.base, dst)
842 try:
850 try:
843 os.unlink(linkname)
851 os.unlink(linkname)
844 except OSError:
852 except OSError:
845 pass
853 pass
846
854
847 dirname = os.path.dirname(linkname)
855 dirname = os.path.dirname(linkname)
848 if not os.path.exists(dirname):
856 if not os.path.exists(dirname):
849 makedirs(dirname, self.createmode)
857 makedirs(dirname, self.createmode)
850
858
851 if self._can_symlink:
859 if self._can_symlink:
852 try:
860 try:
853 os.symlink(src, linkname)
861 os.symlink(src, linkname)
854 except OSError, err:
862 except OSError, err:
855 raise OSError(err.errno, _('could not symlink to %r: %s') %
863 raise OSError(err.errno, _('could not symlink to %r: %s') %
856 (src, err.strerror), linkname)
864 (src, err.strerror), linkname)
857 else:
865 else:
858 f = self(dst, "w")
866 f = self(dst, "w")
859 f.write(src)
867 f.write(src)
860 f.close()
868 f.close()
861 self._fixfilemode(dst)
869 self._fixfilemode(dst)
862
870
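An illustrative sketch of opener (base directory and file name are placeholders): paths are audited against the base, missing directories are created on demand, and atomictemp=True hands back an atomictempfile:

def _example_opener_write(base, data):
    op = opener(base)
    f = op('store/example.tmp', 'w', atomictemp=True)
    f.write(data)
    f.rename()   # publish the new file atomically
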
863 class chunkbuffer(object):
871 class chunkbuffer(object):
864 """Allow arbitrary sized chunks of data to be efficiently read from an
872 """Allow arbitrary sized chunks of data to be efficiently read from an
865 iterator over chunks of arbitrary size."""
873 iterator over chunks of arbitrary size."""
866
874
867 def __init__(self, in_iter):
875 def __init__(self, in_iter):
868 """in_iter is the iterator that's iterating over the input chunks.
876 """in_iter is the iterator that's iterating over the input chunks.
869 targetsize is how big a buffer to try to maintain."""
877 targetsize is how big a buffer to try to maintain."""
870 self.iter = iter(in_iter)
878 self.iter = iter(in_iter)
871 self.buf = ''
879 self.buf = ''
872 self.targetsize = 2**16
880 self.targetsize = 2**16
873
881
874 def read(self, l):
882 def read(self, l):
875 """Read L bytes of data from the iterator of chunks of data.
883 """Read L bytes of data from the iterator of chunks of data.
876 Returns less than L bytes if the iterator runs dry."""
884 Returns less than L bytes if the iterator runs dry."""
877 if l > len(self.buf) and self.iter:
885 if l > len(self.buf) and self.iter:
878 # Clamp to a multiple of self.targetsize
886 # Clamp to a multiple of self.targetsize
879 targetsize = max(l, self.targetsize)
887 targetsize = max(l, self.targetsize)
880 collector = cStringIO.StringIO()
888 collector = cStringIO.StringIO()
881 collector.write(self.buf)
889 collector.write(self.buf)
882 collected = len(self.buf)
890 collected = len(self.buf)
883 for chunk in self.iter:
891 for chunk in self.iter:
884 collector.write(chunk)
892 collector.write(chunk)
885 collected += len(chunk)
893 collected += len(chunk)
886 if collected >= targetsize:
894 if collected >= targetsize:
887 break
895 break
888 if collected < targetsize:
896 if collected < targetsize:
889 self.iter = False
897 self.iter = False
890 self.buf = collector.getvalue()
898 self.buf = collector.getvalue()
891 if len(self.buf) == l:
899 if len(self.buf) == l:
892 s, self.buf = str(self.buf), ''
900 s, self.buf = str(self.buf), ''
893 else:
901 else:
894 s, self.buf = self.buf[:l], buffer(self.buf, l)
902 s, self.buf = self.buf[:l], buffer(self.buf, l)
895 return s
903 return s
896
904
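A small self-checking sketch of chunkbuffer (hypothetical helper): fixed-size read() calls are served from an iterator of unevenly sized chunks:

def _example_chunkbuffer():
    buf = chunkbuffer(iter(['abc', 'defgh', 'ij']))
    assert buf.read(4) == 'abcd'
    assert buf.read(6) == 'efghij'
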
897 def filechunkiter(f, size=65536, limit=None):
905 def filechunkiter(f, size=65536, limit=None):
898 """Create a generator that produces the data in the file size
906 """Create a generator that produces the data in the file size
899 (default 65536) bytes at a time, up to optional limit (default is
907 (default 65536) bytes at a time, up to optional limit (default is
900 to read all data). Chunks may be less than size bytes if the
908 to read all data). Chunks may be less than size bytes if the
901 chunk is the last chunk in the file, or the file is a socket or
909 chunk is the last chunk in the file, or the file is a socket or
902 some other type of file that sometimes reads less data than is
910 some other type of file that sometimes reads less data than is
903 requested."""
911 requested."""
904 assert size >= 0
912 assert size >= 0
905 assert limit is None or limit >= 0
913 assert limit is None or limit >= 0
906 while True:
914 while True:
907 if limit is None: nbytes = size
915 if limit is None: nbytes = size
908 else: nbytes = min(limit, size)
916 else: nbytes = min(limit, size)
909 s = nbytes and f.read(nbytes)
917 s = nbytes and f.read(nbytes)
910 if not s: break
918 if not s: break
911 if limit: limit -= len(s)
919 if limit: limit -= len(s)
912 yield s
920 yield s
913
921
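A sketch of filechunkiter (file names are placeholders, the helper is not part of the original module): copy at most 64 kB of a file in 8 kB chunks:

def _example_chunked_copy(src, dst):
    ifp = posixfile(src, 'rb')
    ofp = posixfile(dst, 'wb')
    for chunk in filechunkiter(ifp, size=8192, limit=65536):
        ofp.write(chunk)
    ifp.close()
    ofp.close()
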
914 def makedate():
922 def makedate():
915 lt = time.localtime()
923 lt = time.localtime()
916 if lt[8] == 1 and time.daylight:
924 if lt[8] == 1 and time.daylight:
917 tz = time.altzone
925 tz = time.altzone
918 else:
926 else:
919 tz = time.timezone
927 tz = time.timezone
920 return time.mktime(lt), tz
928 return time.mktime(lt), tz
921
929
922 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
930 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
923 """represent a (unixtime, offset) tuple as a localized time.
931 """represent a (unixtime, offset) tuple as a localized time.
924 unixtime is seconds since the epoch, and offset is the time zone's
932 unixtime is seconds since the epoch, and offset is the time zone's
925 number of seconds away from UTC. if the format string contains no
933 number of seconds away from UTC. if the format string contains no
926 "%1" or "%2", no time zone is appended to the output."""
934 "%1" or "%2", no time zone is appended to the output."""
927 t, tz = date or makedate()
935 t, tz = date or makedate()
928 if "%1" in format or "%2" in format:
936 if "%1" in format or "%2" in format:
929 sign = (tz > 0) and "-" or "+"
937 sign = (tz > 0) and "-" or "+"
930 minutes = abs(tz) // 60
938 minutes = abs(tz) // 60
931 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
939 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
932 format = format.replace("%2", "%02d" % (minutes % 60))
940 format = format.replace("%2", "%02d" % (minutes % 60))
933 s = time.strftime(format, time.gmtime(float(t) - tz))
941 s = time.strftime(format, time.gmtime(float(t) - tz))
934 return s
942 return s
935
943
936 def shortdate(date=None):
944 def shortdate(date=None):
937 """turn (timestamp, tzoff) tuple into iso 8631 date."""
945 """turn (timestamp, tzoff) tuple into iso 8631 date."""
938 return datestr(date, format='%Y-%m-%d')
946 return datestr(date, format='%Y-%m-%d')
939
947
940 def strdate(string, format, defaults=[]):
948 def strdate(string, format, defaults=[]):
941 """parse a localized time string and return a (unixtime, offset) tuple.
949 """parse a localized time string and return a (unixtime, offset) tuple.
942 if the string cannot be parsed, ValueError is raised."""
950 if the string cannot be parsed, ValueError is raised."""
943 def timezone(string):
951 def timezone(string):
944 tz = string.split()[-1]
952 tz = string.split()[-1]
945 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
953 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
946 sign = (tz[0] == "+") and 1 or -1
954 sign = (tz[0] == "+") and 1 or -1
947 hours = int(tz[1:3])
955 hours = int(tz[1:3])
948 minutes = int(tz[3:5])
956 minutes = int(tz[3:5])
949 return -sign * (hours * 60 + minutes) * 60
957 return -sign * (hours * 60 + minutes) * 60
950 if tz == "GMT" or tz == "UTC":
958 if tz == "GMT" or tz == "UTC":
951 return 0
959 return 0
952 return None
960 return None
953
961
954 # NOTE: unixtime = localunixtime + offset
962 # NOTE: unixtime = localunixtime + offset
955 offset, date = timezone(string), string
963 offset, date = timezone(string), string
956 if offset != None:
964 if offset != None:
957 date = " ".join(string.split()[:-1])
965 date = " ".join(string.split()[:-1])
958
966
959 # add missing elements from defaults
967 # add missing elements from defaults
960 for part in defaults:
968 for part in defaults:
961 found = [True for p in part if ("%"+p) in format]
969 found = [True for p in part if ("%"+p) in format]
962 if not found:
970 if not found:
963 date += "@" + defaults[part]
971 date += "@" + defaults[part]
964 format += "@%" + part[0]
972 format += "@%" + part[0]
965
973
966 timetuple = time.strptime(date, format)
974 timetuple = time.strptime(date, format)
967 localunixtime = int(calendar.timegm(timetuple))
975 localunixtime = int(calendar.timegm(timetuple))
968 if offset is None:
976 if offset is None:
969 # local timezone
977 # local timezone
970 unixtime = int(time.mktime(timetuple))
978 unixtime = int(time.mktime(timetuple))
971 offset = unixtime - localunixtime
979 offset = unixtime - localunixtime
972 else:
980 else:
973 unixtime = localunixtime + offset
981 unixtime = localunixtime + offset
974 return unixtime, offset
982 return unixtime, offset
975
983
976 def parsedate(date, formats=None, defaults=None):
984 def parsedate(date, formats=None, defaults=None):
977 """parse a localized date/time string and return a (unixtime, offset) tuple.
985 """parse a localized date/time string and return a (unixtime, offset) tuple.
978
986
979 The date may be a "unixtime offset" string or in one of the specified
987 The date may be a "unixtime offset" string or in one of the specified
980 formats. If the date already is a (unixtime, offset) tuple, it is returned.
988 formats. If the date already is a (unixtime, offset) tuple, it is returned.
981 """
989 """
982 if not date:
990 if not date:
983 return 0, 0
991 return 0, 0
984 if isinstance(date, tuple) and len(date) == 2:
992 if isinstance(date, tuple) and len(date) == 2:
985 return date
993 return date
986 if not formats:
994 if not formats:
987 formats = defaultdateformats
995 formats = defaultdateformats
988 date = date.strip()
996 date = date.strip()
989 try:
997 try:
990 when, offset = map(int, date.split(' '))
998 when, offset = map(int, date.split(' '))
991 except ValueError:
999 except ValueError:
992 # fill out defaults
1000 # fill out defaults
993 if not defaults:
1001 if not defaults:
994 defaults = {}
1002 defaults = {}
995 now = makedate()
1003 now = makedate()
996 for part in "d mb yY HI M S".split():
1004 for part in "d mb yY HI M S".split():
997 if part not in defaults:
1005 if part not in defaults:
998 if part[0] in "HMS":
1006 if part[0] in "HMS":
999 defaults[part] = "00"
1007 defaults[part] = "00"
1000 else:
1008 else:
1001 defaults[part] = datestr(now, "%" + part[0])
1009 defaults[part] = datestr(now, "%" + part[0])
1002
1010
1003 for format in formats:
1011 for format in formats:
1004 try:
1012 try:
1005 when, offset = strdate(date, format, defaults)
1013 when, offset = strdate(date, format, defaults)
1006 except (ValueError, OverflowError):
1014 except (ValueError, OverflowError):
1007 pass
1015 pass
1008 else:
1016 else:
1009 break
1017 break
1010 else:
1018 else:
1011 raise Abort(_('invalid date: %r ') % date)
1019 raise Abort(_('invalid date: %r ') % date)
1012 # validate explicit (probably user-specified) date and
1020 # validate explicit (probably user-specified) date and
1013 # time zone offset. values must fit in signed 32 bits for
1021 # time zone offset. values must fit in signed 32 bits for
1014 # current 32-bit linux runtimes. timezones go from UTC-12
1022 # current 32-bit linux runtimes. timezones go from UTC-12
1015 # to UTC+14
1023 # to UTC+14
1016 if abs(when) > 0x7fffffff:
1024 if abs(when) > 0x7fffffff:
1017 raise Abort(_('date exceeds 32 bits: %d') % when)
1025 raise Abort(_('date exceeds 32 bits: %d') % when)
1018 if offset < -50400 or offset > 43200:
1026 if offset < -50400 or offset > 43200:
1019 raise Abort(_('impossible time zone offset: %d') % offset)
1027 raise Abort(_('impossible time zone offset: %d') % offset)
1020 return when, offset
1028 return when, offset
1021
1029
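A self-checking sketch of parsedate (hypothetical helper): a "unixtime offset" string is accepted directly, and an explicit format list parses human-readable dates, with east-of-UTC zones yielding negative offsets:

def _example_parsedate():
    assert parsedate('1165432709 0') == (1165432709, 0)
    when, offset = parsedate('2006-12-06 13:18 +0100', ['%Y-%m-%d %H:%M'])
    assert offset == -3600
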
1022 def matchdate(date):
1030 def matchdate(date):
1023 """Return a function that matches a given date match specifier
1031 """Return a function that matches a given date match specifier
1024
1032
1025 Formats include:
1033 Formats include:
1026
1034
1027 '{date}' match a given date to the accuracy provided
1035 '{date}' match a given date to the accuracy provided
1028
1036
1029 '<{date}' on or before a given date
1037 '<{date}' on or before a given date
1030
1038
1031 '>{date}' on or after a given date
1039 '>{date}' on or after a given date
1032
1040
1033 """
1041 """
1034
1042
1035 def lower(date):
1043 def lower(date):
1036 d = dict(mb="1", d="1")
1044 d = dict(mb="1", d="1")
1037 return parsedate(date, extendeddateformats, d)[0]
1045 return parsedate(date, extendeddateformats, d)[0]
1038
1046
1039 def upper(date):
1047 def upper(date):
1040 d = dict(mb="12", HI="23", M="59", S="59")
1048 d = dict(mb="12", HI="23", M="59", S="59")
1041 for days in "31 30 29".split():
1049 for days in "31 30 29".split():
1042 try:
1050 try:
1043 d["d"] = days
1051 d["d"] = days
1044 return parsedate(date, extendeddateformats, d)[0]
1052 return parsedate(date, extendeddateformats, d)[0]
1045 except:
1053 except:
1046 pass
1054 pass
1047 d["d"] = "28"
1055 d["d"] = "28"
1048 return parsedate(date, extendeddateformats, d)[0]
1056 return parsedate(date, extendeddateformats, d)[0]
1049
1057
1050 date = date.strip()
1058 date = date.strip()
1051 if date[0] == "<":
1059 if date[0] == "<":
1052 when = upper(date[1:])
1060 when = upper(date[1:])
1053 return lambda x: x <= when
1061 return lambda x: x <= when
1054 elif date[0] == ">":
1062 elif date[0] == ">":
1055 when = lower(date[1:])
1063 when = lower(date[1:])
1056 return lambda x: x >= when
1064 return lambda x: x >= when
1057 elif date[0] == "-":
1065 elif date[0] == "-":
1058 try:
1066 try:
1059 days = int(date[1:])
1067 days = int(date[1:])
1060 except ValueError:
1068 except ValueError:
1061 raise Abort(_("invalid day spec: %s") % date[1:])
1069 raise Abort(_("invalid day spec: %s") % date[1:])
1062 when = makedate()[0] - days * 3600 * 24
1070 when = makedate()[0] - days * 3600 * 24
1063 return lambda x: x >= when
1071 return lambda x: x >= when
1064 elif " to " in date:
1072 elif " to " in date:
1065 a, b = date.split(" to ")
1073 a, b = date.split(" to ")
1066 start, stop = lower(a), upper(b)
1074 start, stop = lower(a), upper(b)
1067 return lambda x: x >= start and x <= stop
1075 return lambda x: x >= start and x <= stop
1068 else:
1076 else:
1069 start, stop = lower(date), upper(date)
1077 start, stop = lower(date), upper(date)
1070 return lambda x: x >= start and x <= stop
1078 return lambda x: x >= start and x <= stop
1071
1079
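A sketch of matchdate (hypothetical helper): build a predicate from a date match specifier and test it against a parsed timestamp:

def _example_matchdate():
    newer = matchdate('>2006-01-01')
    when = parsedate('2006-12-06 13:18 +0100', ['%Y-%m-%d %H:%M'])[0]
    assert newer(when)
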
1072 def shortuser(user):
1080 def shortuser(user):
1073 """Return a short representation of a user name or email address."""
1081 """Return a short representation of a user name or email address."""
1074 f = user.find('@')
1082 f = user.find('@')
1075 if f >= 0:
1083 if f >= 0:
1076 user = user[:f]
1084 user = user[:f]
1077 f = user.find('<')
1085 f = user.find('<')
1078 if f >= 0:
1086 if f >= 0:
1079 user = user[f+1:]
1087 user = user[f+1:]
1080 f = user.find(' ')
1088 f = user.find(' ')
1081 if f >= 0:
1089 if f >= 0:
1082 user = user[:f]
1090 user = user[:f]
1083 f = user.find('.')
1091 f = user.find('.')
1084 if f >= 0:
1092 if f >= 0:
1085 user = user[:f]
1093 user = user[:f]
1086 return user
1094 return user
1087
1095
1088 def email(author):
1096 def email(author):
1089 '''get email of author.'''
1097 '''get email of author.'''
1090 r = author.find('>')
1098 r = author.find('>')
1091 if r == -1: r = None
1099 if r == -1: r = None
1092 return author[author.find('<')+1:r]
1100 return author[author.find('<')+1:r]
1093
1101
1094 def ellipsis(text, maxlength=400):
1102 def ellipsis(text, maxlength=400):
1095 """Trim string to at most maxlength (default: 400) characters."""
1103 """Trim string to at most maxlength (default: 400) characters."""
1096 if len(text) <= maxlength:
1104 if len(text) <= maxlength:
1097 return text
1105 return text
1098 else:
1106 else:
1099 return "%s..." % (text[:maxlength-3])
1107 return "%s..." % (text[:maxlength-3])
1100
1108
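A self-checking sketch of the small string helpers above (the author string is made up):

def _example_string_helpers():
    author = 'J. Random Hacker <jrh@example.com>'
    assert email(author) == 'jrh@example.com'
    assert shortuser(author) == 'jrh'
    assert ellipsis('abcdefgh', 5) == 'ab...'
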
1101 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1109 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1102 '''yield every hg repository under path, recursively.'''
1110 '''yield every hg repository under path, recursively.'''
1103 def errhandler(err):
1111 def errhandler(err):
1104 if err.filename == path:
1112 if err.filename == path:
1105 raise err
1113 raise err
1106 if followsym and hasattr(os.path, 'samestat'):
1114 if followsym and hasattr(os.path, 'samestat'):
1107 def _add_dir_if_not_there(dirlst, dirname):
1115 def _add_dir_if_not_there(dirlst, dirname):
1108 match = False
1116 match = False
1109 samestat = os.path.samestat
1117 samestat = os.path.samestat
1110 dirstat = os.stat(dirname)
1118 dirstat = os.stat(dirname)
1111 for lstdirstat in dirlst:
1119 for lstdirstat in dirlst:
1112 if samestat(dirstat, lstdirstat):
1120 if samestat(dirstat, lstdirstat):
1113 match = True
1121 match = True
1114 break
1122 break
1115 if not match:
1123 if not match:
1116 dirlst.append(dirstat)
1124 dirlst.append(dirstat)
1117 return not match
1125 return not match
1118 else:
1126 else:
1119 followsym = False
1127 followsym = False
1120
1128
1121 if (seen_dirs is None) and followsym:
1129 if (seen_dirs is None) and followsym:
1122 seen_dirs = []
1130 seen_dirs = []
1123 _add_dir_if_not_there(seen_dirs, path)
1131 _add_dir_if_not_there(seen_dirs, path)
1124 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1132 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1125 dirs.sort()
1133 dirs.sort()
1126 if '.hg' in dirs:
1134 if '.hg' in dirs:
1127 yield root # found a repository
1135 yield root # found a repository
1128 qroot = os.path.join(root, '.hg', 'patches')
1136 qroot = os.path.join(root, '.hg', 'patches')
1129 if os.path.isdir(os.path.join(qroot, '.hg')):
1137 if os.path.isdir(os.path.join(qroot, '.hg')):
1130 yield qroot # we have a patch queue repo here
1138 yield qroot # we have a patch queue repo here
1131 if recurse:
1139 if recurse:
1132 # avoid recursing inside the .hg directory
1140 # avoid recursing inside the .hg directory
1133 dirs.remove('.hg')
1141 dirs.remove('.hg')
1134 else:
1142 else:
1135 dirs[:] = [] # don't descend further
1143 dirs[:] = [] # don't descend further
1136 elif followsym:
1144 elif followsym:
1137 newdirs = []
1145 newdirs = []
1138 for d in dirs:
1146 for d in dirs:
1139 fname = os.path.join(root, d)
1147 fname = os.path.join(root, d)
1140 if _add_dir_if_not_there(seen_dirs, fname):
1148 if _add_dir_if_not_there(seen_dirs, fname):
1141 if os.path.islink(fname):
1149 if os.path.islink(fname):
1142 for hgname in walkrepos(fname, True, seen_dirs):
1150 for hgname in walkrepos(fname, True, seen_dirs):
1143 yield hgname
1151 yield hgname
1144 else:
1152 else:
1145 newdirs.append(d)
1153 newdirs.append(d)
1146 dirs[:] = newdirs
1154 dirs[:] = newdirs
1147
1155
1148 _rcpath = None
1156 _rcpath = None
1149
1157
1150 def os_rcpath():
1158 def os_rcpath():
1151 '''return default os-specific hgrc search path'''
1159 '''return default os-specific hgrc search path'''
1152 path = system_rcpath()
1160 path = system_rcpath()
1153 path.extend(user_rcpath())
1161 path.extend(user_rcpath())
1154 path = [os.path.normpath(f) for f in path]
1162 path = [os.path.normpath(f) for f in path]
1155 return path
1163 return path
1156
1164
1157 def rcpath():
1165 def rcpath():
1158 '''return hgrc search path. if env var HGRCPATH is set, use it.
1166 '''return hgrc search path. if env var HGRCPATH is set, use it.
1159 for each item in path, if directory, use files ending in .rc,
1167 for each item in path, if directory, use files ending in .rc,
1160 else use item.
1168 else use item.
1161 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1169 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1162 if no HGRCPATH, use default os-specific path.'''
1170 if no HGRCPATH, use default os-specific path.'''
1163 global _rcpath
1171 global _rcpath
1164 if _rcpath is None:
1172 if _rcpath is None:
1165 if 'HGRCPATH' in os.environ:
1173 if 'HGRCPATH' in os.environ:
1166 _rcpath = []
1174 _rcpath = []
1167 for p in os.environ['HGRCPATH'].split(os.pathsep):
1175 for p in os.environ['HGRCPATH'].split(os.pathsep):
1168 if not p: continue
1176 if not p: continue
1169 p = expandpath(p)
1177 p = expandpath(p)
1170 if os.path.isdir(p):
1178 if os.path.isdir(p):
1171 for f, kind in osutil.listdir(p):
1179 for f, kind in osutil.listdir(p):
1172 if f.endswith('.rc'):
1180 if f.endswith('.rc'):
1173 _rcpath.append(os.path.join(p, f))
1181 _rcpath.append(os.path.join(p, f))
1174 else:
1182 else:
1175 _rcpath.append(p)
1183 _rcpath.append(p)
1176 else:
1184 else:
1177 _rcpath = os_rcpath()
1185 _rcpath = os_rcpath()
1178 return _rcpath
1186 return _rcpath
1179
1187
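A sketch of the HGRCPATH handling in rcpath(); it mutates the process environment and the module-level cache, so it is for illustration only:

def _example_rcpath():
    global _rcpath
    os.environ['HGRCPATH'] = ''   # empty value: only .hg/hgrc is consulted
    _rcpath = None                # discard the cached result for the demo
    assert rcpath() == []
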
1180 def bytecount(nbytes):
1188 def bytecount(nbytes):
1181 '''return byte count formatted as readable string, with units'''
1189 '''return byte count formatted as readable string, with units'''
1182
1190
1183 units = (
1191 units = (
1184 (100, 1<<30, _('%.0f GB')),
1192 (100, 1<<30, _('%.0f GB')),
1185 (10, 1<<30, _('%.1f GB')),
1193 (10, 1<<30, _('%.1f GB')),
1186 (1, 1<<30, _('%.2f GB')),
1194 (1, 1<<30, _('%.2f GB')),
1187 (100, 1<<20, _('%.0f MB')),
1195 (100, 1<<20, _('%.0f MB')),
1188 (10, 1<<20, _('%.1f MB')),
1196 (10, 1<<20, _('%.1f MB')),
1189 (1, 1<<20, _('%.2f MB')),
1197 (1, 1<<20, _('%.2f MB')),
1190 (100, 1<<10, _('%.0f KB')),
1198 (100, 1<<10, _('%.0f KB')),
1191 (10, 1<<10, _('%.1f KB')),
1199 (10, 1<<10, _('%.1f KB')),
1192 (1, 1<<10, _('%.2f KB')),
1200 (1, 1<<10, _('%.2f KB')),
1193 (1, 1, _('%.0f bytes')),
1201 (1, 1, _('%.0f bytes')),
1194 )
1202 )
1195
1203
1196 for multiplier, divisor, format in units:
1204 for multiplier, divisor, format in units:
1197 if nbytes >= divisor * multiplier:
1205 if nbytes >= divisor * multiplier:
1198 return format % (nbytes / float(divisor))
1206 return format % (nbytes / float(divisor))
1199 return units[-1][2] % nbytes
1207 return units[-1][2] % nbytes
1200
1208
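A self-checking sketch of bytecount (assuming no gettext translation is active, so the format strings pass through unchanged):

def _example_bytecount():
    assert bytecount(0) == '0 bytes'
    assert bytecount(12345) == '12.1 KB'
    assert bytecount(100 * (1 << 20)) == '100 MB'
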
1201 def drop_scheme(scheme, path):
1209 def drop_scheme(scheme, path):
1202 sc = scheme + ':'
1210 sc = scheme + ':'
1203 if path.startswith(sc):
1211 if path.startswith(sc):
1204 path = path[len(sc):]
1212 path = path[len(sc):]
1205 if path.startswith('//'):
1213 if path.startswith('//'):
1206 if scheme == 'file':
1214 if scheme == 'file':
1207 i = path.find('/', 2)
1215 i = path.find('/', 2)
1208 if i == -1:
1216 if i == -1:
1209 return ''
1217 return ''
1210 # On Windows, absolute paths are rooted at the current drive
1218 # On Windows, absolute paths are rooted at the current drive
1211 # root. On POSIX they are rooted at the file system root.
1219 # root. On POSIX they are rooted at the file system root.
1212 if os.name == 'nt':
1220 if os.name == 'nt':
1213 droot = os.path.splitdrive(os.getcwd())[0] + '/'
1221 droot = os.path.splitdrive(os.getcwd())[0] + '/'
1214 path = os.path.join(droot, path[i+1:])
1222 path = os.path.join(droot, path[i+1:])
1215 else:
1223 else:
1216 path = path[i:]
1224 path = path[i:]
1217 else:
1225 else:
1218 path = path[2:]
1226 path = path[2:]
1219 return path
1227 return path
1220
1228
1221 def uirepr(s):
1229 def uirepr(s):
1222 # Avoid double backslash in Windows path repr()
1230 # Avoid double backslash in Windows path repr()
1223 return repr(s).replace('\\\\', '\\')
1231 return repr(s).replace('\\\\', '\\')
1224
1232
1225 def termwidth():
1233 def termwidth():
1226 if 'COLUMNS' in os.environ:
1234 if 'COLUMNS' in os.environ:
1227 try:
1235 try:
1228 return int(os.environ['COLUMNS'])
1236 return int(os.environ['COLUMNS'])
1229 except ValueError:
1237 except ValueError:
1230 pass
1238 pass
1231 try:
1239 try:
1232 import termios, array, fcntl
1240 import termios, array, fcntl
1233 for dev in (sys.stdout, sys.stdin):
1241 for dev in (sys.stdout, sys.stdin):
1234 try:
1242 try:
1235 try:
1243 try:
1236 fd = dev.fileno()
1244 fd = dev.fileno()
1237 except AttributeError:
1245 except AttributeError:
1238 continue
1246 continue
1239 if not os.isatty(fd):
1247 if not os.isatty(fd):
1240 continue
1248 continue
1241 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1249 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1242 return array.array('h', arri)[1]
1250 return array.array('h', arri)[1]
1243 except ValueError:
1251 except ValueError:
1244 pass
1252 pass
1245 except IOError, e:
1253 except IOError, e:
1246 if e[0] == errno.EINVAL:
1254 if e[0] == errno.EINVAL:
1247 pass
1255 pass
1248 else:
1256 else:
1249 raise
1257 raise
1250 except ImportError:
1258 except ImportError:
1251 pass
1259 pass
1252 return 80
1260 return 80
1253
1261
1254 def wrap(line, hangindent, width=None):
1262 def wrap(line, hangindent, width=None):
1255 if width is None:
1263 if width is None:
1256 width = termwidth() - 2
1264 width = termwidth() - 2
1257 if width <= hangindent:
1265 if width <= hangindent:
1258 # adjust for weird terminal size
1266 # adjust for weird terminal size
1259 width = max(78, hangindent + 1)
1267 width = max(78, hangindent + 1)
1260 padding = '\n' + ' ' * hangindent
1268 padding = '\n' + ' ' * hangindent
1261 # To avoid corrupting multi-byte characters in line, we must wrap
1269 # To avoid corrupting multi-byte characters in line, we must wrap
1262 # a Unicode string instead of a bytestring.
1270 # a Unicode string instead of a bytestring.
1263 try:
1271 try:
1264 u = line.decode(encoding.encoding)
1272 u = line.decode(encoding.encoding)
1265 w = padding.join(textwrap.wrap(u, width=width - hangindent))
1273 w = padding.join(textwrap.wrap(u, width=width - hangindent))
1266 return w.encode(encoding.encoding)
1274 return w.encode(encoding.encoding)
1267 except UnicodeDecodeError:
1275 except UnicodeDecodeError:
1268 return padding.join(textwrap.wrap(line, width=width - hangindent))
1276 return padding.join(textwrap.wrap(line, width=width - hangindent))
1269
1277
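A sketch of wrap (assuming an ASCII-compatible locale encoding): a hanging indent of four columns and an explicit width keep the result deterministic:

def _example_wrap():
    assert wrap('a b c d e f g h', 4, width=10) == 'a b c\n    d e f\n    g h'
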
1270 def iterlines(iterator):
1278 def iterlines(iterator):
1271 for chunk in iterator:
1279 for chunk in iterator:
1272 for line in chunk.splitlines():
1280 for line in chunk.splitlines():
1273 yield line
1281 yield line
1274
1282
1275 def expandpath(path):
1283 def expandpath(path):
1276 return os.path.expanduser(os.path.expandvars(path))
1284 return os.path.expanduser(os.path.expandvars(path))
1277
1285
1278 def hgcmd():
1286 def hgcmd():
1279 """Return the command used to execute current hg
1287 """Return the command used to execute current hg
1280
1288
1281 This is different from hgexecutable() because on Windows we want
1289 This is different from hgexecutable() because on Windows we want
1282 to avoid things opening new shell windows like batch files, so we
1290 to avoid things opening new shell windows like batch files, so we
1283 get either the python call or current executable.
1291 get either the python call or current executable.
1284 """
1292 """
1285 if main_is_frozen():
1293 if main_is_frozen():
1286 return [sys.executable]
1294 return [sys.executable]
1287 return gethgcmd()
1295 return gethgcmd()
@@ -1,174 +1,183 b''
1 # win32.py - utility functions that use win32 API
1 # win32.py - utility functions that use win32 API
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 """Utility functions that use win32 API.
8 """Utility functions that use win32 API.
9
9
10 Mark Hammond's win32all package allows better functionality on
10 Mark Hammond's win32all package allows better functionality on
11 Windows. This module overrides definitions in util.py. If not
11 Windows. This module overrides definitions in util.py. If not
12 available, import of this module will fail, and generic code will be
12 available, import of this module will fail, and generic code will be
13 used.
13 used.
14 """
14 """
15
15
16 import win32api
16 import win32api
17
17
18 import errno, os, sys, pywintypes, win32con, win32file, win32process
18 import errno, os, sys, pywintypes, win32con, win32file, win32process
19 import winerror
19 import winerror, win32gui
20 import osutil, encoding
20 import osutil, encoding
21 from win32com.shell import shell, shellcon
21 from win32com.shell import shell, shellcon
22
22
23 def os_link(src, dst):
23 def os_link(src, dst):
24 try:
24 try:
25 win32file.CreateHardLink(dst, src)
25 win32file.CreateHardLink(dst, src)
26 # CreateHardLink sometimes succeeds on mapped drives but
26 # CreateHardLink sometimes succeeds on mapped drives but
27 # following nlinks() returns 1. Check it now and bail out.
27 # following nlinks() returns 1. Check it now and bail out.
28 if nlinks(src) < 2:
28 if nlinks(src) < 2:
29 try:
29 try:
30 win32file.DeleteFile(dst)
30 win32file.DeleteFile(dst)
31 except:
31 except:
32 pass
32 pass
33 # Fake hardlinking error
33 # Fake hardlinking error
34 raise OSError(errno.EINVAL, 'Hardlinking not supported')
34 raise OSError(errno.EINVAL, 'Hardlinking not supported')
35 except pywintypes.error, details:
35 except pywintypes.error, details:
36 raise OSError(errno.EINVAL, 'target implements hardlinks improperly')
36 raise OSError(errno.EINVAL, 'target implements hardlinks improperly')
37 except NotImplementedError: # Another fake error on Win98
37 except NotImplementedError: # Another fake error on Win98
38 raise OSError(errno.EINVAL, 'Hardlinking not supported')
38 raise OSError(errno.EINVAL, 'Hardlinking not supported')
39
39
40 def _getfileinfo(pathname):
40 def _getfileinfo(pathname):
41 """Return number of hardlinks for the given file."""
41 """Return number of hardlinks for the given file."""
42 try:
42 try:
43 fh = win32file.CreateFile(pathname,
43 fh = win32file.CreateFile(pathname,
44 win32file.GENERIC_READ, win32file.FILE_SHARE_READ,
44 win32file.GENERIC_READ, win32file.FILE_SHARE_READ,
45 None, win32file.OPEN_EXISTING, 0, None)
45 None, win32file.OPEN_EXISTING, 0, None)
46 try:
46 try:
47 return win32file.GetFileInformationByHandle(fh)
47 return win32file.GetFileInformationByHandle(fh)
48 finally:
48 finally:
49 fh.Close()
49 fh.Close()
50 except pywintypes.error:
50 except pywintypes.error:
51 return None
51 return None
52
52
53 def nlinks(pathname):
53 def nlinks(pathname):
54 """Return number of hardlinks for the given file."""
54 """Return number of hardlinks for the given file."""
55 res = _getfileinfo(pathname)
55 res = _getfileinfo(pathname)
56 if res is not None:
56 if res is not None:
57 return res[7]
57 return res[7]
58 else:
58 else:
59 return os.lstat(pathname).st_nlink
59 return os.lstat(pathname).st_nlink
60
60
61 def samefile(fpath1, fpath2):
61 def samefile(fpath1, fpath2):
62 """Returns whether fpath1 and fpath2 refer to the same file. This is only
62 """Returns whether fpath1 and fpath2 refer to the same file. This is only
63 guaranteed to work for files, not directories."""
63 guaranteed to work for files, not directories."""
64 res1 = _getfileinfo(fpath1)
64 res1 = _getfileinfo(fpath1)
65 res2 = _getfileinfo(fpath2)
65 res2 = _getfileinfo(fpath2)
66 if res1 is not None and res2 is not None:
66 if res1 is not None and res2 is not None:
67 # Index 4 is the volume serial number, and 8 and 9 contain the file ID
67 # Index 4 is the volume serial number, and 8 and 9 contain the file ID
68 return res1[4] == res2[4] and res1[8] == res2[8] and res1[9] == res2[9]
68 return res1[4] == res2[4] and res1[8] == res2[8] and res1[9] == res2[9]
69 else:
69 else:
70 return False
70 return False
71
71
72 def samedevice(fpath1, fpath2):
72 def samedevice(fpath1, fpath2):
73 """Returns whether fpath1 and fpath2 are on the same device. This is only
73 """Returns whether fpath1 and fpath2 are on the same device. This is only
74 guaranteed to work for files, not directories."""
74 guaranteed to work for files, not directories."""
75 res1 = _getfileinfo(fpath1)
75 res1 = _getfileinfo(fpath1)
76 res2 = _getfileinfo(fpath2)
76 res2 = _getfileinfo(fpath2)
77 if res1 is not None and res2 is not None:
77 if res1 is not None and res2 is not None:
78 return res1[4] == res2[4]
78 return res1[4] == res2[4]
79 else:
79 else:
80 return False
80 return False
81
81
82 def testpid(pid):
82 def testpid(pid):
83 '''return True if pid is still running or unable to
83 '''return True if pid is still running or unable to
84 determine, False otherwise'''
84 determine, False otherwise'''
85 try:
85 try:
86 handle = win32api.OpenProcess(
86 handle = win32api.OpenProcess(
87 win32con.PROCESS_QUERY_INFORMATION, False, pid)
87 win32con.PROCESS_QUERY_INFORMATION, False, pid)
88 if handle:
88 if handle:
89 status = win32process.GetExitCodeProcess(handle)
89 status = win32process.GetExitCodeProcess(handle)
90 return status == win32con.STILL_ACTIVE
90 return status == win32con.STILL_ACTIVE
91 except pywintypes.error, details:
91 except pywintypes.error, details:
92 return details[0] != winerror.ERROR_INVALID_PARAMETER
92 return details[0] != winerror.ERROR_INVALID_PARAMETER
93 return True
93 return True
94
94
95 def lookup_reg(key, valname=None, scope=None):
95 def lookup_reg(key, valname=None, scope=None):
96 ''' Look up a key/value name in the Windows registry.
96 ''' Look up a key/value name in the Windows registry.
97
97
98 valname: value name. If unspecified, the default value for the key
98 valname: value name. If unspecified, the default value for the key
99 is used.
99 is used.
100 scope: optionally specify scope for registry lookup, this can be
100 scope: optionally specify scope for registry lookup, this can be
101 a sequence of scopes to look up in order. Default (CURRENT_USER,
101 a sequence of scopes to look up in order. Default (CURRENT_USER,
102 LOCAL_MACHINE).
102 LOCAL_MACHINE).
103 '''
103 '''
104 try:
104 try:
105 from _winreg import HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE, \
105 from _winreg import HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE, \
106 QueryValueEx, OpenKey
106 QueryValueEx, OpenKey
107 except ImportError:
107 except ImportError:
108 return None
108 return None
109
109
110 if scope is None:
110 if scope is None:
111 scope = (HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE)
111 scope = (HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE)
112 elif not isinstance(scope, (list, tuple)):
112 elif not isinstance(scope, (list, tuple)):
113 scope = (scope,)
113 scope = (scope,)
114 for s in scope:
114 for s in scope:
115 try:
115 try:
116 val = QueryValueEx(OpenKey(s, key), valname)[0]
116 val = QueryValueEx(OpenKey(s, key), valname)[0]
117 # never let a Unicode string escape into the wild
117 # never let a Unicode string escape into the wild
118 return encoding.tolocal(val.encode('UTF-8'))
118 return encoding.tolocal(val.encode('UTF-8'))
119 except EnvironmentError:
119 except EnvironmentError:
120 pass
120 pass
121
121
122 def system_rcpath_win32():
122 def system_rcpath_win32():
123 '''return default os-specific hgrc search path'''
123 '''return default os-specific hgrc search path'''
124 proc = win32api.GetCurrentProcess()
124 proc = win32api.GetCurrentProcess()
125 try:
125 try:
126 # This will fail on windows < NT
126 # This will fail on windows < NT
127 filename = win32process.GetModuleFileNameEx(proc, 0)
127 filename = win32process.GetModuleFileNameEx(proc, 0)
128 except:
128 except:
129 filename = win32api.GetModuleFileName(0)
129 filename = win32api.GetModuleFileName(0)
130 # Use mercurial.ini found in directory with hg.exe
130 # Use mercurial.ini found in directory with hg.exe
131 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
131 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
132 if os.path.isfile(progrc):
132 if os.path.isfile(progrc):
133 return [progrc]
133 return [progrc]
134 # else look for a system rcpath in the registry
134 # else look for a system rcpath in the registry
135 try:
135 try:
136 value = win32api.RegQueryValue(
136 value = win32api.RegQueryValue(
137 win32con.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Mercurial')
137 win32con.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Mercurial')
138 rcpath = []
138 rcpath = []
139 for p in value.split(os.pathsep):
139 for p in value.split(os.pathsep):
140 if p.lower().endswith('mercurial.ini'):
140 if p.lower().endswith('mercurial.ini'):
141 rcpath.append(p)
141 rcpath.append(p)
142 elif os.path.isdir(p):
142 elif os.path.isdir(p):
143 for f, kind in osutil.listdir(p):
143 for f, kind in osutil.listdir(p):
144 if f.endswith('.rc'):
144 if f.endswith('.rc'):
145 rcpath.append(os.path.join(p, f))
145 rcpath.append(os.path.join(p, f))
146 return rcpath
146 return rcpath
147 except pywintypes.error:
147 except pywintypes.error:
148 return []
148 return []
149
149
150 def user_rcpath_win32():
150 def user_rcpath_win32():
151 '''return os-specific hgrc search path to the user dir'''
151 '''return os-specific hgrc search path to the user dir'''
152 userdir = os.path.expanduser('~')
152 userdir = os.path.expanduser('~')
153 if sys.getwindowsversion()[3] != 2 and userdir == '~':
153 if sys.getwindowsversion()[3] != 2 and userdir == '~':
154 # We are on win < nt: fetch the APPDATA directory location and use
154 # We are on win < nt: fetch the APPDATA directory location and use
155 # the parent directory as the user home dir.
155 # the parent directory as the user home dir.
156 appdir = shell.SHGetPathFromIDList(
156 appdir = shell.SHGetPathFromIDList(
157 shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_APPDATA))
157 shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_APPDATA))
158 userdir = os.path.dirname(appdir)
158 userdir = os.path.dirname(appdir)
159 return [os.path.join(userdir, 'mercurial.ini'),
159 return [os.path.join(userdir, 'mercurial.ini'),
160 os.path.join(userdir, '.hgrc')]
160 os.path.join(userdir, '.hgrc')]
161
161
162 def getuser():
162 def getuser():
163 '''return name of current user'''
163 '''return name of current user'''
164 return win32api.GetUserName()
164 return win32api.GetUserName()
165
165
166 def set_signal_handler_win32():
166 def set_signal_handler_win32():
167 """Register a termination handler for console events including
167 """Register a termination handler for console events including
168 CTRL+C. python signal handlers do not work well with socket
168 CTRL+C. python signal handlers do not work well with socket
169 operations.
169 operations.
170 """
170 """
171 def handler(event):
171 def handler(event):
172 win32process.ExitProcess(1)
172 win32process.ExitProcess(1)
173 win32api.SetConsoleCtrlHandler(handler)
173 win32api.SetConsoleCtrlHandler(handler)
174
174
175 def hidewindow():
176 def callback(*args, **kwargs):
177 hwnd, pid = args
178 wpid = win32process.GetWindowThreadProcessId(hwnd)[1]
179 if pid == wpid:
180 win32gui.ShowWindow(hwnd, win32con.SW_HIDE)
181
182 pid = win32process.GetCurrentProcessId()
183 win32gui.EnumWindows(callback, pid)
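
A note on the hidewindow() helper above: EnumWindows() visits every top-level window, and the callback hides those whose owning process id matches our own, so a freshly started background process can call it once to make its console disappear (illustrative call only, not part of the original file):

    hidewindow()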