##// END OF EJS Templates
util: take propertycache from context.py
Matt Mackall -
r8207:dd8d5be5 default
parent child Browse files
Show More
@@ -1,806 +1,799 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import nullid, nullrev, short, hex
8 from node import nullid, nullrev, short, hex
9 from i18n import _
9 from i18n import _
10 import ancestor, bdiff, error, util, os, errno
10 import ancestor, bdiff, error, util, os, errno
11
11
12 class propertycache(object):
12 propertycache = util.propertycache
13 def __init__(self, func):
14 self.func = func
15 self.name = func.__name__
16 def __get__(self, obj, type=None):
17 result = self.func(obj)
18 setattr(obj, self.name, result)
19 return result
20
13
21 class changectx(object):
14 class changectx(object):
22 """A changecontext object makes access to data related to a particular
15 """A changecontext object makes access to data related to a particular
23 changeset convenient."""
16 changeset convenient."""
24 def __init__(self, repo, changeid=''):
17 def __init__(self, repo, changeid=''):
25 """changeid is a revision number, node, or tag"""
18 """changeid is a revision number, node, or tag"""
26 if changeid == '':
19 if changeid == '':
27 changeid = '.'
20 changeid = '.'
28 self._repo = repo
21 self._repo = repo
29 if isinstance(changeid, (long, int)):
22 if isinstance(changeid, (long, int)):
30 self._rev = changeid
23 self._rev = changeid
31 self._node = self._repo.changelog.node(changeid)
24 self._node = self._repo.changelog.node(changeid)
32 else:
25 else:
33 self._node = self._repo.lookup(changeid)
26 self._node = self._repo.lookup(changeid)
34 self._rev = self._repo.changelog.rev(self._node)
27 self._rev = self._repo.changelog.rev(self._node)
35
28
36 def __str__(self):
29 def __str__(self):
37 return short(self.node())
30 return short(self.node())
38
31
39 def __int__(self):
32 def __int__(self):
40 return self.rev()
33 return self.rev()
41
34
42 def __repr__(self):
35 def __repr__(self):
43 return "<changectx %s>" % str(self)
36 return "<changectx %s>" % str(self)
44
37
45 def __hash__(self):
38 def __hash__(self):
46 try:
39 try:
47 return hash(self._rev)
40 return hash(self._rev)
48 except AttributeError:
41 except AttributeError:
49 return id(self)
42 return id(self)
50
43
51 def __eq__(self, other):
44 def __eq__(self, other):
52 try:
45 try:
53 return self._rev == other._rev
46 return self._rev == other._rev
54 except AttributeError:
47 except AttributeError:
55 return False
48 return False
56
49
57 def __ne__(self, other):
50 def __ne__(self, other):
58 return not (self == other)
51 return not (self == other)
59
52
60 def __nonzero__(self):
53 def __nonzero__(self):
61 return self._rev != nullrev
54 return self._rev != nullrev
62
55
63 @propertycache
56 @propertycache
64 def _changeset(self):
57 def _changeset(self):
65 return self._repo.changelog.read(self.node())
58 return self._repo.changelog.read(self.node())
66
59
67 @propertycache
60 @propertycache
68 def _manifest(self):
61 def _manifest(self):
69 return self._repo.manifest.read(self._changeset[0])
62 return self._repo.manifest.read(self._changeset[0])
70
63
71 @propertycache
64 @propertycache
72 def _manifestdelta(self):
65 def _manifestdelta(self):
73 return self._repo.manifest.readdelta(self._changeset[0])
66 return self._repo.manifest.readdelta(self._changeset[0])
74
67
75 @propertycache
68 @propertycache
76 def _parents(self):
69 def _parents(self):
77 p = self._repo.changelog.parentrevs(self._rev)
70 p = self._repo.changelog.parentrevs(self._rev)
78 if p[1] == nullrev:
71 if p[1] == nullrev:
79 p = p[:-1]
72 p = p[:-1]
80 return [changectx(self._repo, x) for x in p]
73 return [changectx(self._repo, x) for x in p]
81
74
82 def __contains__(self, key):
75 def __contains__(self, key):
83 return key in self._manifest
76 return key in self._manifest
84
77
85 def __getitem__(self, key):
78 def __getitem__(self, key):
86 return self.filectx(key)
79 return self.filectx(key)
87
80
88 def __iter__(self):
81 def __iter__(self):
89 for f in util.sort(self._manifest):
82 for f in util.sort(self._manifest):
90 yield f
83 yield f
91
84
92 def changeset(self): return self._changeset
85 def changeset(self): return self._changeset
93 def manifest(self): return self._manifest
86 def manifest(self): return self._manifest
94
87
95 def rev(self): return self._rev
88 def rev(self): return self._rev
96 def node(self): return self._node
89 def node(self): return self._node
97 def hex(self): return hex(self._node)
90 def hex(self): return hex(self._node)
98 def user(self): return self._changeset[1]
91 def user(self): return self._changeset[1]
99 def date(self): return self._changeset[2]
92 def date(self): return self._changeset[2]
100 def files(self): return self._changeset[3]
93 def files(self): return self._changeset[3]
101 def description(self): return self._changeset[4]
94 def description(self): return self._changeset[4]
102 def branch(self): return self._changeset[5].get("branch")
95 def branch(self): return self._changeset[5].get("branch")
103 def extra(self): return self._changeset[5]
96 def extra(self): return self._changeset[5]
104 def tags(self): return self._repo.nodetags(self._node)
97 def tags(self): return self._repo.nodetags(self._node)
105
98
106 def parents(self):
99 def parents(self):
107 """return contexts for each parent changeset"""
100 """return contexts for each parent changeset"""
108 return self._parents
101 return self._parents
109
102
110 def children(self):
103 def children(self):
111 """return contexts for each child changeset"""
104 """return contexts for each child changeset"""
112 c = self._repo.changelog.children(self._node)
105 c = self._repo.changelog.children(self._node)
113 return [changectx(self._repo, x) for x in c]
106 return [changectx(self._repo, x) for x in c]
114
107
115 def ancestors(self):
108 def ancestors(self):
116 for a in self._repo.changelog.ancestors(self._rev):
109 for a in self._repo.changelog.ancestors(self._rev):
117 yield changectx(self._repo, a)
110 yield changectx(self._repo, a)
118
111
119 def descendants(self):
112 def descendants(self):
120 for d in self._repo.changelog.descendants(self._rev):
113 for d in self._repo.changelog.descendants(self._rev):
121 yield changectx(self._repo, d)
114 yield changectx(self._repo, d)
122
115
123 def _fileinfo(self, path):
116 def _fileinfo(self, path):
124 if '_manifest' in self.__dict__:
117 if '_manifest' in self.__dict__:
125 try:
118 try:
126 return self._manifest[path], self._manifest.flags(path)
119 return self._manifest[path], self._manifest.flags(path)
127 except KeyError:
120 except KeyError:
128 raise error.LookupError(self._node, path,
121 raise error.LookupError(self._node, path,
129 _('not found in manifest'))
122 _('not found in manifest'))
130 if '_manifestdelta' in self.__dict__ or path in self.files():
123 if '_manifestdelta' in self.__dict__ or path in self.files():
131 if path in self._manifestdelta:
124 if path in self._manifestdelta:
132 return self._manifestdelta[path], self._manifestdelta.flags(path)
125 return self._manifestdelta[path], self._manifestdelta.flags(path)
133 node, flag = self._repo.manifest.find(self._changeset[0], path)
126 node, flag = self._repo.manifest.find(self._changeset[0], path)
134 if not node:
127 if not node:
135 raise error.LookupError(self._node, path,
128 raise error.LookupError(self._node, path,
136 _('not found in manifest'))
129 _('not found in manifest'))
137
130
138 return node, flag
131 return node, flag
139
132
140 def filenode(self, path):
133 def filenode(self, path):
141 return self._fileinfo(path)[0]
134 return self._fileinfo(path)[0]
142
135
143 def flags(self, path):
136 def flags(self, path):
144 try:
137 try:
145 return self._fileinfo(path)[1]
138 return self._fileinfo(path)[1]
146 except error.LookupError:
139 except error.LookupError:
147 return ''
140 return ''
148
141
149 def filectx(self, path, fileid=None, filelog=None):
142 def filectx(self, path, fileid=None, filelog=None):
150 """get a file context from this changeset"""
143 """get a file context from this changeset"""
151 if fileid is None:
144 if fileid is None:
152 fileid = self.filenode(path)
145 fileid = self.filenode(path)
153 return filectx(self._repo, path, fileid=fileid,
146 return filectx(self._repo, path, fileid=fileid,
154 changectx=self, filelog=filelog)
147 changectx=self, filelog=filelog)
155
148
156 def ancestor(self, c2):
149 def ancestor(self, c2):
157 """
150 """
158 return the ancestor context of self and c2
151 return the ancestor context of self and c2
159 """
152 """
160 n = self._repo.changelog.ancestor(self._node, c2._node)
153 n = self._repo.changelog.ancestor(self._node, c2._node)
161 return changectx(self._repo, n)
154 return changectx(self._repo, n)
162
155
163 def walk(self, match):
156 def walk(self, match):
164 fdict = dict.fromkeys(match.files())
157 fdict = dict.fromkeys(match.files())
165 # for dirstate.walk, files=['.'] means "walk the whole tree".
158 # for dirstate.walk, files=['.'] means "walk the whole tree".
166 # follow that here, too
159 # follow that here, too
167 fdict.pop('.', None)
160 fdict.pop('.', None)
168 for fn in self:
161 for fn in self:
169 for ffn in fdict:
162 for ffn in fdict:
170 # match if the file is the exact name or a directory
163 # match if the file is the exact name or a directory
171 if ffn == fn or fn.startswith("%s/" % ffn):
164 if ffn == fn or fn.startswith("%s/" % ffn):
172 del fdict[ffn]
165 del fdict[ffn]
173 break
166 break
174 if match(fn):
167 if match(fn):
175 yield fn
168 yield fn
176 for fn in util.sort(fdict):
169 for fn in util.sort(fdict):
177 if match.bad(fn, 'No such file in rev ' + str(self)) and match(fn):
170 if match.bad(fn, 'No such file in rev ' + str(self)) and match(fn):
178 yield fn
171 yield fn
179
172
180 class filectx(object):
173 class filectx(object):
181 """A filecontext object makes access to data related to a particular
174 """A filecontext object makes access to data related to a particular
182 filerevision convenient."""
175 filerevision convenient."""
183 def __init__(self, repo, path, changeid=None, fileid=None,
176 def __init__(self, repo, path, changeid=None, fileid=None,
184 filelog=None, changectx=None):
177 filelog=None, changectx=None):
185 """changeid can be a changeset revision, node, or tag.
178 """changeid can be a changeset revision, node, or tag.
186 fileid can be a file revision or node."""
179 fileid can be a file revision or node."""
187 self._repo = repo
180 self._repo = repo
188 self._path = path
181 self._path = path
189
182
190 assert (changeid is not None
183 assert (changeid is not None
191 or fileid is not None
184 or fileid is not None
192 or changectx is not None)
185 or changectx is not None)
193
186
194 if filelog:
187 if filelog:
195 self._filelog = filelog
188 self._filelog = filelog
196
189
197 if changeid is not None:
190 if changeid is not None:
198 self._changeid = changeid
191 self._changeid = changeid
199 if changectx is not None:
192 if changectx is not None:
200 self._changectx = changectx
193 self._changectx = changectx
201 if fileid is not None:
194 if fileid is not None:
202 self._fileid = fileid
195 self._fileid = fileid
203
196
204 @propertycache
197 @propertycache
205 def _changectx(self):
198 def _changectx(self):
206 return changectx(self._repo, self._changeid)
199 return changectx(self._repo, self._changeid)
207
200
208 @propertycache
201 @propertycache
209 def _filelog(self):
202 def _filelog(self):
210 return self._repo.file(self._path)
203 return self._repo.file(self._path)
211
204
212 @propertycache
205 @propertycache
213 def _changeid(self):
206 def _changeid(self):
214 if '_changectx' in self.__dict__:
207 if '_changectx' in self.__dict__:
215 return self._changectx.rev()
208 return self._changectx.rev()
216 else:
209 else:
217 return self._filelog.linkrev(self._filerev)
210 return self._filelog.linkrev(self._filerev)
218
211
219 @propertycache
212 @propertycache
220 def _filenode(self):
213 def _filenode(self):
221 if '_fileid' in self.__dict__:
214 if '_fileid' in self.__dict__:
222 return self._filelog.lookup(self._fileid)
215 return self._filelog.lookup(self._fileid)
223 else:
216 else:
224 return self._changectx.filenode(self._path)
217 return self._changectx.filenode(self._path)
225
218
226 @propertycache
219 @propertycache
227 def _filerev(self):
220 def _filerev(self):
228 return self._filelog.rev(self._filenode)
221 return self._filelog.rev(self._filenode)
229
222
230 @propertycache
223 @propertycache
231 def _repopath(self):
224 def _repopath(self):
232 return self._path
225 return self._path
233
226
234 def __nonzero__(self):
227 def __nonzero__(self):
235 try:
228 try:
236 self._filenode
229 self._filenode
237 return True
230 return True
238 except error.LookupError:
231 except error.LookupError:
239 # file is missing
232 # file is missing
240 return False
233 return False
241
234
242 def __str__(self):
235 def __str__(self):
243 return "%s@%s" % (self.path(), short(self.node()))
236 return "%s@%s" % (self.path(), short(self.node()))
244
237
245 def __repr__(self):
238 def __repr__(self):
246 return "<filectx %s>" % str(self)
239 return "<filectx %s>" % str(self)
247
240
248 def __hash__(self):
241 def __hash__(self):
249 try:
242 try:
250 return hash((self._path, self._fileid))
243 return hash((self._path, self._fileid))
251 except AttributeError:
244 except AttributeError:
252 return id(self)
245 return id(self)
253
246
254 def __eq__(self, other):
247 def __eq__(self, other):
255 try:
248 try:
256 return (self._path == other._path
249 return (self._path == other._path
257 and self._fileid == other._fileid)
250 and self._fileid == other._fileid)
258 except AttributeError:
251 except AttributeError:
259 return False
252 return False
260
253
261 def __ne__(self, other):
254 def __ne__(self, other):
262 return not (self == other)
255 return not (self == other)
263
256
264 def filectx(self, fileid):
257 def filectx(self, fileid):
265 '''opens an arbitrary revision of the file without
258 '''opens an arbitrary revision of the file without
266 opening a new filelog'''
259 opening a new filelog'''
267 return filectx(self._repo, self._path, fileid=fileid,
260 return filectx(self._repo, self._path, fileid=fileid,
268 filelog=self._filelog)
261 filelog=self._filelog)
269
262
270 def filerev(self): return self._filerev
263 def filerev(self): return self._filerev
271 def filenode(self): return self._filenode
264 def filenode(self): return self._filenode
272 def flags(self): return self._changectx.flags(self._path)
265 def flags(self): return self._changectx.flags(self._path)
273 def filelog(self): return self._filelog
266 def filelog(self): return self._filelog
274
267
275 def rev(self):
268 def rev(self):
276 if '_changectx' in self.__dict__:
269 if '_changectx' in self.__dict__:
277 return self._changectx.rev()
270 return self._changectx.rev()
278 if '_changeid' in self.__dict__:
271 if '_changeid' in self.__dict__:
279 return self._changectx.rev()
272 return self._changectx.rev()
280 return self._filelog.linkrev(self._filerev)
273 return self._filelog.linkrev(self._filerev)
281
274
282 def linkrev(self): return self._filelog.linkrev(self._filerev)
275 def linkrev(self): return self._filelog.linkrev(self._filerev)
283 def node(self): return self._changectx.node()
276 def node(self): return self._changectx.node()
284 def user(self): return self._changectx.user()
277 def user(self): return self._changectx.user()
285 def date(self): return self._changectx.date()
278 def date(self): return self._changectx.date()
286 def files(self): return self._changectx.files()
279 def files(self): return self._changectx.files()
287 def description(self): return self._changectx.description()
280 def description(self): return self._changectx.description()
288 def branch(self): return self._changectx.branch()
281 def branch(self): return self._changectx.branch()
289 def manifest(self): return self._changectx.manifest()
282 def manifest(self): return self._changectx.manifest()
290 def changectx(self): return self._changectx
283 def changectx(self): return self._changectx
291
284
292 def data(self): return self._filelog.read(self._filenode)
285 def data(self): return self._filelog.read(self._filenode)
293 def path(self): return self._path
286 def path(self): return self._path
294 def size(self): return self._filelog.size(self._filerev)
287 def size(self): return self._filelog.size(self._filerev)
295
288
296 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
289 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
297
290
298 def renamed(self):
291 def renamed(self):
299 """check if file was actually renamed in this changeset revision
292 """check if file was actually renamed in this changeset revision
300
293
301 If rename logged in file revision, we report copy for changeset only
294 If rename logged in file revision, we report copy for changeset only
302 if file revisions linkrev points back to the changeset in question
295 if file revisions linkrev points back to the changeset in question
303 or both changeset parents contain different file revisions.
296 or both changeset parents contain different file revisions.
304 """
297 """
305
298
306 renamed = self._filelog.renamed(self._filenode)
299 renamed = self._filelog.renamed(self._filenode)
307 if not renamed:
300 if not renamed:
308 return renamed
301 return renamed
309
302
310 if self.rev() == self.linkrev():
303 if self.rev() == self.linkrev():
311 return renamed
304 return renamed
312
305
313 name = self.path()
306 name = self.path()
314 fnode = self._filenode
307 fnode = self._filenode
315 for p in self._changectx.parents():
308 for p in self._changectx.parents():
316 try:
309 try:
317 if fnode == p.filenode(name):
310 if fnode == p.filenode(name):
318 return None
311 return None
319 except error.LookupError:
312 except error.LookupError:
320 pass
313 pass
321 return renamed
314 return renamed
322
315
323 def parents(self):
316 def parents(self):
324 p = self._path
317 p = self._path
325 fl = self._filelog
318 fl = self._filelog
326 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
319 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
327
320
328 r = self._filelog.renamed(self._filenode)
321 r = self._filelog.renamed(self._filenode)
329 if r:
322 if r:
330 pl[0] = (r[0], r[1], None)
323 pl[0] = (r[0], r[1], None)
331
324
332 return [filectx(self._repo, p, fileid=n, filelog=l)
325 return [filectx(self._repo, p, fileid=n, filelog=l)
333 for p,n,l in pl if n != nullid]
326 for p,n,l in pl if n != nullid]
334
327
335 def children(self):
328 def children(self):
336 # hard for renames
329 # hard for renames
337 c = self._filelog.children(self._filenode)
330 c = self._filelog.children(self._filenode)
338 return [filectx(self._repo, self._path, fileid=x,
331 return [filectx(self._repo, self._path, fileid=x,
339 filelog=self._filelog) for x in c]
332 filelog=self._filelog) for x in c]
340
333
341 def annotate(self, follow=False, linenumber=None):
334 def annotate(self, follow=False, linenumber=None):
342 '''returns a list of tuples of (ctx, line) for each line
335 '''returns a list of tuples of (ctx, line) for each line
343 in the file, where ctx is the filectx of the node where
336 in the file, where ctx is the filectx of the node where
344 that line was last changed.
337 that line was last changed.
345 This returns tuples of ((ctx, linenumber), line) for each line,
338 This returns tuples of ((ctx, linenumber), line) for each line,
346 if "linenumber" parameter is NOT "None".
339 if "linenumber" parameter is NOT "None".
347 In such tuples, linenumber means one at the first appearance
340 In such tuples, linenumber means one at the first appearance
348 in the managed file.
341 in the managed file.
349 To reduce annotation cost,
342 To reduce annotation cost,
350 this returns fixed value(False is used) as linenumber,
343 this returns fixed value(False is used) as linenumber,
351 if "linenumber" parameter is "False".'''
344 if "linenumber" parameter is "False".'''
352
345
353 def decorate_compat(text, rev):
346 def decorate_compat(text, rev):
354 return ([rev] * len(text.splitlines()), text)
347 return ([rev] * len(text.splitlines()), text)
355
348
356 def without_linenumber(text, rev):
349 def without_linenumber(text, rev):
357 return ([(rev, False)] * len(text.splitlines()), text)
350 return ([(rev, False)] * len(text.splitlines()), text)
358
351
359 def with_linenumber(text, rev):
352 def with_linenumber(text, rev):
360 size = len(text.splitlines())
353 size = len(text.splitlines())
361 return ([(rev, i) for i in xrange(1, size + 1)], text)
354 return ([(rev, i) for i in xrange(1, size + 1)], text)
362
355
363 decorate = (((linenumber is None) and decorate_compat) or
356 decorate = (((linenumber is None) and decorate_compat) or
364 (linenumber and with_linenumber) or
357 (linenumber and with_linenumber) or
365 without_linenumber)
358 without_linenumber)
366
359
367 def pair(parent, child):
360 def pair(parent, child):
368 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
361 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
369 child[0][b1:b2] = parent[0][a1:a2]
362 child[0][b1:b2] = parent[0][a1:a2]
370 return child
363 return child
371
364
372 getlog = util.cachefunc(lambda x: self._repo.file(x))
365 getlog = util.cachefunc(lambda x: self._repo.file(x))
373 def getctx(path, fileid):
366 def getctx(path, fileid):
374 log = path == self._path and self._filelog or getlog(path)
367 log = path == self._path and self._filelog or getlog(path)
375 return filectx(self._repo, path, fileid=fileid, filelog=log)
368 return filectx(self._repo, path, fileid=fileid, filelog=log)
376 getctx = util.cachefunc(getctx)
369 getctx = util.cachefunc(getctx)
377
370
378 def parents(f):
371 def parents(f):
379 # we want to reuse filectx objects as much as possible
372 # we want to reuse filectx objects as much as possible
380 p = f._path
373 p = f._path
381 if f._filerev is None: # working dir
374 if f._filerev is None: # working dir
382 pl = [(n.path(), n.filerev()) for n in f.parents()]
375 pl = [(n.path(), n.filerev()) for n in f.parents()]
383 else:
376 else:
384 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
377 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
385
378
386 if follow:
379 if follow:
387 r = f.renamed()
380 r = f.renamed()
388 if r:
381 if r:
389 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
382 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
390
383
391 return [getctx(p, n) for p, n in pl if n != nullrev]
384 return [getctx(p, n) for p, n in pl if n != nullrev]
392
385
393 # use linkrev to find the first changeset where self appeared
386 # use linkrev to find the first changeset where self appeared
394 if self.rev() != self.linkrev():
387 if self.rev() != self.linkrev():
395 base = self.filectx(self.filerev())
388 base = self.filectx(self.filerev())
396 else:
389 else:
397 base = self
390 base = self
398
391
399 # find all ancestors
392 # find all ancestors
400 needed = {base: 1}
393 needed = {base: 1}
401 visit = [base]
394 visit = [base]
402 files = [base._path]
395 files = [base._path]
403 while visit:
396 while visit:
404 f = visit.pop(0)
397 f = visit.pop(0)
405 for p in parents(f):
398 for p in parents(f):
406 if p not in needed:
399 if p not in needed:
407 needed[p] = 1
400 needed[p] = 1
408 visit.append(p)
401 visit.append(p)
409 if p._path not in files:
402 if p._path not in files:
410 files.append(p._path)
403 files.append(p._path)
411 else:
404 else:
412 # count how many times we'll use this
405 # count how many times we'll use this
413 needed[p] += 1
406 needed[p] += 1
414
407
415 # sort by revision (per file) which is a topological order
408 # sort by revision (per file) which is a topological order
416 visit = []
409 visit = []
417 for f in files:
410 for f in files:
418 fn = [(n.rev(), n) for n in needed if n._path == f]
411 fn = [(n.rev(), n) for n in needed if n._path == f]
419 visit.extend(fn)
412 visit.extend(fn)
420
413
421 hist = {}
414 hist = {}
422 for r, f in util.sort(visit):
415 for r, f in util.sort(visit):
423 curr = decorate(f.data(), f)
416 curr = decorate(f.data(), f)
424 for p in parents(f):
417 for p in parents(f):
425 if p != nullid:
418 if p != nullid:
426 curr = pair(hist[p], curr)
419 curr = pair(hist[p], curr)
427 # trim the history of unneeded revs
420 # trim the history of unneeded revs
428 needed[p] -= 1
421 needed[p] -= 1
429 if not needed[p]:
422 if not needed[p]:
430 del hist[p]
423 del hist[p]
431 hist[f] = curr
424 hist[f] = curr
432
425
433 return zip(hist[f][0], hist[f][1].splitlines(1))
426 return zip(hist[f][0], hist[f][1].splitlines(1))
434
427
435 def ancestor(self, fc2):
428 def ancestor(self, fc2):
436 """
429 """
437 find the common ancestor file context, if any, of self, and fc2
430 find the common ancestor file context, if any, of self, and fc2
438 """
431 """
439
432
440 acache = {}
433 acache = {}
441
434
442 # prime the ancestor cache for the working directory
435 # prime the ancestor cache for the working directory
443 for c in (self, fc2):
436 for c in (self, fc2):
444 if c._filerev == None:
437 if c._filerev == None:
445 pl = [(n.path(), n.filenode()) for n in c.parents()]
438 pl = [(n.path(), n.filenode()) for n in c.parents()]
446 acache[(c._path, None)] = pl
439 acache[(c._path, None)] = pl
447
440
448 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
441 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
449 def parents(vertex):
442 def parents(vertex):
450 if vertex in acache:
443 if vertex in acache:
451 return acache[vertex]
444 return acache[vertex]
452 f, n = vertex
445 f, n = vertex
453 if f not in flcache:
446 if f not in flcache:
454 flcache[f] = self._repo.file(f)
447 flcache[f] = self._repo.file(f)
455 fl = flcache[f]
448 fl = flcache[f]
456 pl = [(f, p) for p in fl.parents(n) if p != nullid]
449 pl = [(f, p) for p in fl.parents(n) if p != nullid]
457 re = fl.renamed(n)
450 re = fl.renamed(n)
458 if re:
451 if re:
459 pl.append(re)
452 pl.append(re)
460 acache[vertex] = pl
453 acache[vertex] = pl
461 return pl
454 return pl
462
455
463 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
456 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
464 v = ancestor.ancestor(a, b, parents)
457 v = ancestor.ancestor(a, b, parents)
465 if v:
458 if v:
466 f, n = v
459 f, n = v
467 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
460 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
468
461
469 return None
462 return None
470
463
471 class workingctx(changectx):
464 class workingctx(changectx):
472 """A workingctx object makes access to data related to
465 """A workingctx object makes access to data related to
473 the current working directory convenient.
466 the current working directory convenient.
474 parents - a pair of parent nodeids, or None to use the dirstate.
467 parents - a pair of parent nodeids, or None to use the dirstate.
475 date - any valid date string or (unixtime, offset), or None.
468 date - any valid date string or (unixtime, offset), or None.
476 user - username string, or None.
469 user - username string, or None.
477 extra - a dictionary of extra values, or None.
470 extra - a dictionary of extra values, or None.
478 changes - a list of file lists as returned by localrepo.status()
471 changes - a list of file lists as returned by localrepo.status()
479 or None to use the repository status.
472 or None to use the repository status.
480 """
473 """
481 def __init__(self, repo, parents=None, text="", user=None, date=None,
474 def __init__(self, repo, parents=None, text="", user=None, date=None,
482 extra=None, changes=None):
475 extra=None, changes=None):
483 self._repo = repo
476 self._repo = repo
484 self._rev = None
477 self._rev = None
485 self._node = None
478 self._node = None
486 self._text = text
479 self._text = text
487 if date:
480 if date:
488 self._date = util.parsedate(date)
481 self._date = util.parsedate(date)
489 if user:
482 if user:
490 self._user = user
483 self._user = user
491 if parents:
484 if parents:
492 self._parents = [changectx(self._repo, p) for p in parents]
485 self._parents = [changectx(self._repo, p) for p in parents]
493 if changes:
486 if changes:
494 self._status = list(changes)
487 self._status = list(changes)
495
488
496 self._extra = {}
489 self._extra = {}
497 if extra:
490 if extra:
498 self._extra = extra.copy()
491 self._extra = extra.copy()
499 if 'branch' not in self._extra:
492 if 'branch' not in self._extra:
500 branch = self._repo.dirstate.branch()
493 branch = self._repo.dirstate.branch()
501 try:
494 try:
502 branch = branch.decode('UTF-8').encode('UTF-8')
495 branch = branch.decode('UTF-8').encode('UTF-8')
503 except UnicodeDecodeError:
496 except UnicodeDecodeError:
504 raise util.Abort(_('branch name not in UTF-8!'))
497 raise util.Abort(_('branch name not in UTF-8!'))
505 self._extra['branch'] = branch
498 self._extra['branch'] = branch
506 if self._extra['branch'] == '':
499 if self._extra['branch'] == '':
507 self._extra['branch'] = 'default'
500 self._extra['branch'] = 'default'
508
501
509 def __str__(self):
502 def __str__(self):
510 return str(self._parents[0]) + "+"
503 return str(self._parents[0]) + "+"
511
504
512 def __nonzero__(self):
505 def __nonzero__(self):
513 return True
506 return True
514
507
515 def __contains__(self, key):
508 def __contains__(self, key):
516 return self._repo.dirstate[key] not in "?r"
509 return self._repo.dirstate[key] not in "?r"
517
510
518 @propertycache
511 @propertycache
519 def _manifest(self):
512 def _manifest(self):
520 """generate a manifest corresponding to the working directory"""
513 """generate a manifest corresponding to the working directory"""
521
514
522 man = self._parents[0].manifest().copy()
515 man = self._parents[0].manifest().copy()
523 copied = self._repo.dirstate.copies()
516 copied = self._repo.dirstate.copies()
524 cf = lambda x: man.flags(copied.get(x, x))
517 cf = lambda x: man.flags(copied.get(x, x))
525 ff = self._repo.dirstate.flagfunc(cf)
518 ff = self._repo.dirstate.flagfunc(cf)
526 modified, added, removed, deleted, unknown = self._status[:5]
519 modified, added, removed, deleted, unknown = self._status[:5]
527 for i, l in (("a", added), ("m", modified), ("u", unknown)):
520 for i, l in (("a", added), ("m", modified), ("u", unknown)):
528 for f in l:
521 for f in l:
529 man[f] = man.get(copied.get(f, f), nullid) + i
522 man[f] = man.get(copied.get(f, f), nullid) + i
530 try:
523 try:
531 man.set(f, ff(f))
524 man.set(f, ff(f))
532 except OSError:
525 except OSError:
533 pass
526 pass
534
527
535 for f in deleted + removed:
528 for f in deleted + removed:
536 if f in man:
529 if f in man:
537 del man[f]
530 del man[f]
538
531
539 return man
532 return man
540
533
541 @propertycache
534 @propertycache
542 def _status(self):
535 def _status(self):
543 return self._repo.status(unknown=True)
536 return self._repo.status(unknown=True)
544
537
545 @propertycache
538 @propertycache
546 def _user(self):
539 def _user(self):
547 return self._repo.ui.username()
540 return self._repo.ui.username()
548
541
549 @propertycache
542 @propertycache
550 def _date(self):
543 def _date(self):
551 return util.makedate()
544 return util.makedate()
552
545
553 @propertycache
546 @propertycache
554 def _parents(self):
547 def _parents(self):
555 p = self._repo.dirstate.parents()
548 p = self._repo.dirstate.parents()
556 if p[1] == nullid:
549 if p[1] == nullid:
557 p = p[:-1]
550 p = p[:-1]
558 self._parents = [changectx(self._repo, x) for x in p]
551 self._parents = [changectx(self._repo, x) for x in p]
559 return self._parents
552 return self._parents
560
553
561 def manifest(self): return self._manifest
554 def manifest(self): return self._manifest
562
555
563 def user(self): return self._user or self._repo.ui.username()
556 def user(self): return self._user or self._repo.ui.username()
564 def date(self): return self._date
557 def date(self): return self._date
565 def description(self): return self._text
558 def description(self): return self._text
566 def files(self):
559 def files(self):
567 return util.sort(self._status[0] + self._status[1] + self._status[2])
560 return util.sort(self._status[0] + self._status[1] + self._status[2])
568
561
569 def modified(self): return self._status[0]
562 def modified(self): return self._status[0]
570 def added(self): return self._status[1]
563 def added(self): return self._status[1]
571 def removed(self): return self._status[2]
564 def removed(self): return self._status[2]
572 def deleted(self): return self._status[3]
565 def deleted(self): return self._status[3]
573 def unknown(self): return self._status[4]
566 def unknown(self): return self._status[4]
574 def clean(self): return self._status[5]
567 def clean(self): return self._status[5]
575 def branch(self): return self._extra['branch']
568 def branch(self): return self._extra['branch']
576 def extra(self): return self._extra
569 def extra(self): return self._extra
577
570
578 def tags(self):
571 def tags(self):
579 t = []
572 t = []
580 [t.extend(p.tags()) for p in self.parents()]
573 [t.extend(p.tags()) for p in self.parents()]
581 return t
574 return t
582
575
583 def children(self):
576 def children(self):
584 return []
577 return []
585
578
586 def flags(self, path):
579 def flags(self, path):
587 if '_manifest' in self.__dict__:
580 if '_manifest' in self.__dict__:
588 try:
581 try:
589 return self._manifest.flags(path)
582 return self._manifest.flags(path)
590 except KeyError:
583 except KeyError:
591 return ''
584 return ''
592
585
593 pnode = self._parents[0].changeset()[0]
586 pnode = self._parents[0].changeset()[0]
594 orig = self._repo.dirstate.copies().get(path, path)
587 orig = self._repo.dirstate.copies().get(path, path)
595 node, flag = self._repo.manifest.find(pnode, orig)
588 node, flag = self._repo.manifest.find(pnode, orig)
596 try:
589 try:
597 ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
590 ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
598 return ff(path)
591 return ff(path)
599 except OSError:
592 except OSError:
600 pass
593 pass
601
594
602 if not node or path in self.deleted() or path in self.removed():
595 if not node or path in self.deleted() or path in self.removed():
603 return ''
596 return ''
604 return flag
597 return flag
605
598
606 def filectx(self, path, filelog=None):
599 def filectx(self, path, filelog=None):
607 """get a file context from the working directory"""
600 """get a file context from the working directory"""
608 return workingfilectx(self._repo, path, workingctx=self,
601 return workingfilectx(self._repo, path, workingctx=self,
609 filelog=filelog)
602 filelog=filelog)
610
603
611 def ancestor(self, c2):
604 def ancestor(self, c2):
612 """return the ancestor context of self and c2"""
605 """return the ancestor context of self and c2"""
613 return self._parents[0].ancestor(c2) # punt on two parents for now
606 return self._parents[0].ancestor(c2) # punt on two parents for now
614
607
615 def walk(self, match):
608 def walk(self, match):
616 return util.sort(self._repo.dirstate.walk(match, True, False).keys())
609 return util.sort(self._repo.dirstate.walk(match, True, False).keys())
617
610
618 class workingfilectx(filectx):
611 class workingfilectx(filectx):
619 """A workingfilectx object makes access to data related to a particular
612 """A workingfilectx object makes access to data related to a particular
620 file in the working directory convenient."""
613 file in the working directory convenient."""
621 def __init__(self, repo, path, filelog=None, workingctx=None):
614 def __init__(self, repo, path, filelog=None, workingctx=None):
622 """changeid can be a changeset revision, node, or tag.
615 """changeid can be a changeset revision, node, or tag.
623 fileid can be a file revision or node."""
616 fileid can be a file revision or node."""
624 self._repo = repo
617 self._repo = repo
625 self._path = path
618 self._path = path
626 self._changeid = None
619 self._changeid = None
627 self._filerev = self._filenode = None
620 self._filerev = self._filenode = None
628
621
629 if filelog:
622 if filelog:
630 self._filelog = filelog
623 self._filelog = filelog
631 if workingctx:
624 if workingctx:
632 self._changectx = workingctx
625 self._changectx = workingctx
633
626
634 @propertycache
627 @propertycache
635 def _changectx(self):
628 def _changectx(self):
636 return workingctx(self._repo)
629 return workingctx(self._repo)
637
630
638 @propertycache
631 @propertycache
639 def _repopath(self):
632 def _repopath(self):
640 return self._repo.dirstate.copied(self._path) or self._path
633 return self._repo.dirstate.copied(self._path) or self._path
641
634
642 @propertycache
635 @propertycache
643 def _filelog(self):
636 def _filelog(self):
644 return self._repo.file(self._repopath)
637 return self._repo.file(self._repopath)
645
638
646 def __nonzero__(self):
639 def __nonzero__(self):
647 return True
640 return True
648
641
649 def __str__(self):
642 def __str__(self):
650 return "%s@%s" % (self.path(), self._changectx)
643 return "%s@%s" % (self.path(), self._changectx)
651
644
652 def filectx(self, fileid):
645 def filectx(self, fileid):
653 '''opens an arbitrary revision of the file without
646 '''opens an arbitrary revision of the file without
654 opening a new filelog'''
647 opening a new filelog'''
655 return filectx(self._repo, self._repopath, fileid=fileid,
648 return filectx(self._repo, self._repopath, fileid=fileid,
656 filelog=self._filelog)
649 filelog=self._filelog)
657
650
658 def rev(self):
651 def rev(self):
659 if '_changectx' in self.__dict__:
652 if '_changectx' in self.__dict__:
660 return self._changectx.rev()
653 return self._changectx.rev()
661 return self._filelog.linkrev(self._filerev)
654 return self._filelog.linkrev(self._filerev)
662
655
663 def data(self): return self._repo.wread(self._path)
656 def data(self): return self._repo.wread(self._path)
664 def renamed(self):
657 def renamed(self):
665 rp = self._repopath
658 rp = self._repopath
666 if rp == self._path:
659 if rp == self._path:
667 return None
660 return None
668 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
661 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
669
662
670 def parents(self):
663 def parents(self):
671 '''return parent filectxs, following copies if necessary'''
664 '''return parent filectxs, following copies if necessary'''
672 p = self._path
665 p = self._path
673 rp = self._repopath
666 rp = self._repopath
674 pcl = self._changectx._parents
667 pcl = self._changectx._parents
675 fl = self._filelog
668 fl = self._filelog
676 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
669 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
677 if len(pcl) > 1:
670 if len(pcl) > 1:
678 if rp != p:
671 if rp != p:
679 fl = None
672 fl = None
680 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
673 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
681
674
682 return [filectx(self._repo, p, fileid=n, filelog=l)
675 return [filectx(self._repo, p, fileid=n, filelog=l)
683 for p,n,l in pl if n != nullid]
676 for p,n,l in pl if n != nullid]
684
677
685 def children(self):
678 def children(self):
686 return []
679 return []
687
680
688 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
681 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
689 def date(self):
682 def date(self):
690 t, tz = self._changectx.date()
683 t, tz = self._changectx.date()
691 try:
684 try:
692 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
685 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
693 except OSError, err:
686 except OSError, err:
694 if err.errno != errno.ENOENT: raise
687 if err.errno != errno.ENOENT: raise
695 return (t, tz)
688 return (t, tz)
696
689
697 def cmp(self, text): return self._repo.wread(self._path) == text
690 def cmp(self, text): return self._repo.wread(self._path) == text
698
691
699 class memctx(object):
692 class memctx(object):
700 """Use memctx to perform in-memory commits via localrepo.commitctx().
693 """Use memctx to perform in-memory commits via localrepo.commitctx().
701
694
702 Revision information is supplied at initialization time while
695 Revision information is supplied at initialization time while
703 related files data and is made available through a callback
696 related files data and is made available through a callback
704 mechanism. 'repo' is the current localrepo, 'parents' is a
697 mechanism. 'repo' is the current localrepo, 'parents' is a
705 sequence of two parent revisions identifiers (pass None for every
698 sequence of two parent revisions identifiers (pass None for every
706 missing parent), 'text' is the commit message and 'files' lists
699 missing parent), 'text' is the commit message and 'files' lists
707 names of files touched by the revision (normalized and relative to
700 names of files touched by the revision (normalized and relative to
708 repository root).
701 repository root).
709
702
710 filectxfn(repo, memctx, path) is a callable receiving the
703 filectxfn(repo, memctx, path) is a callable receiving the
711 repository, the current memctx object and the normalized path of
704 repository, the current memctx object and the normalized path of
712 requested file, relative to repository root. It is fired by the
705 requested file, relative to repository root. It is fired by the
713 commit function for every file in 'files', but calls order is
706 commit function for every file in 'files', but calls order is
714 undefined. If the file is available in the revision being
707 undefined. If the file is available in the revision being
715 committed (updated or added), filectxfn returns a memfilectx
708 committed (updated or added), filectxfn returns a memfilectx
716 object. If the file was removed, filectxfn raises an
709 object. If the file was removed, filectxfn raises an
717 IOError. Moved files are represented by marking the source file
710 IOError. Moved files are represented by marking the source file
718 removed and the new file added with copy information (see
711 removed and the new file added with copy information (see
719 memfilectx).
712 memfilectx).
720
713
721 user receives the committer name and defaults to current
714 user receives the committer name and defaults to current
722 repository username, date is the commit date in any format
715 repository username, date is the commit date in any format
723 supported by util.parsedate() and defaults to current date, extra
716 supported by util.parsedate() and defaults to current date, extra
724 is a dictionary of metadata or is left empty.
717 is a dictionary of metadata or is left empty.
725 """
718 """
726 def __init__(self, repo, parents, text, files, filectxfn, user=None,
719 def __init__(self, repo, parents, text, files, filectxfn, user=None,
727 date=None, extra=None):
720 date=None, extra=None):
728 self._repo = repo
721 self._repo = repo
729 self._rev = None
722 self._rev = None
730 self._node = None
723 self._node = None
731 self._text = text
724 self._text = text
732 self._date = date and util.parsedate(date) or util.makedate()
725 self._date = date and util.parsedate(date) or util.makedate()
733 self._user = user
726 self._user = user
734 parents = [(p or nullid) for p in parents]
727 parents = [(p or nullid) for p in parents]
735 p1, p2 = parents
728 p1, p2 = parents
736 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
729 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
737 files = util.sort(set(files))
730 files = util.sort(set(files))
738 self._status = [files, [], [], [], []]
731 self._status = [files, [], [], [], []]
739 self._filectxfn = filectxfn
732 self._filectxfn = filectxfn
740
733
741 self._extra = extra and extra.copy() or {}
734 self._extra = extra and extra.copy() or {}
742 if 'branch' not in self._extra:
735 if 'branch' not in self._extra:
743 self._extra['branch'] = 'default'
736 self._extra['branch'] = 'default'
744 elif self._extra.get('branch') == '':
737 elif self._extra.get('branch') == '':
745 self._extra['branch'] = 'default'
738 self._extra['branch'] = 'default'
746
739
747 def __str__(self):
740 def __str__(self):
748 return str(self._parents[0]) + "+"
741 return str(self._parents[0]) + "+"
749
742
750 def __int__(self):
743 def __int__(self):
751 return self._rev
744 return self._rev
752
745
753 def __nonzero__(self):
746 def __nonzero__(self):
754 return True
747 return True
755
748
756 def user(self): return self._user or self._repo.ui.username()
749 def user(self): return self._user or self._repo.ui.username()
757 def date(self): return self._date
750 def date(self): return self._date
758 def description(self): return self._text
751 def description(self): return self._text
759 def files(self): return self.modified()
752 def files(self): return self.modified()
760 def modified(self): return self._status[0]
753 def modified(self): return self._status[0]
761 def added(self): return self._status[1]
754 def added(self): return self._status[1]
762 def removed(self): return self._status[2]
755 def removed(self): return self._status[2]
763 def deleted(self): return self._status[3]
756 def deleted(self): return self._status[3]
764 def unknown(self): return self._status[4]
757 def unknown(self): return self._status[4]
765 def clean(self): return self._status[5]
758 def clean(self): return self._status[5]
766 def branch(self): return self._extra['branch']
759 def branch(self): return self._extra['branch']
767 def extra(self): return self._extra
760 def extra(self): return self._extra
768 def flags(self, f): return self[f].flags()
761 def flags(self, f): return self[f].flags()
769
762
770 def parents(self):
763 def parents(self):
771 """return contexts for each parent changeset"""
764 """return contexts for each parent changeset"""
772 return self._parents
765 return self._parents
773
766
774 def filectx(self, path, filelog=None):
767 def filectx(self, path, filelog=None):
775 """get a file context from the working directory"""
768 """get a file context from the working directory"""
776 return self._filectxfn(self._repo, self, path)
769 return self._filectxfn(self._repo, self, path)
777
770
778 class memfilectx(object):
771 class memfilectx(object):
779 """memfilectx represents an in-memory file to commit.
772 """memfilectx represents an in-memory file to commit.
780
773
781 See memctx for more details.
774 See memctx for more details.
782 """
775 """
783 def __init__(self, path, data, islink, isexec, copied):
776 def __init__(self, path, data, islink, isexec, copied):
784 """
777 """
785 path is the normalized file path relative to repository root.
778 path is the normalized file path relative to repository root.
786 data is the file content as a string.
779 data is the file content as a string.
787 islink is True if the file is a symbolic link.
780 islink is True if the file is a symbolic link.
788 isexec is True if the file is executable.
781 isexec is True if the file is executable.
789 copied is the source file path if current file was copied in the
782 copied is the source file path if current file was copied in the
790 revision being committed, or None."""
783 revision being committed, or None."""
791 self._path = path
784 self._path = path
792 self._data = data
785 self._data = data
793 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
786 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
794 self._copied = None
787 self._copied = None
795 if copied:
788 if copied:
796 self._copied = (copied, nullid)
789 self._copied = (copied, nullid)
797
790
798 def __nonzero__(self): return True
791 def __nonzero__(self): return True
799 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
792 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
800 def path(self): return self._path
793 def path(self): return self._path
801 def data(self): return self._data
794 def data(self): return self._data
802 def flags(self): return self._flags
795 def flags(self): return self._flags
803 def isexec(self): return 'x' in self._flags
796 def isexec(self): return 'x' in self._flags
804 def islink(self): return 'l' in self._flags
797 def islink(self): return 'l' in self._flags
805 def renamed(self): return self._copied
798 def renamed(self): return self._copied
806
799
@@ -1,1474 +1,1483 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specfic implementations
2 util.py - Mercurial utility functions and platform specfic implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import _
15 from i18n import _
16 import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
16 import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
17 import os, stat, threading, time, calendar, glob, osutil
17 import os, stat, threading, time, calendar, glob, osutil
18 import imp
18 import imp
19
19
20 # Python compatibility
20 # Python compatibility
21
21
22 _md5 = None
22 _md5 = None
23 def md5(s):
23 def md5(s):
24 global _md5
24 global _md5
25 if _md5 is None:
25 if _md5 is None:
26 try:
26 try:
27 import hashlib
27 import hashlib
28 _md5 = hashlib.md5
28 _md5 = hashlib.md5
29 except ImportError:
29 except ImportError:
30 import md5
30 import md5
31 _md5 = md5.md5
31 _md5 = md5.md5
32 return _md5(s)
32 return _md5(s)
33
33
34 _sha1 = None
34 _sha1 = None
35 def sha1(s):
35 def sha1(s):
36 global _sha1
36 global _sha1
37 if _sha1 is None:
37 if _sha1 is None:
38 try:
38 try:
39 import hashlib
39 import hashlib
40 _sha1 = hashlib.sha1
40 _sha1 = hashlib.sha1
41 except ImportError:
41 except ImportError:
42 import sha
42 import sha
43 _sha1 = sha.sha
43 _sha1 = sha.sha
44 return _sha1(s)
44 return _sha1(s)
45
45
46 try:
46 try:
47 import subprocess
47 import subprocess
48 subprocess.Popen # trigger ImportError early
48 subprocess.Popen # trigger ImportError early
49 closefds = os.name == 'posix'
49 closefds = os.name == 'posix'
50 def popen2(cmd, mode='t', bufsize=-1):
50 def popen2(cmd, mode='t', bufsize=-1):
51 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
51 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
52 close_fds=closefds,
52 close_fds=closefds,
53 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
53 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
54 return p.stdin, p.stdout
54 return p.stdin, p.stdout
55 def popen3(cmd, mode='t', bufsize=-1):
55 def popen3(cmd, mode='t', bufsize=-1):
56 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
56 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
57 close_fds=closefds,
57 close_fds=closefds,
58 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
58 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
59 stderr=subprocess.PIPE)
59 stderr=subprocess.PIPE)
60 return p.stdin, p.stdout, p.stderr
60 return p.stdin, p.stdout, p.stderr
61 def Popen3(cmd, capturestderr=False, bufsize=-1):
61 def Popen3(cmd, capturestderr=False, bufsize=-1):
62 stderr = capturestderr and subprocess.PIPE or None
62 stderr = capturestderr and subprocess.PIPE or None
63 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
63 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
64 close_fds=closefds,
64 close_fds=closefds,
65 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
65 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
66 stderr=stderr)
66 stderr=stderr)
67 p.fromchild = p.stdout
67 p.fromchild = p.stdout
68 p.tochild = p.stdin
68 p.tochild = p.stdin
69 p.childerr = p.stderr
69 p.childerr = p.stderr
70 return p
70 return p
71 except ImportError:
71 except ImportError:
72 subprocess = None
72 subprocess = None
73 from popen2 import Popen3
73 from popen2 import Popen3
74 popen2 = os.popen2
74 popen2 = os.popen2
75 popen3 = os.popen3
75 popen3 = os.popen3
76
76
77
77
78 def version():
78 def version():
79 """Return version information if available."""
79 """Return version information if available."""
80 try:
80 try:
81 import __version__
81 import __version__
82 return __version__.version
82 return __version__.version
83 except ImportError:
83 except ImportError:
84 return 'unknown'
84 return 'unknown'
85
85
86 # used by parsedate
86 # used by parsedate
87 defaultdateformats = (
87 defaultdateformats = (
88 '%Y-%m-%d %H:%M:%S',
88 '%Y-%m-%d %H:%M:%S',
89 '%Y-%m-%d %I:%M:%S%p',
89 '%Y-%m-%d %I:%M:%S%p',
90 '%Y-%m-%d %H:%M',
90 '%Y-%m-%d %H:%M',
91 '%Y-%m-%d %I:%M%p',
91 '%Y-%m-%d %I:%M%p',
92 '%Y-%m-%d',
92 '%Y-%m-%d',
93 '%m-%d',
93 '%m-%d',
94 '%m/%d',
94 '%m/%d',
95 '%m/%d/%y',
95 '%m/%d/%y',
96 '%m/%d/%Y',
96 '%m/%d/%Y',
97 '%a %b %d %H:%M:%S %Y',
97 '%a %b %d %H:%M:%S %Y',
98 '%a %b %d %I:%M:%S%p %Y',
98 '%a %b %d %I:%M:%S%p %Y',
99 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
99 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
100 '%b %d %H:%M:%S %Y',
100 '%b %d %H:%M:%S %Y',
101 '%b %d %I:%M:%S%p %Y',
101 '%b %d %I:%M:%S%p %Y',
102 '%b %d %H:%M:%S',
102 '%b %d %H:%M:%S',
103 '%b %d %I:%M:%S%p',
103 '%b %d %I:%M:%S%p',
104 '%b %d %H:%M',
104 '%b %d %H:%M',
105 '%b %d %I:%M%p',
105 '%b %d %I:%M%p',
106 '%b %d %Y',
106 '%b %d %Y',
107 '%b %d',
107 '%b %d',
108 '%H:%M:%S',
108 '%H:%M:%S',
109 '%I:%M:%SP',
109 '%I:%M:%SP',
110 '%H:%M',
110 '%H:%M',
111 '%I:%M%p',
111 '%I:%M%p',
112 )
112 )
113
113
114 extendeddateformats = defaultdateformats + (
114 extendeddateformats = defaultdateformats + (
115 "%Y",
115 "%Y",
116 "%Y-%m",
116 "%Y-%m",
117 "%b",
117 "%b",
118 "%b %Y",
118 "%b %Y",
119 )
119 )
120
120
121 def cachefunc(func):
121 def cachefunc(func):
122 '''cache the result of function calls'''
122 '''cache the result of function calls'''
123 # XXX doesn't handle keywords args
123 # XXX doesn't handle keywords args
124 cache = {}
124 cache = {}
125 if func.func_code.co_argcount == 1:
125 if func.func_code.co_argcount == 1:
126 # we gain a small amount of time because
126 # we gain a small amount of time because
127 # we don't need to pack/unpack the list
127 # we don't need to pack/unpack the list
128 def f(arg):
128 def f(arg):
129 if arg not in cache:
129 if arg not in cache:
130 cache[arg] = func(arg)
130 cache[arg] = func(arg)
131 return cache[arg]
131 return cache[arg]
132 else:
132 else:
133 def f(*args):
133 def f(*args):
134 if args not in cache:
134 if args not in cache:
135 cache[args] = func(*args)
135 cache[args] = func(*args)
136 return cache[args]
136 return cache[args]
137
137
138 return f
138 return f
139
139
140 class propertycache(object):
141 def __init__(self, func):
142 self.func = func
143 self.name = func.__name__
144 def __get__(self, obj, type=None):
145 result = self.func(obj)
146 setattr(obj, self.name, result)
147 return result
148
140 def pipefilter(s, cmd):
149 def pipefilter(s, cmd):
141 '''filter string S through command CMD, returning its output'''
150 '''filter string S through command CMD, returning its output'''
142 (pin, pout) = popen2(cmd, 'b')
151 (pin, pout) = popen2(cmd, 'b')
143 def writer():
152 def writer():
144 try:
153 try:
145 pin.write(s)
154 pin.write(s)
146 pin.close()
155 pin.close()
147 except IOError, inst:
156 except IOError, inst:
148 if inst.errno != errno.EPIPE:
157 if inst.errno != errno.EPIPE:
149 raise
158 raise
150
159
151 # we should use select instead on UNIX, but this will work on most
160 # we should use select instead on UNIX, but this will work on most
152 # systems, including Windows
161 # systems, including Windows
153 w = threading.Thread(target=writer)
162 w = threading.Thread(target=writer)
154 w.start()
163 w.start()
155 f = pout.read()
164 f = pout.read()
156 pout.close()
165 pout.close()
157 w.join()
166 w.join()
158 return f
167 return f
159
168
160 def tempfilter(s, cmd):
169 def tempfilter(s, cmd):
161 '''filter string S through a pair of temporary files with CMD.
170 '''filter string S through a pair of temporary files with CMD.
162 CMD is used as a template to create the real command to be run,
171 CMD is used as a template to create the real command to be run,
163 with the strings INFILE and OUTFILE replaced by the real names of
172 with the strings INFILE and OUTFILE replaced by the real names of
164 the temporary files generated.'''
173 the temporary files generated.'''
165 inname, outname = None, None
174 inname, outname = None, None
166 try:
175 try:
167 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
176 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
168 fp = os.fdopen(infd, 'wb')
177 fp = os.fdopen(infd, 'wb')
169 fp.write(s)
178 fp.write(s)
170 fp.close()
179 fp.close()
171 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
180 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
172 os.close(outfd)
181 os.close(outfd)
173 cmd = cmd.replace('INFILE', inname)
182 cmd = cmd.replace('INFILE', inname)
174 cmd = cmd.replace('OUTFILE', outname)
183 cmd = cmd.replace('OUTFILE', outname)
175 code = os.system(cmd)
184 code = os.system(cmd)
176 if sys.platform == 'OpenVMS' and code & 1:
185 if sys.platform == 'OpenVMS' and code & 1:
177 code = 0
186 code = 0
178 if code: raise Abort(_("command '%s' failed: %s") %
187 if code: raise Abort(_("command '%s' failed: %s") %
179 (cmd, explain_exit(code)))
188 (cmd, explain_exit(code)))
180 return open(outname, 'rb').read()
189 return open(outname, 'rb').read()
181 finally:
190 finally:
182 try:
191 try:
183 if inname: os.unlink(inname)
192 if inname: os.unlink(inname)
184 except: pass
193 except: pass
185 try:
194 try:
186 if outname: os.unlink(outname)
195 if outname: os.unlink(outname)
187 except: pass
196 except: pass
188
197
189 filtertable = {
198 filtertable = {
190 'tempfile:': tempfilter,
199 'tempfile:': tempfilter,
191 'pipe:': pipefilter,
200 'pipe:': pipefilter,
192 }
201 }
193
202
194 def filter(s, cmd):
203 def filter(s, cmd):
195 "filter a string through a command that transforms its input to its output"
204 "filter a string through a command that transforms its input to its output"
196 for name, fn in filtertable.iteritems():
205 for name, fn in filtertable.iteritems():
197 if cmd.startswith(name):
206 if cmd.startswith(name):
198 return fn(s, cmd[len(name):].lstrip())
207 return fn(s, cmd[len(name):].lstrip())
199 return pipefilter(s, cmd)
208 return pipefilter(s, cmd)
200
209
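A sketch of the two dispatch paths of filter(), using Unix commands purely for illustration and assuming mercurial.util is importable:

from mercurial import util

# 'pipe:' (also the default) feeds the data through the command's stdin/stdout
upper = util.filter('hello\n', 'pipe: tr a-z A-Z')                      # 'HELLO\n'

# 'tempfile:' writes the data to INFILE and reads the result back from OUTFILE
sorted_text = util.filter('b\na\n', 'tempfile: sort INFILE > OUTFILE')  # 'a\nb\n'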
201 def binary(s):
210 def binary(s):
202 """return true if a string is binary data"""
211 """return true if a string is binary data"""
203 return bool(s and '\0' in s)
212 return bool(s and '\0' in s)
204
213
205 def sort(l):
214 def sort(l):
206 if not isinstance(l, list):
215 if not isinstance(l, list):
207 l = list(l)
216 l = list(l)
208 l.sort()
217 l.sort()
209 return l
218 return l
210
219
211 def increasingchunks(source, min=1024, max=65536):
220 def increasingchunks(source, min=1024, max=65536):
212 '''return no less than min bytes per chunk while data remains,
221 '''return no less than min bytes per chunk while data remains,
213 doubling min after each chunk until it reaches max'''
222 doubling min after each chunk until it reaches max'''
214 def log2(x):
223 def log2(x):
215 if not x:
224 if not x:
216 return 0
225 return 0
217 i = 0
226 i = 0
218 while x:
227 while x:
219 x >>= 1
228 x >>= 1
220 i += 1
229 i += 1
221 return i - 1
230 return i - 1
222
231
223 buf = []
232 buf = []
224 blen = 0
233 blen = 0
225 for chunk in source:
234 for chunk in source:
226 buf.append(chunk)
235 buf.append(chunk)
227 blen += len(chunk)
236 blen += len(chunk)
228 if blen >= min:
237 if blen >= min:
229 if min < max:
238 if min < max:
230 min = min << 1
239 min = min << 1
231 nmin = 1 << log2(blen)
240 nmin = 1 << log2(blen)
232 if nmin > min:
241 if nmin > min:
233 min = nmin
242 min = nmin
234 if min > max:
243 if min > max:
235 min = max
244 min = max
236 yield ''.join(buf)
245 yield ''.join(buf)
237 blen = 0
246 blen = 0
238 buf = []
247 buf = []
239 if buf:
248 if buf:
240 yield ''.join(buf)
249 yield ''.join(buf)
241
250
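For example, feeding many small chunks through increasingchunks yields progressively larger buffers; a sketch (exact sizes depend on the input):

from mercurial import util

source = ('x' * 100 for _ in xrange(200))        # 200 chunks of 100 bytes
sizes = [len(c) for c in util.increasingchunks(source, min=1024, max=4096)]
# the first yielded chunk is at least 1024 bytes, later ones grow toward
# the 4096-byte cap, and the final chunk may be smaller (whatever is left)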
242 Abort = error.Abort
251 Abort = error.Abort
243
252
244 def always(fn): return True
253 def always(fn): return True
245 def never(fn): return False
254 def never(fn): return False
246
255
247 def patkind(name, default):
256 def patkind(name, default):
248 """Split a string into an optional pattern kind prefix and the
257 """Split a string into an optional pattern kind prefix and the
249 actual pattern."""
258 actual pattern."""
250 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
259 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
251 if name.startswith(prefix + ':'): return name.split(':', 1)
260 if name.startswith(prefix + ':'): return name.split(':', 1)
252 return default, name
261 return default, name
253
262
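For instance (assuming mercurial.util is importable):

from mercurial import util

kind, pat = util.patkind('glob:*.c', 'relpath')    # kind == 'glob', pat == '*.c'
kind, pat = util.patkind('Makefile', 'relpath')    # no prefix: kind == 'relpath'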
254 def globre(pat, head='^', tail='$'):
263 def globre(pat, head='^', tail='$'):
255 "convert a glob pattern into a regexp"
264 "convert a glob pattern into a regexp"
256 i, n = 0, len(pat)
265 i, n = 0, len(pat)
257 res = ''
266 res = ''
258 group = 0
267 group = 0
259 def peek(): return i < n and pat[i]
268 def peek(): return i < n and pat[i]
260 while i < n:
269 while i < n:
261 c = pat[i]
270 c = pat[i]
262 i = i+1
271 i = i+1
263 if c == '*':
272 if c == '*':
264 if peek() == '*':
273 if peek() == '*':
265 i += 1
274 i += 1
266 res += '.*'
275 res += '.*'
267 else:
276 else:
268 res += '[^/]*'
277 res += '[^/]*'
269 elif c == '?':
278 elif c == '?':
270 res += '.'
279 res += '.'
271 elif c == '[':
280 elif c == '[':
272 j = i
281 j = i
273 if j < n and pat[j] in '!]':
282 if j < n and pat[j] in '!]':
274 j += 1
283 j += 1
275 while j < n and pat[j] != ']':
284 while j < n and pat[j] != ']':
276 j += 1
285 j += 1
277 if j >= n:
286 if j >= n:
278 res += '\\['
287 res += '\\['
279 else:
288 else:
280 stuff = pat[i:j].replace('\\','\\\\')
289 stuff = pat[i:j].replace('\\','\\\\')
281 i = j + 1
290 i = j + 1
282 if stuff[0] == '!':
291 if stuff[0] == '!':
283 stuff = '^' + stuff[1:]
292 stuff = '^' + stuff[1:]
284 elif stuff[0] == '^':
293 elif stuff[0] == '^':
285 stuff = '\\' + stuff
294 stuff = '\\' + stuff
286 res = '%s[%s]' % (res, stuff)
295 res = '%s[%s]' % (res, stuff)
287 elif c == '{':
296 elif c == '{':
288 group += 1
297 group += 1
289 res += '(?:'
298 res += '(?:'
290 elif c == '}' and group:
299 elif c == '}' and group:
291 res += ')'
300 res += ')'
292 group -= 1
301 group -= 1
293 elif c == ',' and group:
302 elif c == ',' and group:
294 res += '|'
303 res += '|'
295 elif c == '\\':
304 elif c == '\\':
296 p = peek()
305 p = peek()
297 if p:
306 if p:
298 i += 1
307 i += 1
299 res += re.escape(p)
308 res += re.escape(p)
300 else:
309 else:
301 res += re.escape(c)
310 res += re.escape(c)
302 else:
311 else:
303 res += re.escape(c)
312 res += re.escape(c)
304 return head + res + tail
313 return head + res + tail
305
314
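A quick illustration of the glob-to-regexp translation (assuming mercurial.util is importable):

import re
from mercurial import util

rx = re.compile(util.globre('*.py'))        # '*' becomes '[^/]*'
bool(rx.match('setup.py'))                  # True
bool(rx.match('mercurial/util.py'))         # False: '*' does not cross '/'

rx = re.compile(util.globre('**/*.py'))     # '**' becomes '.*'
bool(rx.match('mercurial/util.py'))         # True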
306 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
315 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
307
316
308 def pathto(root, n1, n2):
317 def pathto(root, n1, n2):
309 '''return the relative path from one place to another.
318 '''return the relative path from one place to another.
310 root should use os.sep to separate directories
319 root should use os.sep to separate directories
311 n1 should use os.sep to separate directories
320 n1 should use os.sep to separate directories
312 n2 should use "/" to separate directories
321 n2 should use "/" to separate directories
313 returns an os.sep-separated path.
322 returns an os.sep-separated path.
314
323
315 If n1 is a relative path, it's assumed it's
324 If n1 is a relative path, it's assumed it's
316 relative to root.
325 relative to root.
317 n2 should always be relative to root.
326 n2 should always be relative to root.
318 '''
327 '''
319 if not n1: return localpath(n2)
328 if not n1: return localpath(n2)
320 if os.path.isabs(n1):
329 if os.path.isabs(n1):
321 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
330 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
322 return os.path.join(root, localpath(n2))
331 return os.path.join(root, localpath(n2))
323 n2 = '/'.join((pconvert(root), n2))
332 n2 = '/'.join((pconvert(root), n2))
324 a, b = splitpath(n1), n2.split('/')
333 a, b = splitpath(n1), n2.split('/')
325 a.reverse()
334 a.reverse()
326 b.reverse()
335 b.reverse()
327 while a and b and a[-1] == b[-1]:
336 while a and b and a[-1] == b[-1]:
328 a.pop()
337 a.pop()
329 b.pop()
338 b.pop()
330 b.reverse()
339 b.reverse()
331 return os.sep.join((['..'] * len(a)) + b) or '.'
340 return os.sep.join((['..'] * len(a)) + b) or '.'
332
341
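For example, on a POSIX system (where os.sep is '/'), a sketch of the relative-path computation with hypothetical paths:

from mercurial import util

# from the 'src' directory of the repo to 'doc/README' at the repo root
util.pathto('/repo', 'src', 'doc/README')      # '../doc/README'

# with an empty n1 the path is simply converted to local form
util.pathto('/repo', '', 'doc/README')         # 'doc/README'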
333 def canonpath(root, cwd, myname):
342 def canonpath(root, cwd, myname):
334 """return the canonical path of myname, given cwd and root"""
343 """return the canonical path of myname, given cwd and root"""
335 if root == os.sep:
344 if root == os.sep:
336 rootsep = os.sep
345 rootsep = os.sep
337 elif endswithsep(root):
346 elif endswithsep(root):
338 rootsep = root
347 rootsep = root
339 else:
348 else:
340 rootsep = root + os.sep
349 rootsep = root + os.sep
341 name = myname
350 name = myname
342 if not os.path.isabs(name):
351 if not os.path.isabs(name):
343 name = os.path.join(root, cwd, name)
352 name = os.path.join(root, cwd, name)
344 name = os.path.normpath(name)
353 name = os.path.normpath(name)
345 audit_path = path_auditor(root)
354 audit_path = path_auditor(root)
346 if name != rootsep and name.startswith(rootsep):
355 if name != rootsep and name.startswith(rootsep):
347 name = name[len(rootsep):]
356 name = name[len(rootsep):]
348 audit_path(name)
357 audit_path(name)
349 return pconvert(name)
358 return pconvert(name)
350 elif name == root:
359 elif name == root:
351 return ''
360 return ''
352 else:
361 else:
353 # Determine whether `name' is in the hierarchy at or beneath `root',
362 # Determine whether `name' is in the hierarchy at or beneath `root',
354 # by iterating name=dirname(name) until that causes no change (can't
363 # by iterating name=dirname(name) until that causes no change (can't
355 # check name == '/', because that doesn't work on windows). For each
364 # check name == '/', because that doesn't work on windows). For each
356 # `name', compare dev/inode numbers. If they match, the list `rel'
365 # `name', compare dev/inode numbers. If they match, the list `rel'
357 # holds the reversed list of components making up the relative file
366 # holds the reversed list of components making up the relative file
358 # name we want.
367 # name we want.
359 root_st = os.stat(root)
368 root_st = os.stat(root)
360 rel = []
369 rel = []
361 while True:
370 while True:
362 try:
371 try:
363 name_st = os.stat(name)
372 name_st = os.stat(name)
364 except OSError:
373 except OSError:
365 break
374 break
366 if samestat(name_st, root_st):
375 if samestat(name_st, root_st):
367 if not rel:
376 if not rel:
368 # name was actually the same as root (maybe a symlink)
377 # name was actually the same as root (maybe a symlink)
369 return ''
378 return ''
370 rel.reverse()
379 rel.reverse()
371 name = os.path.join(*rel)
380 name = os.path.join(*rel)
372 audit_path(name)
381 audit_path(name)
373 return pconvert(name)
382 return pconvert(name)
374 dirname, basename = os.path.split(name)
383 dirname, basename = os.path.split(name)
375 rel.append(basename)
384 rel.append(basename)
376 if dirname == name:
385 if dirname == name:
377 break
386 break
378 name = dirname
387 name = dirname
379
388
380 raise Abort('%s not under root' % myname)
389 raise Abort('%s not under root' % myname)
381
390
382 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
391 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
383 """build a function to match a set of file patterns
392 """build a function to match a set of file patterns
384
393
385 arguments:
394 arguments:
386 canonroot - the canonical root of the tree you're matching against
395 canonroot - the canonical root of the tree you're matching against
387 cwd - the current working directory, if relevant
396 cwd - the current working directory, if relevant
388 names - patterns to find
397 names - patterns to find
389 inc - patterns to include
398 inc - patterns to include
390 exc - patterns to exclude
399 exc - patterns to exclude
391 dflt_pat - if a pattern in names has no explicit type, assume this one
400 dflt_pat - if a pattern in names has no explicit type, assume this one
392 src - where these patterns came from (e.g. .hgignore)
401 src - where these patterns came from (e.g. .hgignore)
393
402
394 a pattern is one of:
403 a pattern is one of:
395 'glob:<glob>' - a glob relative to cwd
404 'glob:<glob>' - a glob relative to cwd
396 're:<regexp>' - a regular expression
405 're:<regexp>' - a regular expression
397 'path:<path>' - a path relative to canonroot
406 'path:<path>' - a path relative to canonroot
398 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
407 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
399 'relpath:<path>' - a path relative to cwd
408 'relpath:<path>' - a path relative to cwd
400 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
409 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
401 '<something>' - one of the cases above, selected by the dflt_pat argument
410 '<something>' - one of the cases above, selected by the dflt_pat argument
402
411
403 returns:
412 returns:
404 a 3-tuple containing
413 a 3-tuple containing
405 - list of roots (places where one should start a recursive walk of the fs);
414 - list of roots (places where one should start a recursive walk of the fs);
406 this often matches the explicit non-pattern names passed in, but also
415 this often matches the explicit non-pattern names passed in, but also
407 includes the initial part of glob: patterns that has no glob characters
416 includes the initial part of glob: patterns that has no glob characters
408 - a bool match(filename) function
417 - a bool match(filename) function
409 - a bool indicating if any patterns were passed in
418 - a bool indicating if any patterns were passed in
410 """
419 """
411
420
412 # a common case: no patterns at all
421 # a common case: no patterns at all
413 if not names and not inc and not exc:
422 if not names and not inc and not exc:
414 return [], always, False
423 return [], always, False
415
424
416 def contains_glob(name):
425 def contains_glob(name):
417 for c in name:
426 for c in name:
418 if c in _globchars: return True
427 if c in _globchars: return True
419 return False
428 return False
420
429
421 def regex(kind, name, tail):
430 def regex(kind, name, tail):
422 '''convert a pattern into a regular expression'''
431 '''convert a pattern into a regular expression'''
423 if not name:
432 if not name:
424 return ''
433 return ''
425 if kind == 're':
434 if kind == 're':
426 return name
435 return name
427 elif kind == 'path':
436 elif kind == 'path':
428 return '^' + re.escape(name) + '(?:/|$)'
437 return '^' + re.escape(name) + '(?:/|$)'
429 elif kind == 'relglob':
438 elif kind == 'relglob':
430 return globre(name, '(?:|.*/)', tail)
439 return globre(name, '(?:|.*/)', tail)
431 elif kind == 'relpath':
440 elif kind == 'relpath':
432 return re.escape(name) + '(?:/|$)'
441 return re.escape(name) + '(?:/|$)'
433 elif kind == 'relre':
442 elif kind == 'relre':
434 if name.startswith('^'):
443 if name.startswith('^'):
435 return name
444 return name
436 return '.*' + name
445 return '.*' + name
437 return globre(name, '', tail)
446 return globre(name, '', tail)
438
447
439 def matchfn(pats, tail):
448 def matchfn(pats, tail):
440 """build a matching function from a set of patterns"""
449 """build a matching function from a set of patterns"""
441 if not pats:
450 if not pats:
442 return
451 return
443 try:
452 try:
444 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
453 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
445 if len(pat) > 20000:
454 if len(pat) > 20000:
446 raise OverflowError()
455 raise OverflowError()
447 return re.compile(pat).match
456 return re.compile(pat).match
448 except OverflowError:
457 except OverflowError:
449 # We're using a Python with a tiny regex engine and we
458 # We're using a Python with a tiny regex engine and we
450 # made it explode, so we'll divide the pattern list in two
459 # made it explode, so we'll divide the pattern list in two
451 # until it works
460 # until it works
452 l = len(pats)
461 l = len(pats)
453 if l < 2:
462 if l < 2:
454 raise
463 raise
455 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
464 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
456 return lambda s: a(s) or b(s)
465 return lambda s: a(s) or b(s)
457 except re.error:
466 except re.error:
458 for k, p in pats:
467 for k, p in pats:
459 try:
468 try:
460 re.compile('(?:%s)' % regex(k, p, tail))
469 re.compile('(?:%s)' % regex(k, p, tail))
461 except re.error:
470 except re.error:
462 if src:
471 if src:
463 raise Abort("%s: invalid pattern (%s): %s" %
472 raise Abort("%s: invalid pattern (%s): %s" %
464 (src, k, p))
473 (src, k, p))
465 else:
474 else:
466 raise Abort("invalid pattern (%s): %s" % (k, p))
475 raise Abort("invalid pattern (%s): %s" % (k, p))
467 raise Abort("invalid pattern")
476 raise Abort("invalid pattern")
468
477
469 def globprefix(pat):
478 def globprefix(pat):
470 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
479 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
471 root = []
480 root = []
472 for p in pat.split('/'):
481 for p in pat.split('/'):
473 if contains_glob(p): break
482 if contains_glob(p): break
474 root.append(p)
483 root.append(p)
475 return '/'.join(root) or '.'
484 return '/'.join(root) or '.'
476
485
477 def normalizepats(names, default):
486 def normalizepats(names, default):
478 pats = []
487 pats = []
479 roots = []
488 roots = []
480 anypats = False
489 anypats = False
481 for kind, name in [patkind(p, default) for p in names]:
490 for kind, name in [patkind(p, default) for p in names]:
482 if kind in ('glob', 'relpath'):
491 if kind in ('glob', 'relpath'):
483 name = canonpath(canonroot, cwd, name)
492 name = canonpath(canonroot, cwd, name)
484 elif kind in ('relglob', 'path'):
493 elif kind in ('relglob', 'path'):
485 name = normpath(name)
494 name = normpath(name)
486
495
487 pats.append((kind, name))
496 pats.append((kind, name))
488
497
489 if kind in ('glob', 're', 'relglob', 'relre'):
498 if kind in ('glob', 're', 'relglob', 'relre'):
490 anypats = True
499 anypats = True
491
500
492 if kind == 'glob':
501 if kind == 'glob':
493 root = globprefix(name)
502 root = globprefix(name)
494 roots.append(root)
503 roots.append(root)
495 elif kind in ('relpath', 'path'):
504 elif kind in ('relpath', 'path'):
496 roots.append(name or '.')
505 roots.append(name or '.')
497 elif kind == 'relglob':
506 elif kind == 'relglob':
498 roots.append('.')
507 roots.append('.')
499 return roots, pats, anypats
508 return roots, pats, anypats
500
509
501 roots, pats, anypats = normalizepats(names, dflt_pat)
510 roots, pats, anypats = normalizepats(names, dflt_pat)
502
511
503 patmatch = matchfn(pats, '$') or always
512 patmatch = matchfn(pats, '$') or always
504 incmatch = always
513 incmatch = always
505 if inc:
514 if inc:
506 dummy, inckinds, dummy = normalizepats(inc, 'glob')
515 dummy, inckinds, dummy = normalizepats(inc, 'glob')
507 incmatch = matchfn(inckinds, '(?:/|$)')
516 incmatch = matchfn(inckinds, '(?:/|$)')
508 excmatch = never
517 excmatch = never
509 if exc:
518 if exc:
510 dummy, exckinds, dummy = normalizepats(exc, 'glob')
519 dummy, exckinds, dummy = normalizepats(exc, 'glob')
511 excmatch = matchfn(exckinds, '(?:/|$)')
520 excmatch = matchfn(exckinds, '(?:/|$)')
512
521
513 if not names and inc and not exc:
522 if not names and inc and not exc:
514 # common case: hgignore patterns
523 # common case: hgignore patterns
515 match = incmatch
524 match = incmatch
516 else:
525 else:
517 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
526 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
518
527
519 return (roots, match, (inc or exc or anypats) and True)
528 return (roots, match, (inc or exc or anypats) and True)
520
529
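A minimal sketch of driving matcher() with a regexp pattern, which stays off the filesystem (glob and path kinds go through canonpath and may stat under the root); paths are hypothetical:

from mercurial import util

roots, match, anypats = util.matcher('/repo', names=[r're:.*\.py$'])
roots                      # []   - 're:' patterns contribute no walk roots
bool(match('setup.py'))    # True
bool(match('README'))      # False
anypats                    # True - at least one real pattern was given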
521 _hgexecutable = None
530 _hgexecutable = None
522
531
523 def main_is_frozen():
532 def main_is_frozen():
524 """return True if we are a frozen executable.
533 """return True if we are a frozen executable.
525
534
526 The code supports py2exe (most common, Windows only) and tools/freeze
535 The code supports py2exe (most common, Windows only) and tools/freeze
527 (portable, not much used).
536 (portable, not much used).
528 """
537 """
529 return (hasattr(sys, "frozen") or # new py2exe
538 return (hasattr(sys, "frozen") or # new py2exe
530 hasattr(sys, "importers") or # old py2exe
539 hasattr(sys, "importers") or # old py2exe
531 imp.is_frozen("__main__")) # tools/freeze
540 imp.is_frozen("__main__")) # tools/freeze
532
541
533 def hgexecutable():
542 def hgexecutable():
534 """return location of the 'hg' executable.
543 """return location of the 'hg' executable.
535
544
536 Defaults to $HG or 'hg' in the search path.
545 Defaults to $HG or 'hg' in the search path.
537 """
546 """
538 if _hgexecutable is None:
547 if _hgexecutable is None:
539 hg = os.environ.get('HG')
548 hg = os.environ.get('HG')
540 if hg:
549 if hg:
541 set_hgexecutable(hg)
550 set_hgexecutable(hg)
542 elif main_is_frozen():
551 elif main_is_frozen():
543 set_hgexecutable(sys.executable)
552 set_hgexecutable(sys.executable)
544 else:
553 else:
545 set_hgexecutable(find_exe('hg') or 'hg')
554 set_hgexecutable(find_exe('hg') or 'hg')
546 return _hgexecutable
555 return _hgexecutable
547
556
548 def set_hgexecutable(path):
557 def set_hgexecutable(path):
549 """set location of the 'hg' executable"""
558 """set location of the 'hg' executable"""
550 global _hgexecutable
559 global _hgexecutable
551 _hgexecutable = path
560 _hgexecutable = path
552
561
553 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
562 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
554 '''enhanced shell command execution.
563 '''enhanced shell command execution.
555 run with environment maybe modified, maybe in different dir.
564 run with environment maybe modified, maybe in different dir.
556
565
557 if the command fails and onerr is None, return the exit status. if
566 if the command fails and onerr is None, return the exit status. if
558 onerr is a ui object, print an error message and return the status;
567 onerr is a ui object, print an error message and return the status;
559 otherwise raise onerr as an exception.'''
568 otherwise raise onerr as an exception.'''
560 def py2shell(val):
569 def py2shell(val):
561 'convert python object into string that is useful to shell'
570 'convert python object into string that is useful to shell'
562 if val in (None, False):
571 if val in (None, False):
563 return '0'
572 return '0'
564 if val == True:
573 if val == True:
565 return '1'
574 return '1'
566 return str(val)
575 return str(val)
567 oldenv = {}
576 oldenv = {}
568 for k in environ:
577 for k in environ:
569 oldenv[k] = os.environ.get(k)
578 oldenv[k] = os.environ.get(k)
570 if cwd is not None:
579 if cwd is not None:
571 oldcwd = os.getcwd()
580 oldcwd = os.getcwd()
572 origcmd = cmd
581 origcmd = cmd
573 if os.name == 'nt':
582 if os.name == 'nt':
574 cmd = '"%s"' % cmd
583 cmd = '"%s"' % cmd
575 try:
584 try:
576 for k, v in environ.iteritems():
585 for k, v in environ.iteritems():
577 os.environ[k] = py2shell(v)
586 os.environ[k] = py2shell(v)
578 os.environ['HG'] = hgexecutable()
587 os.environ['HG'] = hgexecutable()
579 if cwd is not None and oldcwd != cwd:
588 if cwd is not None and oldcwd != cwd:
580 os.chdir(cwd)
589 os.chdir(cwd)
581 rc = os.system(cmd)
590 rc = os.system(cmd)
582 if sys.platform == 'OpenVMS' and rc & 1:
591 if sys.platform == 'OpenVMS' and rc & 1:
583 rc = 0
592 rc = 0
584 if rc and onerr:
593 if rc and onerr:
585 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
594 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
586 explain_exit(rc)[0])
595 explain_exit(rc)[0])
587 if errprefix:
596 if errprefix:
588 errmsg = '%s: %s' % (errprefix, errmsg)
597 errmsg = '%s: %s' % (errprefix, errmsg)
589 try:
598 try:
590 onerr.warn(errmsg + '\n')
599 onerr.warn(errmsg + '\n')
591 except AttributeError:
600 except AttributeError:
592 raise onerr(errmsg)
601 raise onerr(errmsg)
593 return rc
602 return rc
594 finally:
603 finally:
595 for k, v in oldenv.iteritems():
604 for k, v in oldenv.iteritems():
596 if v is None:
605 if v is None:
597 del os.environ[k]
606 del os.environ[k]
598 else:
607 else:
599 os.environ[k] = v
608 os.environ[k] = v
600 if cwd is not None and oldcwd != cwd:
609 if cwd is not None and oldcwd != cwd:
601 os.chdir(oldcwd)
610 os.chdir(oldcwd)
602
611
603 def checksignature(func):
612 def checksignature(func):
604 '''wrap a function with code to check for calling errors'''
613 '''wrap a function with code to check for calling errors'''
605 def check(*args, **kwargs):
614 def check(*args, **kwargs):
606 try:
615 try:
607 return func(*args, **kwargs)
616 return func(*args, **kwargs)
608 except TypeError:
617 except TypeError:
609 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
618 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
610 raise error.SignatureError
619 raise error.SignatureError
611 raise
620 raise
612
621
613 return check
622 return check
614
623
615 # os.path.lexists is not available on python2.3
624 # os.path.lexists is not available on python2.3
616 def lexists(filename):
625 def lexists(filename):
617 "test whether a file with this name exists. does not follow symlinks"
626 "test whether a file with this name exists. does not follow symlinks"
618 try:
627 try:
619 os.lstat(filename)
628 os.lstat(filename)
620 except:
629 except:
621 return False
630 return False
622 return True
631 return True
623
632
624 def rename(src, dst):
633 def rename(src, dst):
625 """forcibly rename a file"""
634 """forcibly rename a file"""
626 try:
635 try:
627 os.rename(src, dst)
636 os.rename(src, dst)
628 except OSError, err: # FIXME: check err (EEXIST ?)
637 except OSError, err: # FIXME: check err (EEXIST ?)
629 # on windows, rename to existing file is not allowed, so we
638 # on windows, rename to existing file is not allowed, so we
630 # must delete destination first. but if file is open, unlink
639 # must delete destination first. but if file is open, unlink
631 # schedules it for delete but does not delete it. rename
640 # schedules it for delete but does not delete it. rename
632 # happens immediately even for open files, so we rename
641 # happens immediately even for open files, so we rename
633 # destination to a temporary name, then delete that. then
642 # destination to a temporary name, then delete that. then
634 # rename is safe to do.
643 # rename is safe to do.
635 temp = dst + "-force-rename"
644 temp = dst + "-force-rename"
636 os.rename(dst, temp)
645 os.rename(dst, temp)
637 os.unlink(temp)
646 os.unlink(temp)
638 os.rename(src, dst)
647 os.rename(src, dst)
639
648
640 def unlink(f):
649 def unlink(f):
641 """unlink and remove the directory if it is empty"""
650 """unlink and remove the directory if it is empty"""
642 os.unlink(f)
651 os.unlink(f)
643 # try removing directories that might now be empty
652 # try removing directories that might now be empty
644 try:
653 try:
645 os.removedirs(os.path.dirname(f))
654 os.removedirs(os.path.dirname(f))
646 except OSError:
655 except OSError:
647 pass
656 pass
648
657
649 def copyfile(src, dest):
658 def copyfile(src, dest):
650 "copy a file, preserving mode and atime/mtime"
659 "copy a file, preserving mode and atime/mtime"
651 if os.path.islink(src):
660 if os.path.islink(src):
652 try:
661 try:
653 os.unlink(dest)
662 os.unlink(dest)
654 except:
663 except:
655 pass
664 pass
656 os.symlink(os.readlink(src), dest)
665 os.symlink(os.readlink(src), dest)
657 else:
666 else:
658 try:
667 try:
659 shutil.copyfile(src, dest)
668 shutil.copyfile(src, dest)
660 shutil.copystat(src, dest)
669 shutil.copystat(src, dest)
661 except shutil.Error, inst:
670 except shutil.Error, inst:
662 raise Abort(str(inst))
671 raise Abort(str(inst))
663
672
664 def copyfiles(src, dst, hardlink=None):
673 def copyfiles(src, dst, hardlink=None):
665 """Copy a directory tree using hardlinks if possible"""
674 """Copy a directory tree using hardlinks if possible"""
666
675
667 if hardlink is None:
676 if hardlink is None:
668 hardlink = (os.stat(src).st_dev ==
677 hardlink = (os.stat(src).st_dev ==
669 os.stat(os.path.dirname(dst)).st_dev)
678 os.stat(os.path.dirname(dst)).st_dev)
670
679
671 if os.path.isdir(src):
680 if os.path.isdir(src):
672 os.mkdir(dst)
681 os.mkdir(dst)
673 for name, kind in osutil.listdir(src):
682 for name, kind in osutil.listdir(src):
674 srcname = os.path.join(src, name)
683 srcname = os.path.join(src, name)
675 dstname = os.path.join(dst, name)
684 dstname = os.path.join(dst, name)
676 copyfiles(srcname, dstname, hardlink)
685 copyfiles(srcname, dstname, hardlink)
677 else:
686 else:
678 if hardlink:
687 if hardlink:
679 try:
688 try:
680 os_link(src, dst)
689 os_link(src, dst)
681 except (IOError, OSError):
690 except (IOError, OSError):
682 hardlink = False
691 hardlink = False
683 shutil.copy(src, dst)
692 shutil.copy(src, dst)
684 else:
693 else:
685 shutil.copy(src, dst)
694 shutil.copy(src, dst)
686
695
687 class path_auditor(object):
696 class path_auditor(object):
688 '''ensure that a filesystem path contains no banned components.
697 '''ensure that a filesystem path contains no banned components.
689 the following properties of a path are checked:
698 the following properties of a path are checked:
690
699
691 - under top-level .hg
700 - under top-level .hg
692 - starts at the root of a windows drive
701 - starts at the root of a windows drive
693 - contains ".."
702 - contains ".."
694 - traverses a symlink (e.g. a/symlink_here/b)
703 - traverses a symlink (e.g. a/symlink_here/b)
695 - inside a nested repository'''
704 - inside a nested repository'''
696
705
697 def __init__(self, root):
706 def __init__(self, root):
698 self.audited = set()
707 self.audited = set()
699 self.auditeddir = set()
708 self.auditeddir = set()
700 self.root = root
709 self.root = root
701
710
702 def __call__(self, path):
711 def __call__(self, path):
703 if path in self.audited:
712 if path in self.audited:
704 return
713 return
705 normpath = os.path.normcase(path)
714 normpath = os.path.normcase(path)
706 parts = splitpath(normpath)
715 parts = splitpath(normpath)
707 if (os.path.splitdrive(path)[0]
716 if (os.path.splitdrive(path)[0]
708 or parts[0].lower() in ('.hg', '.hg.', '')
717 or parts[0].lower() in ('.hg', '.hg.', '')
709 or os.pardir in parts):
718 or os.pardir in parts):
710 raise Abort(_("path contains illegal component: %s") % path)
719 raise Abort(_("path contains illegal component: %s") % path)
711 if '.hg' in path.lower():
720 if '.hg' in path.lower():
712 lparts = [p.lower() for p in parts]
721 lparts = [p.lower() for p in parts]
713 for p in '.hg', '.hg.':
722 for p in '.hg', '.hg.':
714 if p in lparts[1:]:
723 if p in lparts[1:]:
715 pos = lparts.index(p)
724 pos = lparts.index(p)
716 base = os.path.join(*parts[:pos])
725 base = os.path.join(*parts[:pos])
717 raise Abort(_('path %r is inside repo %r') % (path, base))
726 raise Abort(_('path %r is inside repo %r') % (path, base))
718 def check(prefix):
727 def check(prefix):
719 curpath = os.path.join(self.root, prefix)
728 curpath = os.path.join(self.root, prefix)
720 try:
729 try:
721 st = os.lstat(curpath)
730 st = os.lstat(curpath)
722 except OSError, err:
731 except OSError, err:
723 # EINVAL can be raised as invalid path syntax under win32.
732 # EINVAL can be raised as invalid path syntax under win32.
724 # They must be ignored so that patterns can still be checked.
733 # They must be ignored so that patterns can still be checked.
725 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
734 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
726 raise
735 raise
727 else:
736 else:
728 if stat.S_ISLNK(st.st_mode):
737 if stat.S_ISLNK(st.st_mode):
729 raise Abort(_('path %r traverses symbolic link %r') %
738 raise Abort(_('path %r traverses symbolic link %r') %
730 (path, prefix))
739 (path, prefix))
731 elif (stat.S_ISDIR(st.st_mode) and
740 elif (stat.S_ISDIR(st.st_mode) and
732 os.path.isdir(os.path.join(curpath, '.hg'))):
741 os.path.isdir(os.path.join(curpath, '.hg'))):
733 raise Abort(_('path %r is inside repo %r') %
742 raise Abort(_('path %r is inside repo %r') %
734 (path, prefix))
743 (path, prefix))
735 parts.pop()
744 parts.pop()
736 prefixes = []
745 prefixes = []
737 for n in range(len(parts)):
746 for n in range(len(parts)):
738 prefix = os.sep.join(parts)
747 prefix = os.sep.join(parts)
739 if prefix in self.auditeddir:
748 if prefix in self.auditeddir:
740 break
749 break
741 check(prefix)
750 check(prefix)
742 prefixes.append(prefix)
751 prefixes.append(prefix)
743 parts.pop()
752 parts.pop()
744
753
745 self.audited.add(path)
754 self.audited.add(path)
746 # only add prefixes to the cache after checking everything: we don't
755 # only add prefixes to the cache after checking everything: we don't
747 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
756 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
748 self.auditeddir.update(prefixes)
757 self.auditeddir.update(prefixes)
749
758
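A short sketch of the auditor in use (it may lstat() prefixes under the root while checking for symlinks and nested repos); the paths are hypothetical:

from mercurial import util

audit = util.path_auditor('/repo')
audit('src/module.c')           # fine: no banned component, result is cached
try:
    audit('.hg/store/data')     # top-level .hg is rejected
except util.Abort:
    pass
try:
    audit('../etc/passwd')      # '..' components are rejected as well
except util.Abort:
    pass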
750 def nlinks(pathname):
759 def nlinks(pathname):
751 """Return number of hardlinks for the given file."""
760 """Return number of hardlinks for the given file."""
752 return os.lstat(pathname).st_nlink
761 return os.lstat(pathname).st_nlink
753
762
754 if hasattr(os, 'link'):
763 if hasattr(os, 'link'):
755 os_link = os.link
764 os_link = os.link
756 else:
765 else:
757 def os_link(src, dst):
766 def os_link(src, dst):
758 raise OSError(0, _("Hardlinks not supported"))
767 raise OSError(0, _("Hardlinks not supported"))
759
768
760 def lookup_reg(key, name=None, scope=None):
769 def lookup_reg(key, name=None, scope=None):
761 return None
770 return None
762
771
763 if os.name == 'nt':
772 if os.name == 'nt':
764 from windows import *
773 from windows import *
765 def expand_glob(pats):
774 def expand_glob(pats):
766 '''On Windows, expand the implicit globs in a list of patterns'''
775 '''On Windows, expand the implicit globs in a list of patterns'''
767 ret = []
776 ret = []
768 for p in pats:
777 for p in pats:
769 kind, name = patkind(p, None)
778 kind, name = patkind(p, None)
770 if kind is None:
779 if kind is None:
771 globbed = glob.glob(name)
780 globbed = glob.glob(name)
772 if globbed:
781 if globbed:
773 ret.extend(globbed)
782 ret.extend(globbed)
774 continue
783 continue
775 # if we couldn't expand the glob, just keep it around
784 # if we couldn't expand the glob, just keep it around
776 ret.append(p)
785 ret.append(p)
777 return ret
786 return ret
778 else:
787 else:
779 from posix import *
788 from posix import *
780
789
781 def makelock(info, pathname):
790 def makelock(info, pathname):
782 try:
791 try:
783 return os.symlink(info, pathname)
792 return os.symlink(info, pathname)
784 except OSError, why:
793 except OSError, why:
785 if why.errno == errno.EEXIST:
794 if why.errno == errno.EEXIST:
786 raise
795 raise
787 except AttributeError: # no symlink in os
796 except AttributeError: # no symlink in os
788 pass
797 pass
789
798
790 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
799 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
791 os.write(ld, info)
800 os.write(ld, info)
792 os.close(ld)
801 os.close(ld)
793
802
794 def readlock(pathname):
803 def readlock(pathname):
795 try:
804 try:
796 return os.readlink(pathname)
805 return os.readlink(pathname)
797 except OSError, why:
806 except OSError, why:
798 if why.errno not in (errno.EINVAL, errno.ENOSYS):
807 if why.errno not in (errno.EINVAL, errno.ENOSYS):
799 raise
808 raise
800 except AttributeError: # no symlink in os
809 except AttributeError: # no symlink in os
801 pass
810 pass
802 return posixfile(pathname).read()
811 return posixfile(pathname).read()
803
812
804 def fstat(fp):
813 def fstat(fp):
805 '''stat file object that may not have fileno method.'''
814 '''stat file object that may not have fileno method.'''
806 try:
815 try:
807 return os.fstat(fp.fileno())
816 return os.fstat(fp.fileno())
808 except AttributeError:
817 except AttributeError:
809 return os.stat(fp.name)
818 return os.stat(fp.name)
810
819
811 # File system features
820 # File system features
812
821
813 def checkcase(path):
822 def checkcase(path):
814 """
823 """
815 Check whether the given path is on a case-sensitive filesystem
824 Check whether the given path is on a case-sensitive filesystem
816
825
817 Requires a path (like /foo/.hg) ending with a foldable final
826 Requires a path (like /foo/.hg) ending with a foldable final
818 directory component.
827 directory component.
819 """
828 """
820 s1 = os.stat(path)
829 s1 = os.stat(path)
821 d, b = os.path.split(path)
830 d, b = os.path.split(path)
822 p2 = os.path.join(d, b.upper())
831 p2 = os.path.join(d, b.upper())
823 if path == p2:
832 if path == p2:
824 p2 = os.path.join(d, b.lower())
833 p2 = os.path.join(d, b.lower())
825 try:
834 try:
826 s2 = os.stat(p2)
835 s2 = os.stat(p2)
827 if s2 == s1:
836 if s2 == s1:
828 return False
837 return False
829 return True
838 return True
830 except:
839 except:
831 return True
840 return True
832
841
833 _fspathcache = {}
842 _fspathcache = {}
834 def fspath(name, root):
843 def fspath(name, root):
835 '''Get name in the case stored in the filesystem
844 '''Get name in the case stored in the filesystem
836
845
837 The name is either relative to root, or it is an absolute path starting
846 The name is either relative to root, or it is an absolute path starting
838 with root. Note that this function is unnecessary, and should not be
847 with root. Note that this function is unnecessary, and should not be
839 called, for case-sensitive filesystems (simply because it's expensive).
848 called, for case-sensitive filesystems (simply because it's expensive).
840 '''
849 '''
841 # If name is absolute, make it relative
850 # If name is absolute, make it relative
842 if name.lower().startswith(root.lower()):
851 if name.lower().startswith(root.lower()):
843 l = len(root)
852 l = len(root)
844 if name[l] == os.sep or name[l] == os.altsep:
853 if name[l] == os.sep or name[l] == os.altsep:
845 l = l + 1
854 l = l + 1
846 name = name[l:]
855 name = name[l:]
847
856
848 if not os.path.exists(os.path.join(root, name)):
857 if not os.path.exists(os.path.join(root, name)):
849 return None
858 return None
850
859
851 seps = os.sep
860 seps = os.sep
852 if os.altsep:
861 if os.altsep:
853 seps = seps + os.altsep
862 seps = seps + os.altsep
854 # Protect backslashes. This gets silly very quickly.
863 # Protect backslashes. This gets silly very quickly.
855 seps = seps.replace('\\', '\\\\')
864 seps = seps.replace('\\', '\\\\')
856 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
865 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
857 dir = os.path.normcase(os.path.normpath(root))
866 dir = os.path.normcase(os.path.normpath(root))
858 result = []
867 result = []
859 for part, sep in pattern.findall(name):
868 for part, sep in pattern.findall(name):
860 if sep:
869 if sep:
861 result.append(sep)
870 result.append(sep)
862 continue
871 continue
863
872
864 if dir not in _fspathcache:
873 if dir not in _fspathcache:
865 _fspathcache[dir] = os.listdir(dir)
874 _fspathcache[dir] = os.listdir(dir)
866 contents = _fspathcache[dir]
875 contents = _fspathcache[dir]
867
876
868 lpart = part.lower()
877 lpart = part.lower()
869 for n in contents:
878 for n in contents:
870 if n.lower() == lpart:
879 if n.lower() == lpart:
871 result.append(n)
880 result.append(n)
872 break
881 break
873 else:
882 else:
874 # Cannot happen, as the file exists!
883 # Cannot happen, as the file exists!
875 result.append(part)
884 result.append(part)
876 dir = os.path.join(dir, lpart)
885 dir = os.path.join(dir, lpart)
877
886
878 return ''.join(result)
887 return ''.join(result)
879
888
880 def checkexec(path):
889 def checkexec(path):
881 """
890 """
882 Check whether the given path is on a filesystem with UNIX-like exec flags
891 Check whether the given path is on a filesystem with UNIX-like exec flags
883
892
884 Requires a directory (like /foo/.hg)
893 Requires a directory (like /foo/.hg)
885 """
894 """
886
895
887 # VFAT on some Linux versions can flip mode but it doesn't persist
896 # VFAT on some Linux versions can flip mode but it doesn't persist
888 # across a FS remount. Frequently we can detect it if files are created
897 # across a FS remount. Frequently we can detect it if files are created
889 # with exec bit on.
898 # with exec bit on.
890
899
891 try:
900 try:
892 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
901 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
893 fh, fn = tempfile.mkstemp("", "", path)
902 fh, fn = tempfile.mkstemp("", "", path)
894 try:
903 try:
895 os.close(fh)
904 os.close(fh)
896 m = os.stat(fn).st_mode & 0777
905 m = os.stat(fn).st_mode & 0777
897 new_file_has_exec = m & EXECFLAGS
906 new_file_has_exec = m & EXECFLAGS
898 os.chmod(fn, m ^ EXECFLAGS)
907 os.chmod(fn, m ^ EXECFLAGS)
899 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
908 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
900 finally:
909 finally:
901 os.unlink(fn)
910 os.unlink(fn)
902 except (IOError, OSError):
911 except (IOError, OSError):
903 # we don't care, the user probably won't be able to commit anyway
912 # we don't care, the user probably won't be able to commit anyway
904 return False
913 return False
905 return not (new_file_has_exec or exec_flags_cannot_flip)
914 return not (new_file_has_exec or exec_flags_cannot_flip)
906
915
907 def checklink(path):
916 def checklink(path):
908 """check whether the given path is on a symlink-capable filesystem"""
917 """check whether the given path is on a symlink-capable filesystem"""
909 # mktemp is not racy because symlink creation will fail if the
918 # mktemp is not racy because symlink creation will fail if the
910 # file already exists
919 # file already exists
911 name = tempfile.mktemp(dir=path)
920 name = tempfile.mktemp(dir=path)
912 try:
921 try:
913 os.symlink(".", name)
922 os.symlink(".", name)
914 os.unlink(name)
923 os.unlink(name)
915 return True
924 return True
916 except (OSError, AttributeError):
925 except (OSError, AttributeError):
917 return False
926 return False
918
927
919 def needbinarypatch():
928 def needbinarypatch():
920 """return True if patches should be applied in binary mode by default."""
929 """return True if patches should be applied in binary mode by default."""
921 return os.name == 'nt'
930 return os.name == 'nt'
922
931
923 def endswithsep(path):
932 def endswithsep(path):
924 '''Check path ends with os.sep or os.altsep.'''
933 '''Check path ends with os.sep or os.altsep.'''
925 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
934 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
926
935
927 def splitpath(path):
936 def splitpath(path):
928 '''Split path by os.sep.
937 '''Split path by os.sep.
929 Note that this function does not use os.altsep because it is
938 Note that this function does not use os.altsep because it is
930 meant as a drop-in alternative to a simple "xxx.split(os.sep)".
939 meant as a drop-in alternative to a simple "xxx.split(os.sep)".
931 It is recommended to run os.path.normpath() on the path before
940 It is recommended to run os.path.normpath() on the path before
932 using this function, if needed.'''
941 using this function, if needed.'''
933 return path.split(os.sep)
942 return path.split(os.sep)
934
943
935 def gui():
944 def gui():
936 '''Are we running in a GUI?'''
945 '''Are we running in a GUI?'''
937 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
946 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
938
947
939 def mktempcopy(name, emptyok=False, createmode=None):
948 def mktempcopy(name, emptyok=False, createmode=None):
940 """Create a temporary file with the same contents from name
949 """Create a temporary file with the same contents from name
941
950
942 The permission bits are copied from the original file.
951 The permission bits are copied from the original file.
943
952
944 If the temporary file is going to be truncated immediately, you
953 If the temporary file is going to be truncated immediately, you
945 can use emptyok=True as an optimization.
954 can use emptyok=True as an optimization.
946
955
947 Returns the name of the temporary file.
956 Returns the name of the temporary file.
948 """
957 """
949 d, fn = os.path.split(name)
958 d, fn = os.path.split(name)
950 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
959 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
951 os.close(fd)
960 os.close(fd)
952 # Temporary files are created with mode 0600, which is usually not
961 # Temporary files are created with mode 0600, which is usually not
953 # what we want. If the original file already exists, just copy
962 # what we want. If the original file already exists, just copy
954 # its mode. Otherwise, manually obey umask.
963 # its mode. Otherwise, manually obey umask.
955 try:
964 try:
956 st_mode = os.lstat(name).st_mode & 0777
965 st_mode = os.lstat(name).st_mode & 0777
957 except OSError, inst:
966 except OSError, inst:
958 if inst.errno != errno.ENOENT:
967 if inst.errno != errno.ENOENT:
959 raise
968 raise
960 st_mode = createmode
969 st_mode = createmode
961 if st_mode is None:
970 if st_mode is None:
962 st_mode = ~umask
971 st_mode = ~umask
963 st_mode &= 0666
972 st_mode &= 0666
964 os.chmod(temp, st_mode)
973 os.chmod(temp, st_mode)
965 if emptyok:
974 if emptyok:
966 return temp
975 return temp
967 try:
976 try:
968 try:
977 try:
969 ifp = posixfile(name, "rb")
978 ifp = posixfile(name, "rb")
970 except IOError, inst:
979 except IOError, inst:
971 if inst.errno == errno.ENOENT:
980 if inst.errno == errno.ENOENT:
972 return temp
981 return temp
973 if not getattr(inst, 'filename', None):
982 if not getattr(inst, 'filename', None):
974 inst.filename = name
983 inst.filename = name
975 raise
984 raise
976 ofp = posixfile(temp, "wb")
985 ofp = posixfile(temp, "wb")
977 for chunk in filechunkiter(ifp):
986 for chunk in filechunkiter(ifp):
978 ofp.write(chunk)
987 ofp.write(chunk)
979 ifp.close()
988 ifp.close()
980 ofp.close()
989 ofp.close()
981 except:
990 except:
982 try: os.unlink(temp)
991 try: os.unlink(temp)
983 except: pass
992 except: pass
984 raise
993 raise
985 return temp
994 return temp
986
995
987 class atomictempfile(posixfile):
996 class atomictempfile(posixfile):
988 """file-like object that atomically updates a file
997 """file-like object that atomically updates a file
989
998
990 All writes will be redirected to a temporary copy of the original
999 All writes will be redirected to a temporary copy of the original
991 file. When rename is called, the copy is renamed to the original
1000 file. When rename is called, the copy is renamed to the original
992 name, making the changes visible.
1001 name, making the changes visible.
993 """
1002 """
994 def __init__(self, name, mode, createmode):
1003 def __init__(self, name, mode, createmode):
995 self.__name = name
1004 self.__name = name
996 self.temp = mktempcopy(name, emptyok=('w' in mode),
1005 self.temp = mktempcopy(name, emptyok=('w' in mode),
997 createmode=createmode)
1006 createmode=createmode)
998 posixfile.__init__(self, self.temp, mode)
1007 posixfile.__init__(self, self.temp, mode)
999
1008
1000 def rename(self):
1009 def rename(self):
1001 if not self.closed:
1010 if not self.closed:
1002 posixfile.close(self)
1011 posixfile.close(self)
1003 rename(self.temp, localpath(self.__name))
1012 rename(self.temp, localpath(self.__name))
1004
1013
1005 def __del__(self):
1014 def __del__(self):
1006 if not self.closed:
1015 if not self.closed:
1007 try:
1016 try:
1008 os.unlink(self.temp)
1017 os.unlink(self.temp)
1009 except: pass
1018 except: pass
1010 posixfile.close(self)
1019 posixfile.close(self)
1011
1020
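A sketch of the atomic-update pattern with a hypothetical target path: nothing is visible at the target name until rename() is called.

from mercurial import util

f = util.atomictempfile('/tmp/hg-example.txt', 'w', None)   # createmode=None
f.write('all or nothing\n')      # goes to a temporary copy next to the target
f.rename()                       # atomically moves the temp file into place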
1012 def makedirs(name, mode=None):
1021 def makedirs(name, mode=None):
1013 """recursive directory creation with parent mode inheritance"""
1022 """recursive directory creation with parent mode inheritance"""
1014 try:
1023 try:
1015 os.mkdir(name)
1024 os.mkdir(name)
1016 if mode is not None:
1025 if mode is not None:
1017 os.chmod(name, mode)
1026 os.chmod(name, mode)
1018 return
1027 return
1019 except OSError, err:
1028 except OSError, err:
1020 if err.errno == errno.EEXIST:
1029 if err.errno == errno.EEXIST:
1021 return
1030 return
1022 if err.errno != errno.ENOENT:
1031 if err.errno != errno.ENOENT:
1023 raise
1032 raise
1024 parent = os.path.abspath(os.path.dirname(name))
1033 parent = os.path.abspath(os.path.dirname(name))
1025 makedirs(parent, mode)
1034 makedirs(parent, mode)
1026 makedirs(name, mode)
1035 makedirs(name, mode)
1027
1036
1028 class opener(object):
1037 class opener(object):
1029 """Open files relative to a base directory
1038 """Open files relative to a base directory
1030
1039
1031 This class is used to hide the details of COW semantics and
1040 This class is used to hide the details of COW semantics and
1032 remote file access from higher level code.
1041 remote file access from higher level code.
1033 """
1042 """
1034 def __init__(self, base, audit=True):
1043 def __init__(self, base, audit=True):
1035 self.base = base
1044 self.base = base
1036 if audit:
1045 if audit:
1037 self.audit_path = path_auditor(base)
1046 self.audit_path = path_auditor(base)
1038 else:
1047 else:
1039 self.audit_path = always
1048 self.audit_path = always
1040 self.createmode = None
1049 self.createmode = None
1041
1050
1042 def __getattr__(self, name):
1051 def __getattr__(self, name):
1043 if name == '_can_symlink':
1052 if name == '_can_symlink':
1044 self._can_symlink = checklink(self.base)
1053 self._can_symlink = checklink(self.base)
1045 return self._can_symlink
1054 return self._can_symlink
1046 raise AttributeError(name)
1055 raise AttributeError(name)
1047
1056
1048 def _fixfilemode(self, name):
1057 def _fixfilemode(self, name):
1049 if self.createmode is None:
1058 if self.createmode is None:
1050 return
1059 return
1051 os.chmod(name, self.createmode & 0666)
1060 os.chmod(name, self.createmode & 0666)
1052
1061
1053 def __call__(self, path, mode="r", text=False, atomictemp=False):
1062 def __call__(self, path, mode="r", text=False, atomictemp=False):
1054 self.audit_path(path)
1063 self.audit_path(path)
1055 f = os.path.join(self.base, path)
1064 f = os.path.join(self.base, path)
1056
1065
1057 if not text and "b" not in mode:
1066 if not text and "b" not in mode:
1058 mode += "b" # for that other OS
1067 mode += "b" # for that other OS
1059
1068
1060 nlink = -1
1069 nlink = -1
1061 if mode not in ("r", "rb"):
1070 if mode not in ("r", "rb"):
1062 try:
1071 try:
1063 nlink = nlinks(f)
1072 nlink = nlinks(f)
1064 except OSError:
1073 except OSError:
1065 nlink = 0
1074 nlink = 0
1066 d = os.path.dirname(f)
1075 d = os.path.dirname(f)
1067 if not os.path.isdir(d):
1076 if not os.path.isdir(d):
1068 makedirs(d, self.createmode)
1077 makedirs(d, self.createmode)
1069 if atomictemp:
1078 if atomictemp:
1070 return atomictempfile(f, mode, self.createmode)
1079 return atomictempfile(f, mode, self.createmode)
1071 if nlink > 1:
1080 if nlink > 1:
1072 rename(mktempcopy(f), f)
1081 rename(mktempcopy(f), f)
1073 fp = posixfile(f, mode)
1082 fp = posixfile(f, mode)
1074 if nlink == 0:
1083 if nlink == 0:
1075 self._fixfilemode(f)
1084 self._fixfilemode(f)
1076 return fp
1085 return fp
1077
1086
1078 def symlink(self, src, dst):
1087 def symlink(self, src, dst):
1079 self.audit_path(dst)
1088 self.audit_path(dst)
1080 linkname = os.path.join(self.base, dst)
1089 linkname = os.path.join(self.base, dst)
1081 try:
1090 try:
1082 os.unlink(linkname)
1091 os.unlink(linkname)
1083 except OSError:
1092 except OSError:
1084 pass
1093 pass
1085
1094
1086 dirname = os.path.dirname(linkname)
1095 dirname = os.path.dirname(linkname)
1087 if not os.path.exists(dirname):
1096 if not os.path.exists(dirname):
1088 makedirs(dirname, self.createmode)
1097 makedirs(dirname, self.createmode)
1089
1098
1090 if self._can_symlink:
1099 if self._can_symlink:
1091 try:
1100 try:
1092 os.symlink(src, linkname)
1101 os.symlink(src, linkname)
1093 except OSError, err:
1102 except OSError, err:
1094 raise OSError(err.errno, _('could not symlink to %r: %s') %
1103 raise OSError(err.errno, _('could not symlink to %r: %s') %
1095 (src, err.strerror), linkname)
1104 (src, err.strerror), linkname)
1096 else:
1105 else:
1097 f = self(dst, "w")
1106 f = self(dst, "w")
1098 f.write(src)
1107 f.write(src)
1099 f.close()
1108 f.close()
1100 self._fixfilemode(dst)
1109 self._fixfilemode(dst)
1101
1110
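A sketch of typical opener usage with hypothetical file names: paths are audited relative to the base, and parent directories are created on demand.

from mercurial import util

op = util.opener('/repo/.hg')            # paths below are relative to this base
f = op('notes/todo.txt', 'w')            # creates /repo/.hg/notes/ if missing
f.write('remember to push\n')
f.close()

f = op('journal', 'w', atomictemp=True)  # write via a temp file, then rename()
f.write('entry\n')
f.rename()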
1102 class chunkbuffer(object):
1111 class chunkbuffer(object):
1103 """Allow arbitrary sized chunks of data to be efficiently read from an
1112 """Allow arbitrary sized chunks of data to be efficiently read from an
1104 iterator over chunks of arbitrary size."""
1113 iterator over chunks of arbitrary size."""
1105
1114
1106 def __init__(self, in_iter):
1115 def __init__(self, in_iter):
1107 """in_iter is the iterator that's iterating over the input chunks.
1116 """in_iter is the iterator that's iterating over the input chunks.
1108 targetsize is how big a buffer to try to maintain."""
1117 targetsize is how big a buffer to try to maintain."""
1109 self.iter = iter(in_iter)
1118 self.iter = iter(in_iter)
1110 self.buf = ''
1119 self.buf = ''
1111 self.targetsize = 2**16
1120 self.targetsize = 2**16
1112
1121
1113 def read(self, l):
1122 def read(self, l):
1114 """Read L bytes of data from the iterator of chunks of data.
1123 """Read L bytes of data from the iterator of chunks of data.
1115 Returns less than L bytes if the iterator runs dry."""
1124 Returns less than L bytes if the iterator runs dry."""
1116 if l > len(self.buf) and self.iter:
1125 if l > len(self.buf) and self.iter:
1117 # Clamp to a multiple of self.targetsize
1126 # Clamp to a multiple of self.targetsize
1118 targetsize = max(l, self.targetsize)
1127 targetsize = max(l, self.targetsize)
1119 collector = cStringIO.StringIO()
1128 collector = cStringIO.StringIO()
1120 collector.write(self.buf)
1129 collector.write(self.buf)
1121 collected = len(self.buf)
1130 collected = len(self.buf)
1122 for chunk in self.iter:
1131 for chunk in self.iter:
1123 collector.write(chunk)
1132 collector.write(chunk)
1124 collected += len(chunk)
1133 collected += len(chunk)
1125 if collected >= targetsize:
1134 if collected >= targetsize:
1126 break
1135 break
1127 if collected < targetsize:
1136 if collected < targetsize:
1128 self.iter = False
1137 self.iter = False
1129 self.buf = collector.getvalue()
1138 self.buf = collector.getvalue()
1130 if len(self.buf) == l:
1139 if len(self.buf) == l:
1131 s, self.buf = str(self.buf), ''
1140 s, self.buf = str(self.buf), ''
1132 else:
1141 else:
1133 s, self.buf = self.buf[:l], buffer(self.buf, l)
1142 s, self.buf = self.buf[:l], buffer(self.buf, l)
1134 return s
1143 return s
1135
1144
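A small sketch of reading fixed-size pieces out of variable-size chunks (assuming mercurial.util is importable):

from mercurial import util

cb = util.chunkbuffer(iter(['abc', 'defg', 'hi']))
cb.read(4)        # 'abcd' - spans the first two input chunks
cb.read(2)        # 'ef'   - served from the buffered remainder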
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data).  Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None: nbytes = size
        else: nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s: break
        if limit: limit -= len(s)
        yield s

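An illustrative sketch only; the file name is made up. filechunkiter is handy for hashing or copying a file without reading it into memory at once, and the optional limit caps how much is consumed.

import hashlib
from mercurial import util

h = hashlib.sha1()
f = open('some-large-file.bin', 'rb')              # hypothetical input file
for chunk in util.filechunkiter(f, size=8192, limit=1 << 20):
    h.update(chunk)                                # hash at most 1 MB, 8 KB at a time
f.close()
print h.hexdigest()
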
def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    t, tz = date or makedate()
    if "%1" in format or "%2" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) / 60
        format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    s = time.strftime(format, time.gmtime(float(t) - tz))
    return s

def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into iso 8601 date."""
    return datestr(date, format='%Y-%m-%d')

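For illustration (not part of the patch): a date in this module is a (unixtime, offset) pair, where offset is seconds west of UTC, so a negative offset means a zone ahead of UTC. Assuming the module is importable as mercurial.util, the comments show the expected output, worked out by hand.

from mercurial import util

when = (1234567890, -3600)    # 2009-02-13 23:31:30 UTC, local zone UTC+0100
util.datestr(when)            # 'Sat Feb 14 00:31:30 2009 +0100'
util.shortdate(when)          # '2009-02-14'
util.datestr(when, format='%Y-%m-%d %H:%M %1%2')   # '2009-02-14 00:31 +0100'
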
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

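A usage sketch (expected tuples worked out by hand, assuming the default format list): parsedate accepts a raw "unixtime offset" pair, any of the configured date formats, or an already-parsed tuple.

from mercurial import util

util.parsedate('1234567890 0')                # -> (1234567890, 0)
util.parsedate('2009-02-14 00:31:30 +0100')   # -> (1234567890, -3600)
util.parsedate((1234567890, 0))               # tuples pass straight through
util.parsedate('')                            # empty input -> (0, 0)
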
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()
    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

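A sketch of how the returned predicate might be used (not part of the patch). Both the specifier and the timestamps being tested are interpreted in local time, so the comparisons below are self-consistent even though the absolute values depend on the machine's zone.

from mercurial import util

after = util.matchdate('>2008-01-01')
after(util.parsedate('2008-06-15')[0])    # True
after(util.parsedate('2007-12-31')[0])    # False

inrange = util.matchdate('2008-01-01 to 2008-12-31')
inrange(util.parsedate('2008-06-15')[0])  # True
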
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f+1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1: r = None
    return author[author.find('<')+1:r]

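For illustration: given a typical 'Name <address>' author string, email() extracts the address and shortuser() reduces it to a terse handle.

from mercurial import util

author = 'J. Random Hacker <jrh@example.com>'
util.email(author)                  # 'jrh@example.com'
util.shortuser(author)              # 'jrh'
util.shortuser('jrh@example.com')   # 'jrh' -- bare addresses work too
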
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength-3])

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

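An illustrative sketch; the ~/src path is hypothetical. walkrepos yields each repository root it finds (and any patch-queue repository inside .hg/patches), optionally following symlinks and descending into repositories nested below other working copies.

import os
from mercurial import util

# List every repository under a hypothetical ~/src tree.
for root in util.walkrepos(os.path.expanduser('~/src'),
                           followsym=True, recurse=True):
    print root
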
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

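A sketch of the HGRCPATH handling (the paths are made up for the example). Note that the result is cached in the module-level _rcpath, so the environment variable is only consulted on the first call in a process.

import os
from mercurial import util

# A directory entry contributes every *.rc file inside it; a file entry is
# used as-is; an empty HGRCPATH means "only the repository's .hg/hgrc".
os.environ['HGRCPATH'] = os.pathsep.join(['/etc/mercurial/conf.d',      # hypothetical dir
                                          os.path.expanduser('~/.hgrc')])
print util.rcpath()   # the .rc files under conf.d plus the expanded ~/.hgrc path
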
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

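Expected strings, worked out by hand against the table above: the first matching (multiplier, divisor) row wins, which keeps roughly three significant digits across unit boundaries.

from mercurial import util

util.bytecount(500)           # '500 bytes'
util.bytecount(1536)          # '1.50 KB'
util.bytecount(150 * 1024)    # '150 KB'
util.bytecount(3 << 30)       # '3.00 GB'
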
def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path

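For illustration: drop_scheme strips a known scheme prefix (and a leading '//' separator) but leaves other paths untouched.

from mercurial import util

util.drop_scheme('file', 'file:///tmp/repo')    # '/tmp/repo'
util.drop_scheme('file', 'file:relative/path')  # 'relative/path'
util.drop_scheme('file', '/already/plain')      # unchanged
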
def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

def termwidth():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    try:
        import termios, array, fcntl
        for dev in (sys.stdout, sys.stdin):
            try:
                fd = dev.fileno()
                if not os.isatty(fd):
                    continue
                arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', arri)[1]
            except ValueError:
                pass
    except ImportError:
        pass
    return 80

def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line

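A final sketch: iterlines flattens an iterator of multi-line chunks into individual lines. It simply calls splitlines() on each chunk, so a line that straddles a chunk boundary is not rejoined; it is meant for iterators whose chunks end on line boundaries.

from mercurial import util

chunks = ['first\nsecond\n', 'third\nfourth\n']
assert list(util.iterlines(chunks)) == ['first', 'second', 'third', 'fourth']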