transaction: really disable hardlink backups (issue4546)
Matt Harbison
r24164:07a92bbd 3.3.2 stable
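The diff below is a single one-line change in mercurial/util.py: the guard in copyfile() goes from "if False or hardlink:" to "if False and hardlink:". As a minimal sketch (not part of the commit) of why the earlier guard did not actually disable hardlink-based copies:

    # 'False or hardlink' simplifies to 'hardlink', so the oslink() branch could still run;
    # 'False and hardlink' is always False, so copyfile() now always falls back to shutil.
    for hardlink in (True, False):
        print(False or hardlink, False and hardlink)
    # True False
    # False False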
@@ -1,2231 +1,2231 b''
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
9
10 """Mercurial utility functions and platform specific implementations.
11
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
14 """
15
16 import i18n
17 _ = i18n._
18 import error, osutil, encoding
19 import errno, shutil, sys, tempfile, traceback
20 import re as remod
21 import os, time, datetime, calendar, textwrap, signal, collections
22 import imp, socket, urllib, struct
23 import gc
24
25 if os.name == 'nt':
26 import windows as platform
27 else:
28 import posix as platform
29
30 cachestat = platform.cachestat
31 checkexec = platform.checkexec
32 checklink = platform.checklink
33 copymode = platform.copymode
34 executablepath = platform.executablepath
35 expandglobs = platform.expandglobs
36 explainexit = platform.explainexit
37 findexe = platform.findexe
38 gethgcmd = platform.gethgcmd
39 getuser = platform.getuser
40 groupmembers = platform.groupmembers
41 groupname = platform.groupname
42 hidewindow = platform.hidewindow
43 isexec = platform.isexec
44 isowner = platform.isowner
45 localpath = platform.localpath
46 lookupreg = platform.lookupreg
47 makedir = platform.makedir
48 nlinks = platform.nlinks
49 normpath = platform.normpath
50 normcase = platform.normcase
51 openhardlinks = platform.openhardlinks
52 oslink = platform.oslink
53 parsepatchoutput = platform.parsepatchoutput
54 pconvert = platform.pconvert
55 popen = platform.popen
56 posixfile = platform.posixfile
57 quotecommand = platform.quotecommand
58 readpipe = platform.readpipe
59 rename = platform.rename
60 samedevice = platform.samedevice
61 samefile = platform.samefile
62 samestat = platform.samestat
63 setbinary = platform.setbinary
64 setflags = platform.setflags
65 setsignalhandler = platform.setsignalhandler
66 shellquote = platform.shellquote
67 spawndetached = platform.spawndetached
68 split = platform.split
69 sshargs = platform.sshargs
70 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
71 statisexec = platform.statisexec
72 statislink = platform.statislink
73 termwidth = platform.termwidth
74 testpid = platform.testpid
75 umask = platform.umask
76 unlink = platform.unlink
77 unlinkpath = platform.unlinkpath
78 username = platform.username
79
80 # Python compatibility
81
82 _notset = object()
83
84 def safehasattr(thing, attr):
85 return getattr(thing, attr, _notset) is not _notset
86
87 def sha1(s=''):
88 '''
89 Low-overhead wrapper around Python's SHA support
90
91 >>> f = _fastsha1
92 >>> a = sha1()
93 >>> a = f()
94 >>> a.hexdigest()
95 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
96 '''
97
98 return _fastsha1(s)
99
100 def _fastsha1(s=''):
101 # This function will import sha1 from hashlib or sha (whichever is
102 # available) and overwrite itself with it on the first call.
103 # Subsequent calls will go directly to the imported function.
104 if sys.version_info >= (2, 5):
105 from hashlib import sha1 as _sha1
106 else:
107 from sha import sha as _sha1
108 global _fastsha1, sha1
109 _fastsha1 = sha1 = _sha1
110 return _sha1(s)
111
112 def md5(s=''):
113 try:
114 from hashlib import md5 as _md5
115 except ImportError:
116 from md5 import md5 as _md5
117 global md5
118 md5 = _md5
119 return _md5(s)
120
121 DIGESTS = {
122 'md5': md5,
123 'sha1': sha1,
124 }
125 # List of digest types from strongest to weakest
126 DIGESTS_BY_STRENGTH = ['sha1', 'md5']
127
128 try:
129 import hashlib
130 DIGESTS.update({
131 'sha512': hashlib.sha512,
132 })
133 DIGESTS_BY_STRENGTH.insert(0, 'sha512')
134 except ImportError:
135 pass
136
137 for k in DIGESTS_BY_STRENGTH:
138 assert k in DIGESTS
139
140 class digester(object):
141 """helper to compute digests.
142
143 This helper can be used to compute one or more digests given their name.
144
145 >>> d = digester(['md5', 'sha1'])
146 >>> d.update('foo')
147 >>> [k for k in sorted(d)]
148 ['md5', 'sha1']
149 >>> d['md5']
150 'acbd18db4cc2f85cedef654fccc4a4d8'
151 >>> d['sha1']
152 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
153 >>> digester.preferred(['md5', 'sha1'])
154 'sha1'
155 """
156
157 def __init__(self, digests, s=''):
158 self._hashes = {}
159 for k in digests:
160 if k not in DIGESTS:
161 raise Abort(_('unknown digest type: %s') % k)
162 self._hashes[k] = DIGESTS[k]()
163 if s:
164 self.update(s)
165
166 def update(self, data):
167 for h in self._hashes.values():
168 h.update(data)
169
170 def __getitem__(self, key):
171 if key not in DIGESTS:
172 raise Abort(_('unknown digest type: %s') % k)
173 return self._hashes[key].hexdigest()
174
175 def __iter__(self):
176 return iter(self._hashes)
177
178 @staticmethod
179 def preferred(supported):
180 """returns the strongest digest type in both supported and DIGESTS."""
181
182 for k in DIGESTS_BY_STRENGTH:
183 if k in supported:
184 return k
185 return None
186
187 class digestchecker(object):
188 """file handle wrapper that additionally checks content against a given
189 size and digests.
190
191 d = digestchecker(fh, size, {'md5': '...'})
192
193 When multiple digests are given, all of them are validated.
194 """
195
196 def __init__(self, fh, size, digests):
197 self._fh = fh
198 self._size = size
199 self._got = 0
200 self._digests = dict(digests)
201 self._digester = digester(self._digests.keys())
202
203 def read(self, length=-1):
204 content = self._fh.read(length)
205 self._digester.update(content)
206 self._got += len(content)
207 return content
208
209 def validate(self):
210 if self._size != self._got:
211 raise Abort(_('size mismatch: expected %d, got %d') %
212 (self._size, self._got))
213 for k, v in self._digests.items():
214 if v != self._digester[k]:
215 # i18n: first parameter is a digest name
216 raise Abort(_('%s mismatch: expected %s, got %s') %
217 (k, v, self._digester[k]))
218
219 try:
220 buffer = buffer
221 except NameError:
222 if sys.version_info[0] < 3:
223 def buffer(sliceable, offset=0):
224 return sliceable[offset:]
225 else:
226 def buffer(sliceable, offset=0):
227 return memoryview(sliceable)[offset:]
228
229 import subprocess
230 closefds = os.name == 'posix'
231
232 def unpacker(fmt):
233 """create a struct unpacker for the specified format"""
234 try:
235 # 2.5+
236 return struct.Struct(fmt).unpack
237 except AttributeError:
238 # 2.4
239 return lambda buf: struct.unpack(fmt, buf)
240
241 def popen2(cmd, env=None, newlines=False):
242 # Setting bufsize to -1 lets the system decide the buffer size.
243 # The default for bufsize is 0, meaning unbuffered. This leads to
244 # poor performance on Mac OS X: http://bugs.python.org/issue4194
245 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
246 close_fds=closefds,
247 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
248 universal_newlines=newlines,
249 env=env)
250 return p.stdin, p.stdout
251
252 def popen3(cmd, env=None, newlines=False):
253 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
254 return stdin, stdout, stderr
255
256 def popen4(cmd, env=None, newlines=False):
257 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
258 close_fds=closefds,
259 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
260 stderr=subprocess.PIPE,
261 universal_newlines=newlines,
262 env=env)
263 return p.stdin, p.stdout, p.stderr, p
264
265 def version():
266 """Return version information if available."""
267 try:
268 import __version__
269 return __version__.version
270 except ImportError:
271 return 'unknown'
272
273 # used by parsedate
274 defaultdateformats = (
275 '%Y-%m-%d %H:%M:%S',
276 '%Y-%m-%d %I:%M:%S%p',
277 '%Y-%m-%d %H:%M',
278 '%Y-%m-%d %I:%M%p',
279 '%Y-%m-%d',
280 '%m-%d',
281 '%m/%d',
282 '%m/%d/%y',
283 '%m/%d/%Y',
284 '%a %b %d %H:%M:%S %Y',
285 '%a %b %d %I:%M:%S%p %Y',
286 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
287 '%b %d %H:%M:%S %Y',
288 '%b %d %I:%M:%S%p %Y',
289 '%b %d %H:%M:%S',
290 '%b %d %I:%M:%S%p',
291 '%b %d %H:%M',
292 '%b %d %I:%M%p',
293 '%b %d %Y',
294 '%b %d',
295 '%H:%M:%S',
296 '%I:%M:%S%p',
297 '%H:%M',
298 '%I:%M%p',
299 )
300
301 extendeddateformats = defaultdateformats + (
302 "%Y",
303 "%Y-%m",
304 "%b",
305 "%b %Y",
306 )
307
308 def cachefunc(func):
309 '''cache the result of function calls'''
310 # XXX doesn't handle keywords args
311 if func.func_code.co_argcount == 0:
312 cache = []
313 def f():
314 if len(cache) == 0:
315 cache.append(func())
316 return cache[0]
317 return f
318 cache = {}
319 if func.func_code.co_argcount == 1:
320 # we gain a small amount of time because
321 # we don't need to pack/unpack the list
322 def f(arg):
323 if arg not in cache:
324 cache[arg] = func(arg)
325 return cache[arg]
326 else:
327 def f(*args):
328 if args not in cache:
329 cache[args] = func(*args)
330 return cache[args]
331
332 return f
333
334 try:
335 collections.deque.remove
336 deque = collections.deque
337 except AttributeError:
338 # python 2.4 lacks deque.remove
339 class deque(collections.deque):
340 def remove(self, val):
341 for i, v in enumerate(self):
342 if v == val:
343 del self[i]
344 break
345
346 class sortdict(dict):
347 '''a simple sorted dictionary'''
348 def __init__(self, data=None):
349 self._list = []
350 if data:
351 self.update(data)
352 def copy(self):
353 return sortdict(self)
354 def __setitem__(self, key, val):
355 if key in self:
356 self._list.remove(key)
357 self._list.append(key)
358 dict.__setitem__(self, key, val)
359 def __iter__(self):
360 return self._list.__iter__()
361 def update(self, src):
362 for k in src:
363 self[k] = src[k]
364 def clear(self):
365 dict.clear(self)
366 self._list = []
367 def items(self):
368 return [(k, self[k]) for k in self._list]
369 def __delitem__(self, key):
370 dict.__delitem__(self, key)
371 self._list.remove(key)
372 def pop(self, key, *args, **kwargs):
373 dict.pop(self, key, *args, **kwargs)
374 try:
375 self._list.remove(key)
376 except ValueError:
377 pass
378 def keys(self):
379 return self._list
380 def iterkeys(self):
381 return self._list.__iter__()
382 def iteritems(self):
383 for k in self._list:
384 yield k, self[k]
385 def insert(self, index, key, val):
386 self._list.insert(index, key)
387 dict.__setitem__(self, key, val)
388
389 class lrucachedict(object):
390 '''cache most recent gets from or sets to this dictionary'''
391 def __init__(self, maxsize):
392 self._cache = {}
393 self._maxsize = maxsize
394 self._order = deque()
395
396 def __getitem__(self, key):
397 value = self._cache[key]
398 self._order.remove(key)
399 self._order.append(key)
400 return value
401
402 def __setitem__(self, key, value):
403 if key not in self._cache:
404 if len(self._cache) >= self._maxsize:
405 del self._cache[self._order.popleft()]
406 else:
407 self._order.remove(key)
408 self._cache[key] = value
409 self._order.append(key)
410
411 def __contains__(self, key):
412 return key in self._cache
413
414 def clear(self):
415 self._cache.clear()
416 self._order = deque()
417
418 def lrucachefunc(func):
419 '''cache most recent results of function calls'''
420 cache = {}
421 order = deque()
422 if func.func_code.co_argcount == 1:
423 def f(arg):
424 if arg not in cache:
425 if len(cache) > 20:
426 del cache[order.popleft()]
427 cache[arg] = func(arg)
428 else:
429 order.remove(arg)
430 order.append(arg)
431 return cache[arg]
432 else:
433 def f(*args):
434 if args not in cache:
435 if len(cache) > 20:
436 del cache[order.popleft()]
437 cache[args] = func(*args)
438 else:
439 order.remove(args)
440 order.append(args)
441 return cache[args]
442
443 return f
444
445 class propertycache(object):
446 def __init__(self, func):
447 self.func = func
448 self.name = func.__name__
449 def __get__(self, obj, type=None):
450 result = self.func(obj)
451 self.cachevalue(obj, result)
452 return result
453
454 def cachevalue(self, obj, value):
455 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
456 obj.__dict__[self.name] = value
457
458 def pipefilter(s, cmd):
459 '''filter string S through command CMD, returning its output'''
460 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
461 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
462 pout, perr = p.communicate(s)
463 return pout
464
465 def tempfilter(s, cmd):
466 '''filter string S through a pair of temporary files with CMD.
467 CMD is used as a template to create the real command to be run,
468 with the strings INFILE and OUTFILE replaced by the real names of
469 the temporary files generated.'''
470 inname, outname = None, None
471 try:
472 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
473 fp = os.fdopen(infd, 'wb')
474 fp.write(s)
475 fp.close()
476 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
477 os.close(outfd)
478 cmd = cmd.replace('INFILE', inname)
479 cmd = cmd.replace('OUTFILE', outname)
480 code = os.system(cmd)
481 if sys.platform == 'OpenVMS' and code & 1:
482 code = 0
483 if code:
484 raise Abort(_("command '%s' failed: %s") %
485 (cmd, explainexit(code)))
486 fp = open(outname, 'rb')
487 r = fp.read()
488 fp.close()
489 return r
490 finally:
491 try:
492 if inname:
493 os.unlink(inname)
494 except OSError:
495 pass
496 try:
497 if outname:
498 os.unlink(outname)
499 except OSError:
500 pass
501
502 filtertable = {
503 'tempfile:': tempfilter,
504 'pipe:': pipefilter,
505 }
506
507 def filter(s, cmd):
508 "filter a string through a command that transforms its input to its output"
509 for name, fn in filtertable.iteritems():
510 if cmd.startswith(name):
511 return fn(s, cmd[len(name):].lstrip())
512 return pipefilter(s, cmd)
513
514 def binary(s):
515 """return true if a string is binary data"""
516 return bool(s and '\0' in s)
517
518 def increasingchunks(source, min=1024, max=65536):
519 '''return no less than min bytes per chunk while data remains,
520 doubling min after each chunk until it reaches max'''
521 def log2(x):
522 if not x:
523 return 0
524 i = 0
525 while x:
526 x >>= 1
527 i += 1
528 return i - 1
529
530 buf = []
531 blen = 0
532 for chunk in source:
533 buf.append(chunk)
534 blen += len(chunk)
535 if blen >= min:
536 if min < max:
537 min = min << 1
538 nmin = 1 << log2(blen)
539 if nmin > min:
540 min = nmin
541 if min > max:
542 min = max
543 yield ''.join(buf)
544 blen = 0
545 buf = []
546 if buf:
547 yield ''.join(buf)
548
549 Abort = error.Abort
550
551 def always(fn):
552 return True
553
554 def never(fn):
555 return False
556
557 def nogc(func):
558 """disable garbage collector
559
560 Python's garbage collector triggers a GC each time a certain number of
561 container objects (the number being defined by gc.get_threshold()) are
562 allocated even when marked not to be tracked by the collector. Tracking has
563 no effect on when GCs are triggered, only on what objects the GC looks
564 into. As a workaround, disable GC while building complex (huge)
565 containers.
566
567 This garbage collector issue have been fixed in 2.7.
568 """
569 def wrapper(*args, **kwargs):
570 gcenabled = gc.isenabled()
571 gc.disable()
572 try:
573 return func(*args, **kwargs)
574 finally:
575 if gcenabled:
576 gc.enable()
577 return wrapper
578
579 def pathto(root, n1, n2):
580 '''return the relative path from one place to another.
581 root should use os.sep to separate directories
582 n1 should use os.sep to separate directories
583 n2 should use "/" to separate directories
584 returns an os.sep-separated path.
585
586 If n1 is a relative path, it's assumed it's
587 relative to root.
588 n2 should always be relative to root.
589 '''
590 if not n1:
591 return localpath(n2)
592 if os.path.isabs(n1):
593 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
594 return os.path.join(root, localpath(n2))
595 n2 = '/'.join((pconvert(root), n2))
596 a, b = splitpath(n1), n2.split('/')
597 a.reverse()
598 b.reverse()
599 while a and b and a[-1] == b[-1]:
600 a.pop()
601 b.pop()
602 b.reverse()
603 return os.sep.join((['..'] * len(a)) + b) or '.'
604
605 def mainfrozen():
606 """return True if we are a frozen executable.
607
608 The code supports py2exe (most common, Windows only) and tools/freeze
609 (portable, not much used).
610 """
611 return (safehasattr(sys, "frozen") or # new py2exe
612 safehasattr(sys, "importers") or # old py2exe
613 imp.is_frozen("__main__")) # tools/freeze
614
615 # the location of data files matching the source code
616 if mainfrozen():
617 # executable version (py2exe) doesn't support __file__
618 datapath = os.path.dirname(sys.executable)
619 else:
620 datapath = os.path.dirname(__file__)
621
622 i18n.setdatapath(datapath)
623
624 _hgexecutable = None
625
626 def hgexecutable():
627 """return location of the 'hg' executable.
628
629 Defaults to $HG or 'hg' in the search path.
630 """
631 if _hgexecutable is None:
632 hg = os.environ.get('HG')
633 mainmod = sys.modules['__main__']
634 if hg:
635 _sethgexecutable(hg)
636 elif mainfrozen():
637 _sethgexecutable(sys.executable)
638 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
639 _sethgexecutable(mainmod.__file__)
640 else:
641 exe = findexe('hg') or os.path.basename(sys.argv[0])
642 _sethgexecutable(exe)
643 return _hgexecutable
644
645 def _sethgexecutable(path):
646 """set location of the 'hg' executable"""
647 global _hgexecutable
648 _hgexecutable = path
649
650 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
651 '''enhanced shell command execution.
652 run with environment maybe modified, maybe in different dir.
653
654 if command fails and onerr is None, return status, else raise onerr
655 object as exception.
656
657 if out is specified, it is assumed to be a file-like object that has a
658 write() method. stdout and stderr will be redirected to out.'''
659 try:
660 sys.stdout.flush()
661 except Exception:
662 pass
663 def py2shell(val):
664 'convert python object into string that is useful to shell'
665 if val is None or val is False:
666 return '0'
667 if val is True:
668 return '1'
669 return str(val)
670 origcmd = cmd
671 cmd = quotecommand(cmd)
672 if sys.platform == 'plan9' and (sys.version_info[0] == 2
673 and sys.version_info[1] < 7):
674 # subprocess kludge to work around issues in half-baked Python
675 # ports, notably bichued/python:
676 if not cwd is None:
677 os.chdir(cwd)
678 rc = os.system(cmd)
679 else:
680 env = dict(os.environ)
681 env.update((k, py2shell(v)) for k, v in environ.iteritems())
682 env['HG'] = hgexecutable()
683 if out is None or out == sys.__stdout__:
684 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
685 env=env, cwd=cwd)
686 else:
687 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
688 env=env, cwd=cwd, stdout=subprocess.PIPE,
689 stderr=subprocess.STDOUT)
690 while True:
691 line = proc.stdout.readline()
692 if not line:
693 break
694 out.write(line)
695 proc.wait()
696 rc = proc.returncode
697 if sys.platform == 'OpenVMS' and rc & 1:
698 rc = 0
699 if rc and onerr:
700 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
701 explainexit(rc)[0])
702 if errprefix:
703 errmsg = '%s: %s' % (errprefix, errmsg)
704 raise onerr(errmsg)
705 return rc
706
707 def checksignature(func):
708 '''wrap a function with code to check for calling errors'''
709 def check(*args, **kwargs):
710 try:
711 return func(*args, **kwargs)
712 except TypeError:
713 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
714 raise error.SignatureError
715 raise
716
717 return check
718
719 def copyfile(src, dest, hardlink=False):
720 "copy a file, preserving mode and atime/mtime"
721 if os.path.lexists(dest):
722 unlink(dest)
723 # hardlinks are problematic on CIFS, quietly ignore this flag
724 # until we find a way to work around it cleanly (issue4546)
725 - if False or hardlink:
725 + if False and hardlink:
726 try:
727 oslink(src, dest)
728 return
729 except (IOError, OSError):
730 pass # fall back to normal copy
731 if os.path.islink(src):
732 os.symlink(os.readlink(src), dest)
733 else:
734 try:
735 shutil.copyfile(src, dest)
736 shutil.copymode(src, dest)
737 except shutil.Error, inst:
738 raise Abort(str(inst))
739
740 def copyfiles(src, dst, hardlink=None):
741 """Copy a directory tree using hardlinks if possible"""
742
743 if hardlink is None:
744 hardlink = (os.stat(src).st_dev ==
745 os.stat(os.path.dirname(dst)).st_dev)
746
747 num = 0
748 if os.path.isdir(src):
749 os.mkdir(dst)
750 for name, kind in osutil.listdir(src):
751 srcname = os.path.join(src, name)
752 dstname = os.path.join(dst, name)
753 hardlink, n = copyfiles(srcname, dstname, hardlink)
754 num += n
755 else:
756 if hardlink:
757 try:
758 oslink(src, dst)
759 except (IOError, OSError):
760 hardlink = False
761 shutil.copy(src, dst)
762 else:
763 shutil.copy(src, dst)
764 num += 1
765
766 return hardlink, num
767
768 _winreservednames = '''con prn aux nul
769 com1 com2 com3 com4 com5 com6 com7 com8 com9
770 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
771 _winreservedchars = ':*?"<>|'
772 def checkwinfilename(path):
773 r'''Check that the base-relative path is a valid filename on Windows.
774 Returns None if the path is ok, or a UI string describing the problem.
775
776 >>> checkwinfilename("just/a/normal/path")
777 >>> checkwinfilename("foo/bar/con.xml")
778 "filename contains 'con', which is reserved on Windows"
779 >>> checkwinfilename("foo/con.xml/bar")
780 "filename contains 'con', which is reserved on Windows"
781 >>> checkwinfilename("foo/bar/xml.con")
782 >>> checkwinfilename("foo/bar/AUX/bla.txt")
783 "filename contains 'AUX', which is reserved on Windows"
784 >>> checkwinfilename("foo/bar/bla:.txt")
785 "filename contains ':', which is reserved on Windows"
786 >>> checkwinfilename("foo/bar/b\07la.txt")
787 "filename contains '\\x07', which is invalid on Windows"
788 >>> checkwinfilename("foo/bar/bla ")
789 "filename ends with ' ', which is not allowed on Windows"
790 >>> checkwinfilename("../bar")
791 >>> checkwinfilename("foo\\")
792 "filename ends with '\\', which is invalid on Windows"
793 >>> checkwinfilename("foo\\/bar")
794 "directory name ends with '\\', which is invalid on Windows"
795 '''
796 if path.endswith('\\'):
797 return _("filename ends with '\\', which is invalid on Windows")
798 if '\\/' in path:
799 return _("directory name ends with '\\', which is invalid on Windows")
800 for n in path.replace('\\', '/').split('/'):
801 if not n:
802 continue
803 for c in n:
804 if c in _winreservedchars:
805 return _("filename contains '%s', which is reserved "
806 "on Windows") % c
807 if ord(c) <= 31:
808 return _("filename contains %r, which is invalid "
809 "on Windows") % c
810 base = n.split('.')[0]
811 if base and base.lower() in _winreservednames:
812 return _("filename contains '%s', which is reserved "
813 "on Windows") % base
814 t = n[-1]
815 if t in '. ' and n not in '..':
816 return _("filename ends with '%s', which is not allowed "
817 "on Windows") % t
818
819 if os.name == 'nt':
820 checkosfilename = checkwinfilename
821 else:
822 checkosfilename = platform.checkosfilename
823
824 def makelock(info, pathname):
825 try:
826 return os.symlink(info, pathname)
827 except OSError, why:
828 if why.errno == errno.EEXIST:
829 raise
830 except AttributeError: # no symlink in os
831 pass
832
833 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
834 os.write(ld, info)
835 os.close(ld)
836
837 def readlock(pathname):
838 try:
839 return os.readlink(pathname)
840 except OSError, why:
841 if why.errno not in (errno.EINVAL, errno.ENOSYS):
842 raise
843 except AttributeError: # no symlink in os
844 pass
845 fp = posixfile(pathname)
846 r = fp.read()
847 fp.close()
848 return r
849
850 def fstat(fp):
851 '''stat file object that may not have fileno method.'''
852 try:
853 return os.fstat(fp.fileno())
854 except AttributeError:
855 return os.stat(fp.name)
856
857 # File system features
858
859 def checkcase(path):
860 """
861 Return true if the given path is on a case-sensitive filesystem
862
863 Requires a path (like /foo/.hg) ending with a foldable final
864 directory component.
865 """
866 s1 = os.stat(path)
867 d, b = os.path.split(path)
868 b2 = b.upper()
869 if b == b2:
870 b2 = b.lower()
871 if b == b2:
872 return True # no evidence against case sensitivity
873 p2 = os.path.join(d, b2)
874 try:
875 s2 = os.stat(p2)
876 if s2 == s1:
877 return False
878 return True
879 except OSError:
880 return True
881
882 try:
883 import re2
884 _re2 = None
885 except ImportError:
886 _re2 = False
887
888 class _re(object):
889 def _checkre2(self):
890 global _re2
891 try:
892 # check if match works, see issue3964
893 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
894 except ImportError:
895 _re2 = False
896
897 def compile(self, pat, flags=0):
898 '''Compile a regular expression, using re2 if possible
899
900 For best performance, use only re2-compatible regexp features. The
901 only flags from the re module that are re2-compatible are
902 IGNORECASE and MULTILINE.'''
903 if _re2 is None:
904 self._checkre2()
905 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
906 if flags & remod.IGNORECASE:
907 pat = '(?i)' + pat
908 if flags & remod.MULTILINE:
909 pat = '(?m)' + pat
910 try:
911 return re2.compile(pat)
912 except re2.error:
913 pass
914 return remod.compile(pat, flags)
915
916 @propertycache
917 def escape(self):
918 '''Return the version of escape corresponding to self.compile.
919
920 This is imperfect because whether re2 or re is used for a particular
921 function depends on the flags, etc, but it's the best we can do.
922 '''
923 global _re2
924 if _re2 is None:
925 self._checkre2()
926 if _re2:
927 return re2.escape
928 else:
929 return remod.escape
930
931 re = _re()
932
933 _fspathcache = {}
934 def fspath(name, root):
935 '''Get name in the case stored in the filesystem
936
937 The name should be relative to root, and be normcase-ed for efficiency.
938
939 Note that this function is unnecessary, and should not be
940 called, for case-sensitive filesystems (simply because it's expensive).
941
942 The root should be normcase-ed, too.
943 '''
944 def _makefspathcacheentry(dir):
945 return dict((normcase(n), n) for n in os.listdir(dir))
946
947 seps = os.sep
948 if os.altsep:
949 seps = seps + os.altsep
950 # Protect backslashes. This gets silly very quickly.
951 seps.replace('\\','\\\\')
952 pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
953 dir = os.path.normpath(root)
954 result = []
955 for part, sep in pattern.findall(name):
956 if sep:
957 result.append(sep)
958 continue
959
960 if dir not in _fspathcache:
961 _fspathcache[dir] = _makefspathcacheentry(dir)
962 contents = _fspathcache[dir]
963
964 found = contents.get(part)
965 if not found:
966 # retry "once per directory" per "dirstate.walk" which
967 # may take place for each patches of "hg qpush", for example
968 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
969 found = contents.get(part)
970
971 result.append(found or part)
972 dir = os.path.join(dir, part)
973
974 return ''.join(result)
975
976 def checknlink(testfile):
977 '''check whether hardlink count reporting works properly'''
978
979 # testfile may be open, so we need a separate file for checking to
980 # work around issue2543 (or testfile may get lost on Samba shares)
981 f1 = testfile + ".hgtmp1"
982 if os.path.lexists(f1):
983 return False
984 try:
985 posixfile(f1, 'w').close()
986 except IOError:
987 return False
988
989 f2 = testfile + ".hgtmp2"
990 fd = None
991 try:
992 try:
993 oslink(f1, f2)
994 except OSError:
995 return False
996
997 # nlinks() may behave differently for files on Windows shares if
998 # the file is open.
999 fd = posixfile(f2)
1000 return nlinks(f2) > 1
1001 finally:
1002 if fd is not None:
1003 fd.close()
1004 for f in (f1, f2):
1005 try:
1006 os.unlink(f)
1007 except OSError:
1008 pass
1009
1010 def endswithsep(path):
1011 '''Check path ends with os.sep or os.altsep.'''
1012 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
1013
1014 def splitpath(path):
1015 '''Split path by os.sep.
1016 Note that this function does not use os.altsep because this is
1017 an alternative of simple "xxx.split(os.sep)".
1018 It is recommended to use os.path.normpath() before using this
1019 function if need.'''
1020 return path.split(os.sep)
1021
1022 def gui():
1023 '''Are we running in a GUI?'''
1024 if sys.platform == 'darwin':
1025 if 'SSH_CONNECTION' in os.environ:
1026 # handle SSH access to a box where the user is logged in
1026 # handle SSH access to a box where the user is logged in
1027 return False
1027 return False
1028 elif getattr(osutil, 'isgui', None):
1028 elif getattr(osutil, 'isgui', None):
1029 # check if a CoreGraphics session is available
1029 # check if a CoreGraphics session is available
1030 return osutil.isgui()
1030 return osutil.isgui()
1031 else:
1031 else:
1032 # pure build; use a safe default
1032 # pure build; use a safe default
1033 return True
1033 return True
1034 else:
1034 else:
1035 return os.name == "nt" or os.environ.get("DISPLAY")
1035 return os.name == "nt" or os.environ.get("DISPLAY")
1036
1036
1037 def mktempcopy(name, emptyok=False, createmode=None):
1037 def mktempcopy(name, emptyok=False, createmode=None):
1038 """Create a temporary file with the same contents from name
1038 """Create a temporary file with the same contents from name
1039
1039
1040 The permission bits are copied from the original file.
1040 The permission bits are copied from the original file.
1041
1041
1042 If the temporary file is going to be truncated immediately, you
1042 If the temporary file is going to be truncated immediately, you
1043 can use emptyok=True as an optimization.
1043 can use emptyok=True as an optimization.
1044
1044
1045 Returns the name of the temporary file.
1045 Returns the name of the temporary file.
1046 """
1046 """
1047 d, fn = os.path.split(name)
1047 d, fn = os.path.split(name)
1048 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1048 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1049 os.close(fd)
1049 os.close(fd)
1050 # Temporary files are created with mode 0600, which is usually not
1050 # Temporary files are created with mode 0600, which is usually not
1051 # what we want. If the original file already exists, just copy
1051 # what we want. If the original file already exists, just copy
1052 # its mode. Otherwise, manually obey umask.
1052 # its mode. Otherwise, manually obey umask.
1053 copymode(name, temp, createmode)
1053 copymode(name, temp, createmode)
1054 if emptyok:
1054 if emptyok:
1055 return temp
1055 return temp
1056 try:
1056 try:
1057 try:
1057 try:
1058 ifp = posixfile(name, "rb")
1058 ifp = posixfile(name, "rb")
1059 except IOError, inst:
1059 except IOError, inst:
1060 if inst.errno == errno.ENOENT:
1060 if inst.errno == errno.ENOENT:
1061 return temp
1061 return temp
1062 if not getattr(inst, 'filename', None):
1062 if not getattr(inst, 'filename', None):
1063 inst.filename = name
1063 inst.filename = name
1064 raise
1064 raise
1065 ofp = posixfile(temp, "wb")
1065 ofp = posixfile(temp, "wb")
1066 for chunk in filechunkiter(ifp):
1066 for chunk in filechunkiter(ifp):
1067 ofp.write(chunk)
1067 ofp.write(chunk)
1068 ifp.close()
1068 ifp.close()
1069 ofp.close()
1069 ofp.close()
1070 except: # re-raises
1070 except: # re-raises
1071 try: os.unlink(temp)
1071 try: os.unlink(temp)
1072 except OSError: pass
1072 except OSError: pass
1073 raise
1073 raise
1074 return temp
1074 return temp
1075
1075
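# A minimal usage sketch (hypothetical helper, not part of the original
# module). mktempcopy() creates the copy in the same directory as the
# original, so a later rename stays on one filesystem, and copies the
# original's permission bits:
def _demo_mktempcopy(path):
    '''Copy path to a sibling temp file; return (tempname, modes_match).'''
    temp = mktempcopy(path)
    samemode = (os.stat(temp).st_mode & 0o777) == (os.stat(path).st_mode & 0o777)
    return temp, samemode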
1076 class atomictempfile(object):
1076 class atomictempfile(object):
1077 '''writable file object that atomically updates a file
1077 '''writable file object that atomically updates a file
1078
1078
1079 All writes will go to a temporary copy of the original file. Call
1079 All writes will go to a temporary copy of the original file. Call
1080 close() when you are done writing, and atomictempfile will rename
1080 close() when you are done writing, and atomictempfile will rename
1081 the temporary copy to the original name, making the changes
1081 the temporary copy to the original name, making the changes
1082 visible. If the object is destroyed without being closed, all your
1082 visible. If the object is destroyed without being closed, all your
1083 writes are discarded.
1083 writes are discarded.
1084 '''
1084 '''
1085 def __init__(self, name, mode='w+b', createmode=None):
1085 def __init__(self, name, mode='w+b', createmode=None):
1086 self.__name = name # permanent name
1086 self.__name = name # permanent name
1087 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1087 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1088 createmode=createmode)
1088 createmode=createmode)
1089 self._fp = posixfile(self._tempname, mode)
1089 self._fp = posixfile(self._tempname, mode)
1090
1090
1091 # delegated methods
1091 # delegated methods
1092 self.write = self._fp.write
1092 self.write = self._fp.write
1093 self.seek = self._fp.seek
1093 self.seek = self._fp.seek
1094 self.tell = self._fp.tell
1094 self.tell = self._fp.tell
1095 self.fileno = self._fp.fileno
1095 self.fileno = self._fp.fileno
1096
1096
1097 def close(self):
1097 def close(self):
1098 if not self._fp.closed:
1098 if not self._fp.closed:
1099 self._fp.close()
1099 self._fp.close()
1100 rename(self._tempname, localpath(self.__name))
1100 rename(self._tempname, localpath(self.__name))
1101
1101
1102 def discard(self):
1102 def discard(self):
1103 if not self._fp.closed:
1103 if not self._fp.closed:
1104 try:
1104 try:
1105 os.unlink(self._tempname)
1105 os.unlink(self._tempname)
1106 except OSError:
1106 except OSError:
1107 pass
1107 pass
1108 self._fp.close()
1108 self._fp.close()
1109
1109
1110 def __del__(self):
1110 def __del__(self):
1111 if safehasattr(self, '_fp'): # constructor actually did something
1111 if safehasattr(self, '_fp'): # constructor actually did something
1112 self.discard()
1112 self.discard()
1113
1113
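# A minimal usage sketch (hypothetical helper, not part of the original
# module). atomictempfile above is the building block for all-or-nothing file
# updates: writes go to a sibling temp file, and only close() renames it over
# the real name.
def _demo_atomicwrite(path, data):
    '''Replace the contents of path with data, atomically (sketch).'''
    f = atomictempfile(path)
    try:
        f.write(data)
        f.close()    # publish: rename the temp file over path
    except: # re-raises
        f.discard()  # drop the temp file, leave path untouched
        raise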
1114 def makedirs(name, mode=None, notindexed=False):
1114 def makedirs(name, mode=None, notindexed=False):
1115 """recursive directory creation with parent mode inheritance"""
1115 """recursive directory creation with parent mode inheritance"""
1116 try:
1116 try:
1117 makedir(name, notindexed)
1117 makedir(name, notindexed)
1118 except OSError, err:
1118 except OSError, err:
1119 if err.errno == errno.EEXIST:
1119 if err.errno == errno.EEXIST:
1120 return
1120 return
1121 if err.errno != errno.ENOENT or not name:
1121 if err.errno != errno.ENOENT or not name:
1122 raise
1122 raise
1123 parent = os.path.dirname(os.path.abspath(name))
1123 parent = os.path.dirname(os.path.abspath(name))
1124 if parent == name:
1124 if parent == name:
1125 raise
1125 raise
1126 makedirs(parent, mode, notindexed)
1126 makedirs(parent, mode, notindexed)
1127 makedir(name, notindexed)
1127 makedir(name, notindexed)
1128 if mode is not None:
1128 if mode is not None:
1129 os.chmod(name, mode)
1129 os.chmod(name, mode)
1130
1130
1131 def ensuredirs(name, mode=None, notindexed=False):
1131 def ensuredirs(name, mode=None, notindexed=False):
1132 """race-safe recursive directory creation
1132 """race-safe recursive directory creation
1133
1133
1134 Newly created directories are marked as "not to be indexed by
1134 Newly created directories are marked as "not to be indexed by
1135 the content indexing service", if ``notindexed`` is specified.
1137 """
1137 """
1138 if os.path.isdir(name):
1138 if os.path.isdir(name):
1139 return
1139 return
1140 parent = os.path.dirname(os.path.abspath(name))
1140 parent = os.path.dirname(os.path.abspath(name))
1141 if parent != name:
1141 if parent != name:
1142 ensuredirs(parent, mode, notindexed)
1142 ensuredirs(parent, mode, notindexed)
1143 try:
1143 try:
1144 makedir(name, notindexed)
1144 makedir(name, notindexed)
1145 except OSError, err:
1145 except OSError, err:
1146 if err.errno == errno.EEXIST and os.path.isdir(name):
1146 if err.errno == errno.EEXIST and os.path.isdir(name):
1147 # someone else seems to have won a directory creation race
1147 # someone else seems to have won a directory creation race
1148 return
1148 return
1149 raise
1149 raise
1150 if mode is not None:
1150 if mode is not None:
1151 os.chmod(name, mode)
1151 os.chmod(name, mode)
1152
1152
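# A minimal usage sketch (hypothetical helper, not part of the original
# module). makedirs() above tolerates EEXIST only on its first attempt, so its
# final makedir() call can still fail if another process wins the race;
# ensuredirs() treats "somebody else created it first" as success at every
# level, which is why callers that may race prefer it:
def _demo_preparedir(path):
    '''Make sure path exists, racing safely with other processes.'''
    ensuredirs(path)
    return os.path.isdir(path)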
1153 def readfile(path):
1153 def readfile(path):
1154 fp = open(path, 'rb')
1154 fp = open(path, 'rb')
1155 try:
1155 try:
1156 return fp.read()
1156 return fp.read()
1157 finally:
1157 finally:
1158 fp.close()
1158 fp.close()
1159
1159
1160 def writefile(path, text):
1160 def writefile(path, text):
1161 fp = open(path, 'wb')
1161 fp = open(path, 'wb')
1162 try:
1162 try:
1163 fp.write(text)
1163 fp.write(text)
1164 finally:
1164 finally:
1165 fp.close()
1165 fp.close()
1166
1166
1167 def appendfile(path, text):
1167 def appendfile(path, text):
1168 fp = open(path, 'ab')
1168 fp = open(path, 'ab')
1169 try:
1169 try:
1170 fp.write(text)
1170 fp.write(text)
1171 finally:
1171 finally:
1172 fp.close()
1172 fp.close()
1173
1173
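# A minimal usage sketch (hypothetical helper, not part of the original
# module). readfile()/writefile()/appendfile() are thin wrappers that always
# use binary mode and always close the file:
def _demo_filehelpers(path):
    writefile(path, 'first line\n')
    appendfile(path, 'second line\n')
    return readfile(path)    # -> 'first line\nsecond line\n'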
1174 class chunkbuffer(object):
1174 class chunkbuffer(object):
1175 """Allow arbitrary sized chunks of data to be efficiently read from an
1175 """Allow arbitrary sized chunks of data to be efficiently read from an
1176 iterator over chunks of arbitrary size."""
1176 iterator over chunks of arbitrary size."""
1177
1177
1178 def __init__(self, in_iter):
1178 def __init__(self, in_iter):
1179 """in_iter is the iterator that's iterating over the input chunks.
1179 """in_iter is the iterator that's iterating over the input chunks.
1180 targetsize is how big a buffer to try to maintain."""
1180 targetsize is how big a buffer to try to maintain."""
1181 def splitbig(chunks):
1181 def splitbig(chunks):
1182 for chunk in chunks:
1182 for chunk in chunks:
1183 if len(chunk) > 2**20:
1183 if len(chunk) > 2**20:
1184 pos = 0
1184 pos = 0
1185 while pos < len(chunk):
1185 while pos < len(chunk):
1186 end = pos + 2 ** 18
1186 end = pos + 2 ** 18
1187 yield chunk[pos:end]
1187 yield chunk[pos:end]
1188 pos = end
1188 pos = end
1189 else:
1189 else:
1190 yield chunk
1190 yield chunk
1191 self.iter = splitbig(in_iter)
1191 self.iter = splitbig(in_iter)
1192 self._queue = deque()
1192 self._queue = deque()
1193
1193
1194 def read(self, l=None):
1194 def read(self, l=None):
1195 """Read L bytes of data from the iterator of chunks of data.
1195 """Read L bytes of data from the iterator of chunks of data.
1196 Returns less than L bytes if the iterator runs dry.
1196 Returns less than L bytes if the iterator runs dry.
1197
1197
1198 If the length argument is omitted, read everything."""
1199 left = l
1199 left = l
1200 buf = []
1200 buf = []
1201 queue = self._queue
1201 queue = self._queue
1202 while left is None or left > 0:
1202 while left is None or left > 0:
1203 # refill the queue
1203 # refill the queue
1204 if not queue:
1204 if not queue:
1205 target = 2**18
1205 target = 2**18
1206 for chunk in self.iter:
1206 for chunk in self.iter:
1207 queue.append(chunk)
1207 queue.append(chunk)
1208 target -= len(chunk)
1208 target -= len(chunk)
1209 if target <= 0:
1209 if target <= 0:
1210 break
1210 break
1211 if not queue:
1211 if not queue:
1212 break
1212 break
1213
1213
1214 chunk = queue.popleft()
1214 chunk = queue.popleft()
1215 if left is not None:
1215 if left is not None:
1216 left -= len(chunk)
1216 left -= len(chunk)
1217 if left is not None and left < 0:
1217 if left is not None and left < 0:
1218 queue.appendleft(chunk[left:])
1218 queue.appendleft(chunk[left:])
1219 buf.append(chunk[:left])
1219 buf.append(chunk[:left])
1220 else:
1220 else:
1221 buf.append(chunk)
1221 buf.append(chunk)
1222
1222
1223 return ''.join(buf)
1223 return ''.join(buf)
1224
1224
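# A minimal usage sketch (hypothetical helper, not part of the original
# module). chunkbuffer turns an iterator yielding arbitrarily sized strings
# into something with a file-like read(n), internally re-splitting anything
# larger than 1 MB into 256 kB pieces:
def _demo_chunkbuffer():
    buf = chunkbuffer(iter(['abc', 'defgh', 'ij']))
    first = buf.read(4)    # -> 'abcd': spans the first two input chunks
    rest = buf.read()      # -> 'efghij': everything that is left
    return first, rest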
1225 def filechunkiter(f, size=65536, limit=None):
1225 def filechunkiter(f, size=65536, limit=None):
1226 """Create a generator that produces the data in the file size
1226 """Create a generator that produces the data in the file size
1227 (default 65536) bytes at a time, up to optional limit (default is
1227 (default 65536) bytes at a time, up to optional limit (default is
1228 to read all data). Chunks may be less than size bytes if the
1228 to read all data). Chunks may be less than size bytes if the
1229 chunk is the last chunk in the file, or the file is a socket or
1229 chunk is the last chunk in the file, or the file is a socket or
1230 some other type of file that sometimes reads less data than is
1230 some other type of file that sometimes reads less data than is
1231 requested."""
1231 requested."""
1232 assert size >= 0
1232 assert size >= 0
1233 assert limit is None or limit >= 0
1233 assert limit is None or limit >= 0
1234 while True:
1234 while True:
1235 if limit is None:
1235 if limit is None:
1236 nbytes = size
1236 nbytes = size
1237 else:
1237 else:
1238 nbytes = min(limit, size)
1238 nbytes = min(limit, size)
1239 s = nbytes and f.read(nbytes)
1239 s = nbytes and f.read(nbytes)
1240 if not s:
1240 if not s:
1241 break
1241 break
1242 if limit:
1242 if limit:
1243 limit -= len(s)
1243 limit -= len(s)
1244 yield s
1244 yield s
1245
1245
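# A minimal usage sketch (hypothetical helper, not part of the original
# module). filechunkiter() streams a file in bounded memory; 'limit' caps the
# total number of bytes read:
def _demo_copyprefix(src, dst, nbytes):
    '''Copy at most the first nbytes bytes of src into dst.'''
    inf = posixfile(src, 'rb')
    outf = posixfile(dst, 'wb')
    try:
        for chunk in filechunkiter(inf, size=65536, limit=nbytes):
            outf.write(chunk)
    finally:
        inf.close()
        outf.close()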
1246 def makedate(timestamp=None):
1246 def makedate(timestamp=None):
1247 '''Return a unix timestamp (or the current time) as a (unixtime,
1247 '''Return a unix timestamp (or the current time) as a (unixtime,
1248 offset) tuple based off the local timezone.'''
1248 offset) tuple based off the local timezone.'''
1249 if timestamp is None:
1249 if timestamp is None:
1250 timestamp = time.time()
1250 timestamp = time.time()
1251 if timestamp < 0:
1251 if timestamp < 0:
1252 hint = _("check your clock")
1252 hint = _("check your clock")
1253 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
1253 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
1254 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
1254 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
1255 datetime.datetime.fromtimestamp(timestamp))
1255 datetime.datetime.fromtimestamp(timestamp))
1256 tz = delta.days * 86400 + delta.seconds
1256 tz = delta.days * 86400 + delta.seconds
1257 return timestamp, tz
1257 return timestamp, tz
1258
1258
1259 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1259 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1260 """represent a (unixtime, offset) tuple as a localized time.
1260 """represent a (unixtime, offset) tuple as a localized time.
1261 unixtime is seconds since the epoch, and offset is the time zone's
1261 unixtime is seconds since the epoch, and offset is the time zone's
1262 number of seconds away from UTC. To omit the time zone from the
1263 output, use a format string without "%1%2" or "%z"."""
1264 t, tz = date or makedate()
1264 t, tz = date or makedate()
1265 if t < 0:
1265 if t < 0:
1266 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
1266 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
1267 tz = 0
1267 tz = 0
1268 if "%1" in format or "%2" in format or "%z" in format:
1268 if "%1" in format or "%2" in format or "%z" in format:
1269 sign = (tz > 0) and "-" or "+"
1269 sign = (tz > 0) and "-" or "+"
1270 minutes = abs(tz) // 60
1270 minutes = abs(tz) // 60
1271 format = format.replace("%z", "%1%2")
1271 format = format.replace("%z", "%1%2")
1272 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
1272 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
1273 format = format.replace("%2", "%02d" % (minutes % 60))
1273 format = format.replace("%2", "%02d" % (minutes % 60))
1274 try:
1274 try:
1275 t = time.gmtime(float(t) - tz)
1275 t = time.gmtime(float(t) - tz)
1276 except ValueError:
1276 except ValueError:
1277 # time was out of range
1277 # time was out of range
1278 t = time.gmtime(sys.maxint)
1278 t = time.gmtime(sys.maxint)
1279 s = time.strftime(format, t)
1279 s = time.strftime(format, t)
1280 return s
1280 return s
1281
1281
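# A minimal usage sketch (hypothetical helper and example values, not part of
# the original module). Mercurial handles dates as (unixtime, offset) pairs;
# datestr() understands the extra %1/%2 (or %z) codes for rendering the
# offset:
def _demo_dates():
    now = makedate()              # the current time as (unixtime, offset)
    when = (1424000000, -3600)    # a commit made on a UTC+1 machine
    return now, datestr(when, '%Y-%m-%d %H:%M:%S %1%2')
    # the second value is '2015-02-15 12:33:20 +0100'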
1282 def shortdate(date=None):
1282 def shortdate(date=None):
1283 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1283 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1284 return datestr(date, format='%Y-%m-%d')
1284 return datestr(date, format='%Y-%m-%d')
1285
1285
1286 def strdate(string, format, defaults=[]):
1286 def strdate(string, format, defaults=[]):
1287 """parse a localized time string and return a (unixtime, offset) tuple.
1287 """parse a localized time string and return a (unixtime, offset) tuple.
1288 if the string cannot be parsed, ValueError is raised."""
1288 if the string cannot be parsed, ValueError is raised."""
1289 def timezone(string):
1289 def timezone(string):
1290 tz = string.split()[-1]
1290 tz = string.split()[-1]
1291 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1291 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1292 sign = (tz[0] == "+") and 1 or -1
1292 sign = (tz[0] == "+") and 1 or -1
1293 hours = int(tz[1:3])
1293 hours = int(tz[1:3])
1294 minutes = int(tz[3:5])
1294 minutes = int(tz[3:5])
1295 return -sign * (hours * 60 + minutes) * 60
1295 return -sign * (hours * 60 + minutes) * 60
1296 if tz == "GMT" or tz == "UTC":
1296 if tz == "GMT" or tz == "UTC":
1297 return 0
1297 return 0
1298 return None
1298 return None
1299
1299
1300 # NOTE: unixtime = localunixtime + offset
1300 # NOTE: unixtime = localunixtime + offset
1301 offset, date = timezone(string), string
1301 offset, date = timezone(string), string
1302 if offset is not None:
1302 if offset is not None:
1303 date = " ".join(string.split()[:-1])
1303 date = " ".join(string.split()[:-1])
1304
1304
1305 # add missing elements from defaults
1305 # add missing elements from defaults
1306 usenow = False # default to using biased defaults
1306 usenow = False # default to using biased defaults
1307 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1307 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1308 found = [True for p in part if ("%"+p) in format]
1308 found = [True for p in part if ("%"+p) in format]
1309 if not found:
1309 if not found:
1310 date += "@" + defaults[part][usenow]
1310 date += "@" + defaults[part][usenow]
1311 format += "@%" + part[0]
1311 format += "@%" + part[0]
1312 else:
1312 else:
1313 # We've found a specific time element, less specific time
1313 # We've found a specific time element, less specific time
1314 # elements are relative to today
1314 # elements are relative to today
1315 usenow = True
1315 usenow = True
1316
1316
1317 timetuple = time.strptime(date, format)
1317 timetuple = time.strptime(date, format)
1318 localunixtime = int(calendar.timegm(timetuple))
1318 localunixtime = int(calendar.timegm(timetuple))
1319 if offset is None:
1319 if offset is None:
1320 # local timezone
1320 # local timezone
1321 unixtime = int(time.mktime(timetuple))
1321 unixtime = int(time.mktime(timetuple))
1322 offset = unixtime - localunixtime
1322 offset = unixtime - localunixtime
1323 else:
1323 else:
1324 unixtime = localunixtime + offset
1324 unixtime = localunixtime + offset
1325 return unixtime, offset
1325 return unixtime, offset
1326
1326
1327 def parsedate(date, formats=None, bias={}):
1327 def parsedate(date, formats=None, bias={}):
1328 """parse a localized date/time and return a (unixtime, offset) tuple.
1328 """parse a localized date/time and return a (unixtime, offset) tuple.
1329
1329
1330 The date may be a "unixtime offset" string or in one of the specified
1330 The date may be a "unixtime offset" string or in one of the specified
1331 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1331 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1332
1332
1333 >>> parsedate(' today ') == parsedate(\
1333 >>> parsedate(' today ') == parsedate(\
1334 datetime.date.today().strftime('%b %d'))
1334 datetime.date.today().strftime('%b %d'))
1335 True
1335 True
1336 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1336 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1337 datetime.timedelta(days=1)\
1337 datetime.timedelta(days=1)\
1338 ).strftime('%b %d'))
1338 ).strftime('%b %d'))
1339 True
1339 True
1340 >>> now, tz = makedate()
1340 >>> now, tz = makedate()
1341 >>> strnow, strtz = parsedate('now')
1341 >>> strnow, strtz = parsedate('now')
1342 >>> (strnow - now) < 1
1342 >>> (strnow - now) < 1
1343 True
1343 True
1344 >>> tz == strtz
1344 >>> tz == strtz
1345 True
1345 True
1346 """
1346 """
1347 if not date:
1347 if not date:
1348 return 0, 0
1348 return 0, 0
1349 if isinstance(date, tuple) and len(date) == 2:
1349 if isinstance(date, tuple) and len(date) == 2:
1350 return date
1350 return date
1351 if not formats:
1351 if not formats:
1352 formats = defaultdateformats
1352 formats = defaultdateformats
1353 date = date.strip()
1353 date = date.strip()
1354
1354
1355 if date == _('now'):
1355 if date == _('now'):
1356 return makedate()
1356 return makedate()
1357 if date == _('today'):
1357 if date == _('today'):
1358 date = datetime.date.today().strftime('%b %d')
1358 date = datetime.date.today().strftime('%b %d')
1359 elif date == _('yesterday'):
1359 elif date == _('yesterday'):
1360 date = (datetime.date.today() -
1360 date = (datetime.date.today() -
1361 datetime.timedelta(days=1)).strftime('%b %d')
1361 datetime.timedelta(days=1)).strftime('%b %d')
1362
1362
1363 try:
1363 try:
1364 when, offset = map(int, date.split(' '))
1364 when, offset = map(int, date.split(' '))
1365 except ValueError:
1365 except ValueError:
1366 # fill out defaults
1366 # fill out defaults
1367 now = makedate()
1367 now = makedate()
1368 defaults = {}
1368 defaults = {}
1369 for part in ("d", "mb", "yY", "HI", "M", "S"):
1369 for part in ("d", "mb", "yY", "HI", "M", "S"):
1370 # this piece is for rounding the specific end of unknowns
1370 # this piece is for rounding the specific end of unknowns
1371 b = bias.get(part)
1371 b = bias.get(part)
1372 if b is None:
1372 if b is None:
1373 if part[0] in "HMS":
1373 if part[0] in "HMS":
1374 b = "00"
1374 b = "00"
1375 else:
1375 else:
1376 b = "0"
1376 b = "0"
1377
1377
1378 # this piece is for matching the generic end to today's date
1378 # this piece is for matching the generic end to today's date
1379 n = datestr(now, "%" + part[0])
1379 n = datestr(now, "%" + part[0])
1380
1380
1381 defaults[part] = (b, n)
1381 defaults[part] = (b, n)
1382
1382
1383 for format in formats:
1383 for format in formats:
1384 try:
1384 try:
1385 when, offset = strdate(date, format, defaults)
1385 when, offset = strdate(date, format, defaults)
1386 except (ValueError, OverflowError):
1386 except (ValueError, OverflowError):
1387 pass
1387 pass
1388 else:
1388 else:
1389 break
1389 break
1390 else:
1390 else:
1391 raise Abort(_('invalid date: %r') % date)
1391 raise Abort(_('invalid date: %r') % date)
1392 # validate explicit (probably user-specified) date and
1392 # validate explicit (probably user-specified) date and
1393 # time zone offset. values must fit in signed 32 bits for
1393 # time zone offset. values must fit in signed 32 bits for
1394 # current 32-bit linux runtimes. timezones go from UTC-12
1394 # current 32-bit linux runtimes. timezones go from UTC-12
1395 # to UTC+14
1395 # to UTC+14
1396 if abs(when) > 0x7fffffff:
1396 if abs(when) > 0x7fffffff:
1397 raise Abort(_('date exceeds 32 bits: %d') % when)
1397 raise Abort(_('date exceeds 32 bits: %d') % when)
1398 if when < 0:
1398 if when < 0:
1399 raise Abort(_('negative date value: %d') % when)
1399 raise Abort(_('negative date value: %d') % when)
1400 if offset < -50400 or offset > 43200:
1400 if offset < -50400 or offset > 43200:
1401 raise Abort(_('impossible time zone offset: %d') % offset)
1401 raise Abort(_('impossible time zone offset: %d') % offset)
1402 return when, offset
1402 return when, offset
1403
1403
1404 def matchdate(date):
1404 def matchdate(date):
1405 """Return a function that matches a given date match specifier
1405 """Return a function that matches a given date match specifier
1406
1406
1407 Formats include:
1407 Formats include:
1408
1408
1409 '{date}' match a given date to the accuracy provided
1409 '{date}' match a given date to the accuracy provided
1410
1410
1411 '<{date}' on or before a given date
1411 '<{date}' on or before a given date
1412
1412
1413 '>{date}' on or after a given date
1413 '>{date}' on or after a given date
1414
1414
1415 >>> p1 = parsedate("10:29:59")
1415 >>> p1 = parsedate("10:29:59")
1416 >>> p2 = parsedate("10:30:00")
1416 >>> p2 = parsedate("10:30:00")
1417 >>> p3 = parsedate("10:30:59")
1417 >>> p3 = parsedate("10:30:59")
1418 >>> p4 = parsedate("10:31:00")
1418 >>> p4 = parsedate("10:31:00")
1419 >>> p5 = parsedate("Sep 15 10:30:00 1999")
1419 >>> p5 = parsedate("Sep 15 10:30:00 1999")
1420 >>> f = matchdate("10:30")
1420 >>> f = matchdate("10:30")
1421 >>> f(p1[0])
1421 >>> f(p1[0])
1422 False
1422 False
1423 >>> f(p2[0])
1423 >>> f(p2[0])
1424 True
1424 True
1425 >>> f(p3[0])
1425 >>> f(p3[0])
1426 True
1426 True
1427 >>> f(p4[0])
1427 >>> f(p4[0])
1428 False
1428 False
1429 >>> f(p5[0])
1429 >>> f(p5[0])
1430 False
1430 False
1431 """
1431 """
1432
1432
1433 def lower(date):
1433 def lower(date):
1434 d = {'mb': "1", 'd': "1"}
1434 d = {'mb': "1", 'd': "1"}
1435 return parsedate(date, extendeddateformats, d)[0]
1435 return parsedate(date, extendeddateformats, d)[0]
1436
1436
1437 def upper(date):
1437 def upper(date):
1438 d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
1438 d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
1439 for days in ("31", "30", "29"):
1439 for days in ("31", "30", "29"):
1440 try:
1440 try:
1441 d["d"] = days
1441 d["d"] = days
1442 return parsedate(date, extendeddateformats, d)[0]
1442 return parsedate(date, extendeddateformats, d)[0]
1443 except Abort:
1443 except Abort:
1444 pass
1444 pass
1445 d["d"] = "28"
1445 d["d"] = "28"
1446 return parsedate(date, extendeddateformats, d)[0]
1446 return parsedate(date, extendeddateformats, d)[0]
1447
1447
1448 date = date.strip()
1448 date = date.strip()
1449
1449
1450 if not date:
1450 if not date:
1451 raise Abort(_("dates cannot consist entirely of whitespace"))
1451 raise Abort(_("dates cannot consist entirely of whitespace"))
1452 elif date[0] == "<":
1452 elif date[0] == "<":
1453 if not date[1:]:
1453 if not date[1:]:
1454 raise Abort(_("invalid day spec, use '<DATE'"))
1454 raise Abort(_("invalid day spec, use '<DATE'"))
1455 when = upper(date[1:])
1455 when = upper(date[1:])
1456 return lambda x: x <= when
1456 return lambda x: x <= when
1457 elif date[0] == ">":
1457 elif date[0] == ">":
1458 if not date[1:]:
1458 if not date[1:]:
1459 raise Abort(_("invalid day spec, use '>DATE'"))
1459 raise Abort(_("invalid day spec, use '>DATE'"))
1460 when = lower(date[1:])
1460 when = lower(date[1:])
1461 return lambda x: x >= when
1461 return lambda x: x >= when
1462 elif date[0] == "-":
1462 elif date[0] == "-":
1463 try:
1463 try:
1464 days = int(date[1:])
1464 days = int(date[1:])
1465 except ValueError:
1465 except ValueError:
1466 raise Abort(_("invalid day spec: %s") % date[1:])
1466 raise Abort(_("invalid day spec: %s") % date[1:])
1467 if days < 0:
1467 if days < 0:
1468 raise Abort(_('%s must be nonnegative (see "hg help dates")')
1468 raise Abort(_('%s must be nonnegative (see "hg help dates")')
1469 % date[1:])
1469 % date[1:])
1470 when = makedate()[0] - days * 3600 * 24
1470 when = makedate()[0] - days * 3600 * 24
1471 return lambda x: x >= when
1471 return lambda x: x >= when
1472 elif " to " in date:
1472 elif " to " in date:
1473 a, b = date.split(" to ")
1473 a, b = date.split(" to ")
1474 start, stop = lower(a), upper(b)
1474 start, stop = lower(a), upper(b)
1475 return lambda x: x >= start and x <= stop
1475 return lambda x: x >= start and x <= stop
1476 else:
1476 else:
1477 start, stop = lower(date), upper(date)
1477 start, stop = lower(date), upper(date)
1478 return lambda x: x >= start and x <= stop
1478 return lambda x: x >= start and x <= stop
1479
1479
1480 def shortuser(user):
1480 def shortuser(user):
1481 """Return a short representation of a user name or email address."""
1481 """Return a short representation of a user name or email address."""
1482 f = user.find('@')
1482 f = user.find('@')
1483 if f >= 0:
1483 if f >= 0:
1484 user = user[:f]
1484 user = user[:f]
1485 f = user.find('<')
1485 f = user.find('<')
1486 if f >= 0:
1486 if f >= 0:
1487 user = user[f + 1:]
1487 user = user[f + 1:]
1488 f = user.find(' ')
1488 f = user.find(' ')
1489 if f >= 0:
1489 if f >= 0:
1490 user = user[:f]
1490 user = user[:f]
1491 f = user.find('.')
1491 f = user.find('.')
1492 if f >= 0:
1492 if f >= 0:
1493 user = user[:f]
1493 user = user[:f]
1494 return user
1494 return user
1495
1495
1496 def emailuser(user):
1496 def emailuser(user):
1497 """Return the user portion of an email address."""
1497 """Return the user portion of an email address."""
1498 f = user.find('@')
1498 f = user.find('@')
1499 if f >= 0:
1499 if f >= 0:
1500 user = user[:f]
1500 user = user[:f]
1501 f = user.find('<')
1501 f = user.find('<')
1502 if f >= 0:
1502 if f >= 0:
1503 user = user[f + 1:]
1503 user = user[f + 1:]
1504 return user
1504 return user
1505
1505
1506 def email(author):
1506 def email(author):
1507 '''get email of author.'''
1507 '''get email of author.'''
1508 r = author.find('>')
1508 r = author.find('>')
1509 if r == -1:
1509 if r == -1:
1510 r = None
1510 r = None
1511 return author[author.find('<') + 1:r]
1511 return author[author.find('<') + 1:r]
1512
1512
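# A minimal usage sketch (hypothetical helper and author string, not part of
# the original module). The three helpers above extract progressively smaller
# pieces of a "Name <addr>" author string:
def _demo_author():
    author = 'Joe User <joe.user@example.com>'
    return email(author), emailuser(author), shortuser(author)
    # -> ('joe.user@example.com', 'joe.user', 'joe')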
1513 def ellipsis(text, maxlength=400):
1513 def ellipsis(text, maxlength=400):
1514 """Trim string to at most maxlength (default: 400) columns in display."""
1514 """Trim string to at most maxlength (default: 400) columns in display."""
1515 return encoding.trim(text, maxlength, ellipsis='...')
1515 return encoding.trim(text, maxlength, ellipsis='...')
1516
1516
1517 def unitcountfn(*unittable):
1517 def unitcountfn(*unittable):
1518 '''return a function that renders a readable count of some quantity'''
1518 '''return a function that renders a readable count of some quantity'''
1519
1519
1520 def go(count):
1520 def go(count):
1521 for multiplier, divisor, format in unittable:
1521 for multiplier, divisor, format in unittable:
1522 if count >= divisor * multiplier:
1522 if count >= divisor * multiplier:
1523 return format % (count / float(divisor))
1523 return format % (count / float(divisor))
1524 return unittable[-1][2] % count
1524 return unittable[-1][2] % count
1525
1525
1526 return go
1526 return go
1527
1527
1528 bytecount = unitcountfn(
1528 bytecount = unitcountfn(
1529 (100, 1 << 30, _('%.0f GB')),
1529 (100, 1 << 30, _('%.0f GB')),
1530 (10, 1 << 30, _('%.1f GB')),
1530 (10, 1 << 30, _('%.1f GB')),
1531 (1, 1 << 30, _('%.2f GB')),
1531 (1, 1 << 30, _('%.2f GB')),
1532 (100, 1 << 20, _('%.0f MB')),
1532 (100, 1 << 20, _('%.0f MB')),
1533 (10, 1 << 20, _('%.1f MB')),
1533 (10, 1 << 20, _('%.1f MB')),
1534 (1, 1 << 20, _('%.2f MB')),
1534 (1, 1 << 20, _('%.2f MB')),
1535 (100, 1 << 10, _('%.0f KB')),
1535 (100, 1 << 10, _('%.0f KB')),
1536 (10, 1 << 10, _('%.1f KB')),
1536 (10, 1 << 10, _('%.1f KB')),
1537 (1, 1 << 10, _('%.2f KB')),
1537 (1, 1 << 10, _('%.2f KB')),
1538 (1, 1, _('%.0f bytes')),
1538 (1, 1, _('%.0f bytes')),
1539 )
1539 )
1540
1540
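# A minimal usage sketch (hypothetical helper, not part of the original
# module). bytecount() picks the largest unit the value reaches and shows
# two, one or zero decimals depending on whether the leading figure is in the
# ones, tens or hundreds; the outputs below assume the default (English)
# translations:
def _demo_bytecount():
    return bytecount(512), bytecount(1 << 20), bytecount(150 * (1 << 20))
    # -> ('512 bytes', '1.00 MB', '150 MB')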
1541 def uirepr(s):
1541 def uirepr(s):
1542 # Avoid double backslash in Windows path repr()
1542 # Avoid double backslash in Windows path repr()
1543 return repr(s).replace('\\\\', '\\')
1543 return repr(s).replace('\\\\', '\\')
1544
1544
1545 # delay import of textwrap
1545 # delay import of textwrap
1546 def MBTextWrapper(**kwargs):
1546 def MBTextWrapper(**kwargs):
1547 class tw(textwrap.TextWrapper):
1547 class tw(textwrap.TextWrapper):
1548 """
1548 """
1549 Extend TextWrapper for width-awareness.
1549 Extend TextWrapper for width-awareness.
1550
1550
1551 Neither the number of bytes in any encoding nor the number of
1552 characters is appropriate for calculating the terminal columns of a string.
1553 
1554 The original TextWrapper implementation uses the built-in len() directly,
1555 so it is overridden here to use the width information of each character.
1556 
1557 In addition, characters classified as 'ambiguous' width are
1558 treated as wide in East Asian locales, but as narrow elsewhere.
1559 
1560 This requires a user decision to determine the width of such characters.
1561 """
1561 """
1562 def __init__(self, **kwargs):
1562 def __init__(self, **kwargs):
1563 textwrap.TextWrapper.__init__(self, **kwargs)
1563 textwrap.TextWrapper.__init__(self, **kwargs)
1564
1564
1565 # for compatibility between 2.4 and 2.6
1565 # for compatibility between 2.4 and 2.6
1566 if getattr(self, 'drop_whitespace', None) is None:
1566 if getattr(self, 'drop_whitespace', None) is None:
1567 self.drop_whitespace = kwargs.get('drop_whitespace', True)
1567 self.drop_whitespace = kwargs.get('drop_whitespace', True)
1568
1568
1569 def _cutdown(self, ucstr, space_left):
1569 def _cutdown(self, ucstr, space_left):
1570 l = 0
1570 l = 0
1571 colwidth = encoding.ucolwidth
1571 colwidth = encoding.ucolwidth
1572 for i in xrange(len(ucstr)):
1572 for i in xrange(len(ucstr)):
1573 l += colwidth(ucstr[i])
1573 l += colwidth(ucstr[i])
1574 if space_left < l:
1574 if space_left < l:
1575 return (ucstr[:i], ucstr[i:])
1575 return (ucstr[:i], ucstr[i:])
1576 return ucstr, ''
1576 return ucstr, ''
1577
1577
1578 # overriding of base class
1578 # overriding of base class
1579 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1579 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1580 space_left = max(width - cur_len, 1)
1580 space_left = max(width - cur_len, 1)
1581
1581
1582 if self.break_long_words:
1582 if self.break_long_words:
1583 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1583 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1584 cur_line.append(cut)
1584 cur_line.append(cut)
1585 reversed_chunks[-1] = res
1585 reversed_chunks[-1] = res
1586 elif not cur_line:
1586 elif not cur_line:
1587 cur_line.append(reversed_chunks.pop())
1587 cur_line.append(reversed_chunks.pop())
1588
1588
1589 # this overriding code is imported from TextWrapper of python 2.6
1589 # this overriding code is imported from TextWrapper of python 2.6
1590 # to calculate columns of string by 'encoding.ucolwidth()'
1590 # to calculate columns of string by 'encoding.ucolwidth()'
1591 def _wrap_chunks(self, chunks):
1591 def _wrap_chunks(self, chunks):
1592 colwidth = encoding.ucolwidth
1592 colwidth = encoding.ucolwidth
1593
1593
1594 lines = []
1594 lines = []
1595 if self.width <= 0:
1595 if self.width <= 0:
1596 raise ValueError("invalid width %r (must be > 0)" % self.width)
1596 raise ValueError("invalid width %r (must be > 0)" % self.width)
1597
1597
1598 # Arrange in reverse order so items can be efficiently popped
1599 # from a stack of chunks.
1600 chunks.reverse()
1600 chunks.reverse()
1601
1601
1602 while chunks:
1602 while chunks:
1603
1603
1604 # Start the list of chunks that will make up the current line.
1604 # Start the list of chunks that will make up the current line.
1605 # cur_len is just the length of all the chunks in cur_line.
1605 # cur_len is just the length of all the chunks in cur_line.
1606 cur_line = []
1606 cur_line = []
1607 cur_len = 0
1607 cur_len = 0
1608
1608
1609 # Figure out which static string will prefix this line.
1609 # Figure out which static string will prefix this line.
1610 if lines:
1610 if lines:
1611 indent = self.subsequent_indent
1611 indent = self.subsequent_indent
1612 else:
1612 else:
1613 indent = self.initial_indent
1613 indent = self.initial_indent
1614
1614
1615 # Maximum width for this line.
1615 # Maximum width for this line.
1616 width = self.width - len(indent)
1616 width = self.width - len(indent)
1617
1617
1618 # First chunk on line is whitespace -- drop it, unless this
1618 # First chunk on line is whitespace -- drop it, unless this
1619 # is the very beginning of the text (i.e. no lines started yet).
1619 # is the very beginning of the text (i.e. no lines started yet).
1620 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
1620 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
1621 del chunks[-1]
1621 del chunks[-1]
1622
1622
1623 while chunks:
1623 while chunks:
1624 l = colwidth(chunks[-1])
1624 l = colwidth(chunks[-1])
1625
1625
1626 # Can at least squeeze this chunk onto the current line.
1626 # Can at least squeeze this chunk onto the current line.
1627 if cur_len + l <= width:
1627 if cur_len + l <= width:
1628 cur_line.append(chunks.pop())
1628 cur_line.append(chunks.pop())
1629 cur_len += l
1629 cur_len += l
1630
1630
1631 # Nope, this line is full.
1631 # Nope, this line is full.
1632 else:
1632 else:
1633 break
1633 break
1634
1634
1635 # The current line is full, and the next chunk is too big to
1635 # The current line is full, and the next chunk is too big to
1636 # fit on *any* line (not just this one).
1636 # fit on *any* line (not just this one).
1637 if chunks and colwidth(chunks[-1]) > width:
1637 if chunks and colwidth(chunks[-1]) > width:
1638 self._handle_long_word(chunks, cur_line, cur_len, width)
1638 self._handle_long_word(chunks, cur_line, cur_len, width)
1639
1639
1640 # If the last chunk on this line is all whitespace, drop it.
1640 # If the last chunk on this line is all whitespace, drop it.
1641 if (self.drop_whitespace and
1641 if (self.drop_whitespace and
1642 cur_line and cur_line[-1].strip() == ''):
1642 cur_line and cur_line[-1].strip() == ''):
1643 del cur_line[-1]
1643 del cur_line[-1]
1644
1644
1645 # Convert current line back to a string and store it in list
1645 # Convert current line back to a string and store it in list
1646 # of all lines (return value).
1646 # of all lines (return value).
1647 if cur_line:
1647 if cur_line:
1648 lines.append(indent + ''.join(cur_line))
1648 lines.append(indent + ''.join(cur_line))
1649
1649
1650 return lines
1650 return lines
1651
1651
1652 global MBTextWrapper
1652 global MBTextWrapper
1653 MBTextWrapper = tw
1653 MBTextWrapper = tw
1654 return tw(**kwargs)
1654 return tw(**kwargs)
1655
1655
1656 def wrap(line, width, initindent='', hangindent=''):
1656 def wrap(line, width, initindent='', hangindent=''):
1657 maxindent = max(len(hangindent), len(initindent))
1657 maxindent = max(len(hangindent), len(initindent))
1658 if width <= maxindent:
1658 if width <= maxindent:
1659 # adjust for weird terminal size
1659 # adjust for weird terminal size
1660 width = max(78, maxindent + 1)
1660 width = max(78, maxindent + 1)
1661 line = line.decode(encoding.encoding, encoding.encodingmode)
1661 line = line.decode(encoding.encoding, encoding.encodingmode)
1662 initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
1662 initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
1663 hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
1663 hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
1664 wrapper = MBTextWrapper(width=width,
1664 wrapper = MBTextWrapper(width=width,
1665 initial_indent=initindent,
1665 initial_indent=initindent,
1666 subsequent_indent=hangindent)
1666 subsequent_indent=hangindent)
1667 return wrapper.fill(line).encode(encoding.encoding)
1667 return wrapper.fill(line).encode(encoding.encoding)
1668
1668
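# A minimal usage sketch (hypothetical helper, not part of the original
# module). wrap() counts terminal columns rather than bytes and supports a
# different indent for the first and for continuation lines, e.g. for
# indented option descriptions:
def _demo_wrap():
    text = 'a fairly long option description that will not fit on one line'
    return wrap(text, 30, initindent='  --opt  ', hangindent=' ' * 9)
    # the result is one string with embedded newlines; continuation lines
    # start with the nine-space hanging indent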
1669 def iterlines(iterator):
1669 def iterlines(iterator):
1670 for chunk in iterator:
1670 for chunk in iterator:
1671 for line in chunk.splitlines():
1671 for line in chunk.splitlines():
1672 yield line
1672 yield line
1673
1673
1674 def expandpath(path):
1674 def expandpath(path):
1675 return os.path.expanduser(os.path.expandvars(path))
1675 return os.path.expanduser(os.path.expandvars(path))
1676
1676
1677 def hgcmd():
1677 def hgcmd():
1678 """Return the command used to execute current hg
1678 """Return the command used to execute current hg
1679
1679
1680 This is different from hgexecutable() because on Windows we want
1681 to avoid things that open new shell windows, like batch files, so we
1682 return either the python invocation or the current executable.
1683 """
1683 """
1684 if mainfrozen():
1684 if mainfrozen():
1685 return [sys.executable]
1685 return [sys.executable]
1686 return gethgcmd()
1686 return gethgcmd()
1687
1687
1688 def rundetached(args, condfn):
1688 def rundetached(args, condfn):
1689 """Execute the argument list in a detached process.
1689 """Execute the argument list in a detached process.
1690
1690
1691 condfn is a callable which is called repeatedly and should return
1691 condfn is a callable which is called repeatedly and should return
1692 True once the child process is known to have started successfully.
1692 True once the child process is known to have started successfully.
1693 At this point, the child process PID is returned. If the child
1693 At this point, the child process PID is returned. If the child
1694 process fails to start or finishes before condfn() evaluates to
1694 process fails to start or finishes before condfn() evaluates to
1695 True, return -1.
1695 True, return -1.
1696 """
1696 """
1697 # Windows case is easier because the child process is either
1697 # Windows case is easier because the child process is either
1698 # successfully starting and validating the condition or exiting
1698 # successfully starting and validating the condition or exiting
1699 # on failure. We just poll on its PID. On Unix, if the child
1699 # on failure. We just poll on its PID. On Unix, if the child
1700 # process fails to start, it will be left in a zombie state until
1700 # process fails to start, it will be left in a zombie state until
1701 # the parent waits on it, which we cannot do since we expect a long
1702 # running process on success. Instead we listen for SIGCHLD telling
1702 # running process on success. Instead we listen for SIGCHLD telling
1703 # us our child process terminated.
1703 # us our child process terminated.
1704 terminated = set()
1704 terminated = set()
1705 def handler(signum, frame):
1705 def handler(signum, frame):
1706 terminated.add(os.wait())
1706 terminated.add(os.wait())
1707 prevhandler = None
1707 prevhandler = None
1708 SIGCHLD = getattr(signal, 'SIGCHLD', None)
1708 SIGCHLD = getattr(signal, 'SIGCHLD', None)
1709 if SIGCHLD is not None:
1709 if SIGCHLD is not None:
1710 prevhandler = signal.signal(SIGCHLD, handler)
1710 prevhandler = signal.signal(SIGCHLD, handler)
1711 try:
1711 try:
1712 pid = spawndetached(args)
1712 pid = spawndetached(args)
1713 while not condfn():
1713 while not condfn():
1714 if ((pid in terminated or not testpid(pid))
1714 if ((pid in terminated or not testpid(pid))
1715 and not condfn()):
1715 and not condfn()):
1716 return -1
1716 return -1
1717 time.sleep(0.1)
1717 time.sleep(0.1)
1718 return pid
1718 return pid
1719 finally:
1719 finally:
1720 if prevhandler is not None:
1720 if prevhandler is not None:
1721 signal.signal(signal.SIGCHLD, prevhandler)
1721 signal.signal(signal.SIGCHLD, prevhandler)
1722
1722
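# A minimal usage sketch (hypothetical helper, command and pid file, not part
# of the original module). The condition function is polled until the
# detached child is known to be up; here "up" means its pid file exists:
def _demo_rundetached(pidfile):
    args = ['some-daemon', '--pid-file', pidfile]   # hypothetical command
    pid = rundetached(args, lambda: os.path.lexists(pidfile))
    return pid    # -1 if the child exited before the pid file appeared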
1723 try:
1723 try:
1724 any, all = any, all
1724 any, all = any, all
1725 except NameError:
1725 except NameError:
1726 def any(iterable):
1726 def any(iterable):
1727 for i in iterable:
1727 for i in iterable:
1728 if i:
1728 if i:
1729 return True
1729 return True
1730 return False
1730 return False
1731
1731
1732 def all(iterable):
1732 def all(iterable):
1733 for i in iterable:
1733 for i in iterable:
1734 if not i:
1734 if not i:
1735 return False
1735 return False
1736 return True
1736 return True
1737
1737
1738 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
1738 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
1739 """Return the result of interpolating items in the mapping into string s.
1739 """Return the result of interpolating items in the mapping into string s.
1740
1740
1741 prefix is a single character string, or a two character string with
1741 prefix is a single character string, or a two character string with
1742 a backslash as the first character if the prefix needs to be escaped in
1742 a backslash as the first character if the prefix needs to be escaped in
1743 a regular expression.
1743 a regular expression.
1744
1744
1745 fn is an optional function that will be applied to the replacement text
1745 fn is an optional function that will be applied to the replacement text
1746 just before replacement.
1746 just before replacement.
1747
1747
1748 escape_prefix is an optional flag that makes a doubled prefix act as
1749 an escape for a literal prefix character.
1750 """
1750 """
1751 fn = fn or (lambda s: s)
1751 fn = fn or (lambda s: s)
1752 patterns = '|'.join(mapping.keys())
1752 patterns = '|'.join(mapping.keys())
1753 if escape_prefix:
1753 if escape_prefix:
1754 patterns += '|' + prefix
1754 patterns += '|' + prefix
1755 if len(prefix) > 1:
1755 if len(prefix) > 1:
1756 prefix_char = prefix[1:]
1756 prefix_char = prefix[1:]
1757 else:
1757 else:
1758 prefix_char = prefix
1758 prefix_char = prefix
1759 mapping[prefix_char] = prefix_char
1759 mapping[prefix_char] = prefix_char
1760 r = remod.compile(r'%s(%s)' % (prefix, patterns))
1760 r = remod.compile(r'%s(%s)' % (prefix, patterns))
1761 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1761 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1762
1762
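# A minimal usage sketch (hypothetical helper and mapping, not part of the
# original module). interpolate() expands prefix-marked placeholders from a
# mapping; with escape_prefix=True a doubled prefix ('%%' here) stands for a
# literal prefix character:
def _demo_interpolate():
    mapping = {'user': 'alice', 'rev': '42'}
    return interpolate('%', mapping, 'pushed %rev as %user')
    # -> 'pushed 42 as alice'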
1763 def getport(port):
1763 def getport(port):
1764 """Return the port for a given network service.
1764 """Return the port for a given network service.
1765
1765
1766 If port is an integer, it's returned as is. If it's a string, it's
1766 If port is an integer, it's returned as is. If it's a string, it's
1767 looked up using socket.getservbyname(). If there's no matching
1767 looked up using socket.getservbyname(). If there's no matching
1768 service, util.Abort is raised.
1768 service, util.Abort is raised.
1769 """
1769 """
1770 try:
1770 try:
1771 return int(port)
1771 return int(port)
1772 except ValueError:
1772 except ValueError:
1773 pass
1773 pass
1774
1774
1775 try:
1775 try:
1776 return socket.getservbyname(port)
1776 return socket.getservbyname(port)
1777 except socket.error:
1777 except socket.error:
1778 raise Abort(_("no port number associated with service '%s'") % port)
1778 raise Abort(_("no port number associated with service '%s'") % port)
1779
1779
1780 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1780 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1781 '0': False, 'no': False, 'false': False, 'off': False,
1781 '0': False, 'no': False, 'false': False, 'off': False,
1782 'never': False}
1782 'never': False}
1783
1783
1784 def parsebool(s):
1784 def parsebool(s):
1785 """Parse s into a boolean.
1785 """Parse s into a boolean.
1786
1786
1787 If s is not a valid boolean, returns None.
1787 If s is not a valid boolean, returns None.
1788 """
1788 """
1789 return _booleans.get(s.lower(), None)
1789 return _booleans.get(s.lower(), None)
1790
1790
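# A minimal usage sketch (hypothetical helper, not part of the original
# module). Two small config-parsing helpers: getport() accepts a number or a
# service name (resolved through the system services database), and
# parsebool() returns None instead of guessing for unknown spellings:
def _demo_configparsing():
    port = getport('8080')       # -> 8080
    https = getport('https')     # -> 443 on systems that know the service
    flags = [parsebool(s) for s in ('yes', 'off', 'maybe')]
    return port, https, flags    # flags -> [True, False, None]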
1791 _hexdig = '0123456789ABCDEFabcdef'
1791 _hexdig = '0123456789ABCDEFabcdef'
1792 _hextochr = dict((a + b, chr(int(a + b, 16)))
1792 _hextochr = dict((a + b, chr(int(a + b, 16)))
1793 for a in _hexdig for b in _hexdig)
1793 for a in _hexdig for b in _hexdig)
1794
1794
1795 def _urlunquote(s):
1795 def _urlunquote(s):
1796 """Decode HTTP/HTML % encoding.
1796 """Decode HTTP/HTML % encoding.
1797
1797
1798 >>> _urlunquote('abc%20def')
1798 >>> _urlunquote('abc%20def')
1799 'abc def'
1799 'abc def'
1800 """
1800 """
1801 res = s.split('%')
1801 res = s.split('%')
1802 # fastpath
1802 # fastpath
1803 if len(res) == 1:
1803 if len(res) == 1:
1804 return s
1804 return s
1805 s = res[0]
1805 s = res[0]
1806 for item in res[1:]:
1806 for item in res[1:]:
1807 try:
1807 try:
1808 s += _hextochr[item[:2]] + item[2:]
1808 s += _hextochr[item[:2]] + item[2:]
1809 except KeyError:
1809 except KeyError:
1810 s += '%' + item
1810 s += '%' + item
1811 except UnicodeDecodeError:
1811 except UnicodeDecodeError:
1812 s += unichr(int(item[:2], 16)) + item[2:]
1812 s += unichr(int(item[:2], 16)) + item[2:]
1813 return s
1813 return s
1814
1814
1815 class url(object):
1815 class url(object):
1816 r"""Reliable URL parser.
1816 r"""Reliable URL parser.
1817
1817
1818 This parses URLs and provides attributes for the following
1818 This parses URLs and provides attributes for the following
1819 components:
1819 components:
1820
1820
1821 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1821 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1822
1822
1823 Missing components are set to None. The only exception is
1823 Missing components are set to None. The only exception is
1824 fragment, which is set to '' if present but empty.
1824 fragment, which is set to '' if present but empty.
1825
1825
1826 If parsefragment is False, fragment is included in query. If
1826 If parsefragment is False, fragment is included in query. If
1827 parsequery is False, query is included in path. If both are
1827 parsequery is False, query is included in path. If both are
1828 False, both fragment and query are included in path.
1828 False, both fragment and query are included in path.
1829
1829
1830 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1830 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1831
1831
1832 Note that for backward compatibility reasons, bundle URLs do not
1832 Note that for backward compatibility reasons, bundle URLs do not
1833 take host names. That means 'bundle://../' has a path of '../'.
1833 take host names. That means 'bundle://../' has a path of '../'.
1834
1834
1835 Examples:
1835 Examples:
1836
1836
1837 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1837 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1838 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1838 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1839 >>> url('ssh://[::1]:2200//home/joe/repo')
1839 >>> url('ssh://[::1]:2200//home/joe/repo')
1840 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1840 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1841 >>> url('file:///home/joe/repo')
1841 >>> url('file:///home/joe/repo')
1842 <url scheme: 'file', path: '/home/joe/repo'>
1842 <url scheme: 'file', path: '/home/joe/repo'>
1843 >>> url('file:///c:/temp/foo/')
1843 >>> url('file:///c:/temp/foo/')
1844 <url scheme: 'file', path: 'c:/temp/foo/'>
1844 <url scheme: 'file', path: 'c:/temp/foo/'>
1845 >>> url('bundle:foo')
1845 >>> url('bundle:foo')
1846 <url scheme: 'bundle', path: 'foo'>
1846 <url scheme: 'bundle', path: 'foo'>
1847 >>> url('bundle://../foo')
1847 >>> url('bundle://../foo')
1848 <url scheme: 'bundle', path: '../foo'>
1848 <url scheme: 'bundle', path: '../foo'>
1849 >>> url(r'c:\foo\bar')
1849 >>> url(r'c:\foo\bar')
1850 <url path: 'c:\\foo\\bar'>
1850 <url path: 'c:\\foo\\bar'>
1851 >>> url(r'\\blah\blah\blah')
1851 >>> url(r'\\blah\blah\blah')
1852 <url path: '\\\\blah\\blah\\blah'>
1852 <url path: '\\\\blah\\blah\\blah'>
1853 >>> url(r'\\blah\blah\blah#baz')
1853 >>> url(r'\\blah\blah\blah#baz')
1854 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1854 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1855 >>> url(r'file:///C:\users\me')
1855 >>> url(r'file:///C:\users\me')
1856 <url scheme: 'file', path: 'C:\\users\\me'>
1856 <url scheme: 'file', path: 'C:\\users\\me'>
1857
1857
1858 Authentication credentials:
1858 Authentication credentials:
1859
1859
1860 >>> url('ssh://joe:xyz@x/repo')
1860 >>> url('ssh://joe:xyz@x/repo')
1861 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1861 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1862 >>> url('ssh://joe@x/repo')
1862 >>> url('ssh://joe@x/repo')
1863 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1863 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1864
1864
1865 Query strings and fragments:
1865 Query strings and fragments:
1866
1866
1867 >>> url('http://host/a?b#c')
1867 >>> url('http://host/a?b#c')
1868 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1868 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1869 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1869 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1870 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1870 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1871 """
1871 """
1872
1872
1873 _safechars = "!~*'()+"
1873 _safechars = "!~*'()+"
1874 _safepchars = "/!~*'()+:\\"
1874 _safepchars = "/!~*'()+:\\"
1875 _matchscheme = remod.compile(r'^[a-zA-Z0-9+.\-]+:').match
1875 _matchscheme = remod.compile(r'^[a-zA-Z0-9+.\-]+:').match
1876
1876
1877 def __init__(self, path, parsequery=True, parsefragment=True):
1877 def __init__(self, path, parsequery=True, parsefragment=True):
1878 # We slowly chomp away at path until we have only the path left
1878 # We slowly chomp away at path until we have only the path left
1879 self.scheme = self.user = self.passwd = self.host = None
1879 self.scheme = self.user = self.passwd = self.host = None
1880 self.port = self.path = self.query = self.fragment = None
1880 self.port = self.path = self.query = self.fragment = None
1881 self._localpath = True
1881 self._localpath = True
1882 self._hostport = ''
1882 self._hostport = ''
1883 self._origpath = path
1883 self._origpath = path
1884
1884
1885 if parsefragment and '#' in path:
1885 if parsefragment and '#' in path:
1886 path, self.fragment = path.split('#', 1)
1886 path, self.fragment = path.split('#', 1)
1887 if not path:
1887 if not path:
1888 path = None
1888 path = None
1889
1889
1890 # special case for Windows drive letters and UNC paths
1890 # special case for Windows drive letters and UNC paths
1891 if hasdriveletter(path) or path.startswith(r'\\'):
1891 if hasdriveletter(path) or path.startswith(r'\\'):
1892 self.path = path
1892 self.path = path
1893 return
1893 return
1894
1894
1895 # For compatibility reasons, we can't handle bundle paths as
1895 # For compatibility reasons, we can't handle bundle paths as
1896 # normal URLS
1896 # normal URLS
1897 if path.startswith('bundle:'):
1897 if path.startswith('bundle:'):
1898 self.scheme = 'bundle'
1898 self.scheme = 'bundle'
1899 path = path[7:]
1899 path = path[7:]
1900 if path.startswith('//'):
1900 if path.startswith('//'):
1901 path = path[2:]
1901 path = path[2:]
1902 self.path = path
1902 self.path = path
1903 return
1903 return
1904
1904
1905 if self._matchscheme(path):
1905 if self._matchscheme(path):
1906 parts = path.split(':', 1)
1906 parts = path.split(':', 1)
1907 if parts[0]:
1907 if parts[0]:
1908 self.scheme, path = parts
1908 self.scheme, path = parts
1909 self._localpath = False
1909 self._localpath = False
1910
1910
1911 if not path:
1911 if not path:
1912 path = None
1912 path = None
1913 if self._localpath:
1913 if self._localpath:
1914 self.path = ''
1914 self.path = ''
1915 return
1915 return
1916 else:
1916 else:
1917 if self._localpath:
1917 if self._localpath:
1918 self.path = path
1918 self.path = path
1919 return
1919 return
1920
1920
1921 if parsequery and '?' in path:
1921 if parsequery and '?' in path:
1922 path, self.query = path.split('?', 1)
1922 path, self.query = path.split('?', 1)
1923 if not path:
1923 if not path:
1924 path = None
1924 path = None
1925 if not self.query:
1925 if not self.query:
1926 self.query = None
1926 self.query = None
1927
1927
1928 # // is required to specify a host/authority
1928 # // is required to specify a host/authority
1929 if path and path.startswith('//'):
1929 if path and path.startswith('//'):
1930 parts = path[2:].split('/', 1)
1930 parts = path[2:].split('/', 1)
1931 if len(parts) > 1:
1931 if len(parts) > 1:
1932 self.host, path = parts
1932 self.host, path = parts
1933 else:
1933 else:
1934 self.host = parts[0]
1934 self.host = parts[0]
1935 path = None
1935 path = None
1936 if not self.host:
1936 if not self.host:
1937 self.host = None
1937 self.host = None
1938 # path of file:///d is /d
1938 # path of file:///d is /d
1939 # path of file:///d:/ is d:/, not /d:/
1939 # path of file:///d:/ is d:/, not /d:/
1940 if path and not hasdriveletter(path):
1940 if path and not hasdriveletter(path):
1941 path = '/' + path
1941 path = '/' + path
1942
1942
1943 if self.host and '@' in self.host:
1943 if self.host and '@' in self.host:
1944 self.user, self.host = self.host.rsplit('@', 1)
1944 self.user, self.host = self.host.rsplit('@', 1)
1945 if ':' in self.user:
1945 if ':' in self.user:
1946 self.user, self.passwd = self.user.split(':', 1)
1946 self.user, self.passwd = self.user.split(':', 1)
1947 if not self.host:
1947 if not self.host:
1948 self.host = None
1948 self.host = None
1949
1949
1950 # Don't split on colons in IPv6 addresses without ports
1950 # Don't split on colons in IPv6 addresses without ports
1951 if (self.host and ':' in self.host and
1951 if (self.host and ':' in self.host and
1952 not (self.host.startswith('[') and self.host.endswith(']'))):
1952 not (self.host.startswith('[') and self.host.endswith(']'))):
1953 self._hostport = self.host
1953 self._hostport = self.host
1954 self.host, self.port = self.host.rsplit(':', 1)
1954 self.host, self.port = self.host.rsplit(':', 1)
1955 if not self.host:
1955 if not self.host:
1956 self.host = None
1956 self.host = None
1957
1957
1958 if (self.host and self.scheme == 'file' and
1958 if (self.host and self.scheme == 'file' and
1959 self.host not in ('localhost', '127.0.0.1', '[::1]')):
1959 self.host not in ('localhost', '127.0.0.1', '[::1]')):
1960 raise Abort(_('file:// URLs can only refer to localhost'))
1960 raise Abort(_('file:// URLs can only refer to localhost'))
1961
1961
1962 self.path = path
1962 self.path = path
1963
1963
1964 # leave the query string escaped
1964 # leave the query string escaped
1965 for a in ('user', 'passwd', 'host', 'port',
1965 for a in ('user', 'passwd', 'host', 'port',
1966 'path', 'fragment'):
1966 'path', 'fragment'):
1967 v = getattr(self, a)
1967 v = getattr(self, a)
1968 if v is not None:
1968 if v is not None:
1969 setattr(self, a, _urlunquote(v))
1969 setattr(self, a, _urlunquote(v))
1970
1970
1971 def __repr__(self):
1971 def __repr__(self):
1972 attrs = []
1972 attrs = []
1973 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1973 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1974 'query', 'fragment'):
1974 'query', 'fragment'):
1975 v = getattr(self, a)
1975 v = getattr(self, a)
1976 if v is not None:
1976 if v is not None:
1977 attrs.append('%s: %r' % (a, v))
1977 attrs.append('%s: %r' % (a, v))
1978 return '<url %s>' % ', '.join(attrs)
1978 return '<url %s>' % ', '.join(attrs)
1979
1979
1980 def __str__(self):
1980 def __str__(self):
1981 r"""Join the URL's components back into a URL string.
1981 r"""Join the URL's components back into a URL string.
1982
1982
1983 Examples:
1983 Examples:
1984
1984
1985 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
1985 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
1986 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
1986 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
1987 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
1987 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
1988 'http://user:pw@host:80/?foo=bar&baz=42'
1988 'http://user:pw@host:80/?foo=bar&baz=42'
1989 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
1989 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
1990 'http://user:pw@host:80/?foo=bar%3dbaz'
1990 'http://user:pw@host:80/?foo=bar%3dbaz'
1991 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
1991 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
1992 'ssh://user:pw@[::1]:2200//home/joe#'
1992 'ssh://user:pw@[::1]:2200//home/joe#'
1993 >>> str(url('http://localhost:80//'))
1993 >>> str(url('http://localhost:80//'))
1994 'http://localhost:80//'
1994 'http://localhost:80//'
1995 >>> str(url('http://localhost:80/'))
1995 >>> str(url('http://localhost:80/'))
1996 'http://localhost:80/'
1996 'http://localhost:80/'
1997 >>> str(url('http://localhost:80'))
1997 >>> str(url('http://localhost:80'))
1998 'http://localhost:80/'
1998 'http://localhost:80/'
1999 >>> str(url('bundle:foo'))
1999 >>> str(url('bundle:foo'))
2000 'bundle:foo'
2000 'bundle:foo'
2001 >>> str(url('bundle://../foo'))
2001 >>> str(url('bundle://../foo'))
2002 'bundle:../foo'
2002 'bundle:../foo'
2003 >>> str(url('path'))
2003 >>> str(url('path'))
2004 'path'
2004 'path'
2005 >>> str(url('file:///tmp/foo/bar'))
2005 >>> str(url('file:///tmp/foo/bar'))
2006 'file:///tmp/foo/bar'
2006 'file:///tmp/foo/bar'
2007 >>> str(url('file:///c:/tmp/foo/bar'))
2007 >>> str(url('file:///c:/tmp/foo/bar'))
2008 'file:///c:/tmp/foo/bar'
2008 'file:///c:/tmp/foo/bar'
2009 >>> print url(r'bundle:foo\bar')
2009 >>> print url(r'bundle:foo\bar')
2010 bundle:foo\bar
2010 bundle:foo\bar
2011 >>> print url(r'file:///D:\data\hg')
2011 >>> print url(r'file:///D:\data\hg')
2012 file:///D:\data\hg
2012 file:///D:\data\hg
2013 """
2013 """
2014 if self._localpath:
2014 if self._localpath:
2015 s = self.path
2015 s = self.path
2016 if self.scheme == 'bundle':
2016 if self.scheme == 'bundle':
2017 s = 'bundle:' + s
2017 s = 'bundle:' + s
2018 if self.fragment:
2018 if self.fragment:
2019 s += '#' + self.fragment
2019 s += '#' + self.fragment
2020 return s
2020 return s
2021
2021
2022 s = self.scheme + ':'
2022 s = self.scheme + ':'
2023 if self.user or self.passwd or self.host:
2023 if self.user or self.passwd or self.host:
2024 s += '//'
2024 s += '//'
2025 elif self.scheme and (not self.path or self.path.startswith('/')
2025 elif self.scheme and (not self.path or self.path.startswith('/')
2026 or hasdriveletter(self.path)):
2026 or hasdriveletter(self.path)):
2027 s += '//'
2027 s += '//'
2028 if hasdriveletter(self.path):
2028 if hasdriveletter(self.path):
2029 s += '/'
2029 s += '/'
2030 if self.user:
2030 if self.user:
2031 s += urllib.quote(self.user, safe=self._safechars)
2031 s += urllib.quote(self.user, safe=self._safechars)
2032 if self.passwd:
2032 if self.passwd:
2033 s += ':' + urllib.quote(self.passwd, safe=self._safechars)
2033 s += ':' + urllib.quote(self.passwd, safe=self._safechars)
2034 if self.user or self.passwd:
2034 if self.user or self.passwd:
2035 s += '@'
2035 s += '@'
2036 if self.host:
2036 if self.host:
2037 if not (self.host.startswith('[') and self.host.endswith(']')):
2037 if not (self.host.startswith('[') and self.host.endswith(']')):
2038 s += urllib.quote(self.host)
2038 s += urllib.quote(self.host)
2039 else:
2039 else:
2040 s += self.host
2040 s += self.host
2041 if self.port:
2041 if self.port:
2042 s += ':' + urllib.quote(self.port)
2042 s += ':' + urllib.quote(self.port)
2043 if self.host:
2043 if self.host:
2044 s += '/'
2044 s += '/'
2045 if self.path:
2045 if self.path:
2046 # TODO: similar to the query string, we should not unescape the
2046 # TODO: similar to the query string, we should not unescape the
2047 # path when we store it; the path might contain '%2f' = '/',
2047 # path when we store it; the path might contain '%2f' = '/',
2048 # which we should *not* escape.
2048 # which we should *not* escape.
2049 s += urllib.quote(self.path, safe=self._safepchars)
2049 s += urllib.quote(self.path, safe=self._safepchars)
2050 if self.query:
2050 if self.query:
2051 # we store the query in escaped form.
2051 # we store the query in escaped form.
2052 s += '?' + self.query
2052 s += '?' + self.query
2053 if self.fragment is not None:
2053 if self.fragment is not None:
2054 s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
2054 s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
2055 return s
2055 return s
2056
2056
2057 def authinfo(self):
2057 def authinfo(self):
2058 user, passwd = self.user, self.passwd
2058 user, passwd = self.user, self.passwd
2059 try:
2059 try:
2060 self.user, self.passwd = None, None
2060 self.user, self.passwd = None, None
2061 s = str(self)
2061 s = str(self)
2062 finally:
2062 finally:
2063 self.user, self.passwd = user, passwd
2063 self.user, self.passwd = user, passwd
2064 if not self.user:
2064 if not self.user:
2065 return (s, None)
2065 return (s, None)
2066 # authinfo[1] is passed to urllib2 password manager, and its
2066 # authinfo[1] is passed to urllib2 password manager, and its
2067 # URIs must not contain credentials. The host is passed in the
2067 # URIs must not contain credentials. The host is passed in the
2068 # URIs list because Python < 2.4.3 uses only that to search for
2068 # URIs list because Python < 2.4.3 uses only that to search for
2069 # a password.
2069 # a password.
2070 return (s, (None, (s, self.host),
2070 return (s, (None, (s, self.host),
2071 self.user, self.passwd or ''))
2071 self.user, self.passwd or ''))
2072
2072
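# Editor's note (illustrative sketch, not part of the original file):
# authinfo() above returns the URL with credentials stripped, plus the
# credentials packaged for the urllib2 password manager; the host, user
# and password values below are hypothetical.
#
#     >>> url('http://joe:xyz@example.com/repo').authinfo()
#     ('http://example.com/repo', (None, ('http://example.com/repo', 'example.com'), 'joe', 'xyz'))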
2073 def isabs(self):
2073 def isabs(self):
2074 if self.scheme and self.scheme != 'file':
2074 if self.scheme and self.scheme != 'file':
2075 return True # remote URL
2075 return True # remote URL
2076 if hasdriveletter(self.path):
2076 if hasdriveletter(self.path):
2077 return True # absolute for our purposes - can't be joined()
2077 return True # absolute for our purposes - can't be joined()
2078 if self.path.startswith(r'\\'):
2078 if self.path.startswith(r'\\'):
2079 return True # Windows UNC path
2079 return True # Windows UNC path
2080 if self.path.startswith('/'):
2080 if self.path.startswith('/'):
2081 return True # POSIX-style
2081 return True # POSIX-style
2082 return False
2082 return False
2083
2083
2084 def localpath(self):
2084 def localpath(self):
2085 if self.scheme == 'file' or self.scheme == 'bundle':
2085 if self.scheme == 'file' or self.scheme == 'bundle':
2086 path = self.path or '/'
2086 path = self.path or '/'
2087 # For Windows, we need to promote hosts containing drive
2087 # For Windows, we need to promote hosts containing drive
2088 # letters to paths with drive letters.
2088 # letters to paths with drive letters.
2089 if hasdriveletter(self._hostport):
2089 if hasdriveletter(self._hostport):
2090 path = self._hostport + '/' + self.path
2090 path = self._hostport + '/' + self.path
2091 elif (self.host is not None and self.path
2091 elif (self.host is not None and self.path
2092 and not hasdriveletter(path)):
2092 and not hasdriveletter(path)):
2093 path = '/' + path
2093 path = '/' + path
2094 return path
2094 return path
2095 return self._origpath
2095 return self._origpath
2096
2096
2097 def islocal(self):
2097 def islocal(self):
2098 '''whether localpath will return something that posixfile can open'''
2098 '''whether localpath will return something that posixfile can open'''
2099 return (not self.scheme or self.scheme == 'file'
2099 return (not self.scheme or self.scheme == 'file'
2100 or self.scheme == 'bundle')
2100 or self.scheme == 'bundle')
2101
2101
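# Editor's note (illustrative sketch, not part of the original file): the
# url class above round-trips a typical remote path; the host, port and
# credential values below are hypothetical.
#
#     >>> u = url('http://joe:xyz@example.com:8080/repo?style=raw#tip')
#     >>> u.scheme, u.host, u.port, u.user, u.passwd
#     ('http', 'example.com', '8080', 'joe', 'xyz')
#     >>> str(u)
#     'http://joe:xyz@example.com:8080/repo?style=raw#tip'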
2102 def hasscheme(path):
2102 def hasscheme(path):
2103 return bool(url(path).scheme)
2103 return bool(url(path).scheme)
2104
2104
2105 def hasdriveletter(path):
2105 def hasdriveletter(path):
2106 return path and path[1:2] == ':' and path[0:1].isalpha()
2106 return path and path[1:2] == ':' and path[0:1].isalpha()
2107
2107
2108 def urllocalpath(path):
2108 def urllocalpath(path):
2109 return url(path, parsequery=False, parsefragment=False).localpath()
2109 return url(path, parsequery=False, parsefragment=False).localpath()
2110
2110
2111 def hidepassword(u):
2111 def hidepassword(u):
2112 '''hide user credential in a url string'''
2112 '''hide user credential in a url string'''
2113 u = url(u)
2113 u = url(u)
2114 if u.passwd:
2114 if u.passwd:
2115 u.passwd = '***'
2115 u.passwd = '***'
2116 return str(u)
2116 return str(u)
2117
2117
2118 def removeauth(u):
2118 def removeauth(u):
2119 '''remove all authentication information from a url string'''
2119 '''remove all authentication information from a url string'''
2120 u = url(u)
2120 u = url(u)
2121 u.user = u.passwd = None
2121 u.user = u.passwd = None
2122 return str(u)
2122 return str(u)
2123
2123
2124 def isatty(fd):
2124 def isatty(fd):
2125 try:
2125 try:
2126 return fd.isatty()
2126 return fd.isatty()
2127 except AttributeError:
2127 except AttributeError:
2128 return False
2128 return False
2129
2129
2130 timecount = unitcountfn(
2130 timecount = unitcountfn(
2131 (1, 1e3, _('%.0f s')),
2131 (1, 1e3, _('%.0f s')),
2132 (100, 1, _('%.1f s')),
2132 (100, 1, _('%.1f s')),
2133 (10, 1, _('%.2f s')),
2133 (10, 1, _('%.2f s')),
2134 (1, 1, _('%.3f s')),
2134 (1, 1, _('%.3f s')),
2135 (100, 0.001, _('%.1f ms')),
2135 (100, 0.001, _('%.1f ms')),
2136 (10, 0.001, _('%.2f ms')),
2136 (10, 0.001, _('%.2f ms')),
2137 (1, 0.001, _('%.3f ms')),
2137 (1, 0.001, _('%.3f ms')),
2138 (100, 0.000001, _('%.1f us')),
2138 (100, 0.000001, _('%.1f us')),
2139 (10, 0.000001, _('%.2f us')),
2139 (10, 0.000001, _('%.2f us')),
2140 (1, 0.000001, _('%.3f us')),
2140 (1, 0.000001, _('%.3f us')),
2141 (100, 0.000000001, _('%.1f ns')),
2141 (100, 0.000000001, _('%.1f ns')),
2142 (10, 0.000000001, _('%.2f ns')),
2142 (10, 0.000000001, _('%.2f ns')),
2143 (1, 0.000000001, _('%.3f ns')),
2143 (1, 0.000000001, _('%.3f ns')),
2144 )
2144 )
2145
2145
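# Editor's note: unitcountfn() is defined earlier in this module and is not
# shown in this hunk; assuming its usual threshold behaviour (use the first
# (factor, divisor, format) entry whose factor * divisor the value reaches,
# formatting value / divisor), the table above renders durations roughly as
# follows (illustrative, unverified):
#
#     timecount(2.5)        -> '2.500 s'
#     timecount(0.0042)     -> '4.200 ms'
#     timecount(0.0000037)  -> '3.700 us'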
2146 _timenesting = [0]
2146 _timenesting = [0]
2147
2147
2148 def timed(func):
2148 def timed(func):
2149 '''Report the execution time of a function call to stderr.
2149 '''Report the execution time of a function call to stderr.
2150
2150
2151 During development, use as a decorator when you need to measure
2151 During development, use as a decorator when you need to measure
2152 the cost of a function, e.g. as follows:
2152 the cost of a function, e.g. as follows:
2153
2153
2154 @util.timed
2154 @util.timed
2155 def foo(a, b, c):
2155 def foo(a, b, c):
2156 pass
2156 pass
2157 '''
2157 '''
2158
2158
2159 def wrapper(*args, **kwargs):
2159 def wrapper(*args, **kwargs):
2160 start = time.time()
2160 start = time.time()
2161 indent = 2
2161 indent = 2
2162 _timenesting[0] += indent
2162 _timenesting[0] += indent
2163 try:
2163 try:
2164 return func(*args, **kwargs)
2164 return func(*args, **kwargs)
2165 finally:
2165 finally:
2166 elapsed = time.time() - start
2166 elapsed = time.time() - start
2167 _timenesting[0] -= indent
2167 _timenesting[0] -= indent
2168 sys.stderr.write('%s%s: %s\n' %
2168 sys.stderr.write('%s%s: %s\n' %
2169 (' ' * _timenesting[0], func.__name__,
2169 (' ' * _timenesting[0], func.__name__,
2170 timecount(elapsed)))
2170 timecount(elapsed)))
2171 return wrapper
2171 return wrapper
2172
2172
2173 _sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
2173 _sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
2174 ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2174 ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2175
2175
2176 def sizetoint(s):
2176 def sizetoint(s):
2177 '''Convert a space specifier to a byte count.
2177 '''Convert a space specifier to a byte count.
2178
2178
2179 >>> sizetoint('30')
2179 >>> sizetoint('30')
2180 30
2180 30
2181 >>> sizetoint('2.2kb')
2181 >>> sizetoint('2.2kb')
2182 2252
2182 2252
2183 >>> sizetoint('6M')
2183 >>> sizetoint('6M')
2184 6291456
2184 6291456
2185 '''
2185 '''
2186 t = s.strip().lower()
2186 t = s.strip().lower()
2187 try:
2187 try:
2188 for k, u in _sizeunits:
2188 for k, u in _sizeunits:
2189 if t.endswith(k):
2189 if t.endswith(k):
2190 return int(float(t[:-len(k)]) * u)
2190 return int(float(t[:-len(k)]) * u)
2191 return int(t)
2191 return int(t)
2192 except ValueError:
2192 except ValueError:
2193 raise error.ParseError(_("couldn't parse size: %s") % s)
2193 raise error.ParseError(_("couldn't parse size: %s") % s)
2194
2194
2195 class hooks(object):
2195 class hooks(object):
2196 '''A collection of hook functions that can be used to extend a
2196 '''A collection of hook functions that can be used to extend a
2197 function's behaviour. Hooks are called in lexicographic order,
2197 function's behaviour. Hooks are called in lexicographic order,
2198 based on the names of their sources.'''
2198 based on the names of their sources.'''
2199
2199
2200 def __init__(self):
2200 def __init__(self):
2201 self._hooks = []
2201 self._hooks = []
2202
2202
2203 def add(self, source, hook):
2203 def add(self, source, hook):
2204 self._hooks.append((source, hook))
2204 self._hooks.append((source, hook))
2205
2205
2206 def __call__(self, *args):
2206 def __call__(self, *args):
2207 self._hooks.sort(key=lambda x: x[0])
2207 self._hooks.sort(key=lambda x: x[0])
2208 results = []
2208 results = []
2209 for source, hook in self._hooks:
2209 for source, hook in self._hooks:
2210 results.append(hook(*args))
2210 results.append(hook(*args))
2211 return results
2211 return results
2212
2212
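# Editor's note (illustrative sketch, not part of the original file): the
# hooks class above sorts by source name on every call, so registration
# order does not matter; 'ext_a' and 'ext_b' are made-up source names.
#
#     >>> h = hooks()
#     >>> h.add('ext_b', lambda x: x * 2)
#     >>> h.add('ext_a', lambda x: x + 1)
#     >>> h(10)   # the 'ext_a' hook runs first (lexicographic order)
#     [11, 20]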
2213 def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout):
2213 def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout):
2214 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
2214 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
2215 Skips the 'skip' last entries. By default it will flush stdout first.
2215 Skips the 'skip' last entries. By default it will flush stdout first.
2216 It can be used everywhere and intentionally does not require a ui object.
2216 It can be used everywhere and intentionally does not require a ui object.
2217 Not meant to be used in production code, but very convenient while developing.
2217 Not meant to be used in production code, but very convenient while developing.
2218 '''
2218 '''
2219 if otherf:
2219 if otherf:
2220 otherf.flush()
2220 otherf.flush()
2221 f.write('%s at:\n' % msg)
2221 f.write('%s at:\n' % msg)
2222 entries = [('%s:%s' % (fn, ln), func)
2222 entries = [('%s:%s' % (fn, ln), func)
2223 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]]
2223 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]]
2224 if entries:
2224 if entries:
2225 fnmax = max(len(entry[0]) for entry in entries)
2225 fnmax = max(len(entry[0]) for entry in entries)
2226 for fnln, func in entries:
2226 for fnln, func in entries:
2227 f.write(' %-*s in %s\n' % (fnmax, fnln, func))
2227 f.write(' %-*s in %s\n' % (fnmax, fnln, func))
2228 f.flush()
2228 f.flush()
2229
2229
2230 # convenient shortcut
2230 # convenient shortcut
2231 dst = debugstacktrace
2231 dst = debugstacktrace
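# Editor's note (illustrative sketch, not part of the original file):
# debugstacktrace/dst is a development aid; the file names, line numbers
# and function names in the sample output below are hypothetical.
#
#     from mercurial import util
#     util.dst('checking flow', skip=1)
#     # writes something like the following to stderr:
#     #   checking flow at:
#     #    myscript.py:12 in main
#     #    myscript.py:30 in runstuff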
@@ -1,367 +1,367 b''
1 #require hardlink
1 #require hardlink
2
2
3 $ cat > nlinks.py <<EOF
3 $ cat > nlinks.py <<EOF
4 > import sys
4 > import sys
5 > from mercurial import util
5 > from mercurial import util
6 > for f in sorted(sys.stdin.readlines()):
6 > for f in sorted(sys.stdin.readlines()):
7 > f = f[:-1]
7 > f = f[:-1]
8 > print util.nlinks(f), f
8 > print util.nlinks(f), f
9 > EOF
9 > EOF
10
10
11 $ nlinksdir()
11 $ nlinksdir()
12 > {
12 > {
13 > find $1 -type f | python $TESTTMP/nlinks.py
13 > find $1 -type f | python $TESTTMP/nlinks.py
14 > }
14 > }
15
15
16 Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
16 Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux):
17
17
18 $ cat > linkcp.py <<EOF
18 $ cat > linkcp.py <<EOF
19 > from mercurial import util
19 > from mercurial import util
20 > import sys
20 > import sys
21 > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True)
21 > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True)
22 > EOF
22 > EOF
23
23
24 $ linkcp()
24 $ linkcp()
25 > {
25 > {
26 > python $TESTTMP/linkcp.py $1 $2
26 > python $TESTTMP/linkcp.py $1 $2
27 > }
27 > }
28
28
29 Prepare repo r1:
29 Prepare repo r1:
30
30
31 $ hg init r1
31 $ hg init r1
32 $ cd r1
32 $ cd r1
33
33
34 $ echo c1 > f1
34 $ echo c1 > f1
35 $ hg add f1
35 $ hg add f1
36 $ hg ci -m0
36 $ hg ci -m0
37
37
38 $ mkdir d1
38 $ mkdir d1
39 $ cd d1
39 $ cd d1
40 $ echo c2 > f2
40 $ echo c2 > f2
41 $ hg add f2
41 $ hg add f2
42 $ hg ci -m1
42 $ hg ci -m1
43 $ cd ../..
43 $ cd ../..
44
44
45 $ nlinksdir r1/.hg/store
45 $ nlinksdir r1/.hg/store
46 1 r1/.hg/store/00changelog.i
46 1 r1/.hg/store/00changelog.i
47 1 r1/.hg/store/00manifest.i
47 1 r1/.hg/store/00manifest.i
48 1 r1/.hg/store/data/d1/f2.i
48 1 r1/.hg/store/data/d1/f2.i
49 1 r1/.hg/store/data/f1.i
49 1 r1/.hg/store/data/f1.i
50 1 r1/.hg/store/fncache
50 1 r1/.hg/store/fncache
51 1 r1/.hg/store/phaseroots
51 1 r1/.hg/store/phaseroots
52 1 r1/.hg/store/undo
52 1 r1/.hg/store/undo
53 1 r1/.hg/store/undo.backup.fncache
53 1 r1/.hg/store/undo.backup.fncache
54 1 r1/.hg/store/undo.backupfiles
54 1 r1/.hg/store/undo.backupfiles
55 1 r1/.hg/store/undo.phaseroots
55 1 r1/.hg/store/undo.phaseroots
56
56
57
57
58 Create hardlinked clone r2:
58 Create hardlinked clone r2:
59
59
60 $ hg clone -U --debug r1 r2
60 $ hg clone -U --debug r1 r2
61 linked 7 files
61 linked 7 files
62
62
63 Create non-hardlinked clone r3:
63 Create non-hardlinked clone r3:
64
64
65 $ hg clone --pull r1 r3
65 $ hg clone --pull r1 r3
66 requesting all changes
66 requesting all changes
67 adding changesets
67 adding changesets
68 adding manifests
68 adding manifests
69 adding file changes
69 adding file changes
70 added 2 changesets with 2 changes to 2 files
70 added 2 changesets with 2 changes to 2 files
71 updating to branch default
71 updating to branch default
72 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
72 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
73
73
74
74
75 Repos r1 and r2 should now contain hardlinked files:
75 Repos r1 and r2 should now contain hardlinked files:
76
76
77 $ nlinksdir r1/.hg/store
77 $ nlinksdir r1/.hg/store
78 2 r1/.hg/store/00changelog.i
78 2 r1/.hg/store/00changelog.i
79 2 r1/.hg/store/00manifest.i
79 2 r1/.hg/store/00manifest.i
80 2 r1/.hg/store/data/d1/f2.i
80 2 r1/.hg/store/data/d1/f2.i
81 2 r1/.hg/store/data/f1.i
81 2 r1/.hg/store/data/f1.i
82 2 r1/.hg/store/fncache
82 2 r1/.hg/store/fncache
83 1 r1/.hg/store/phaseroots
83 1 r1/.hg/store/phaseroots
84 1 r1/.hg/store/undo
84 1 r1/.hg/store/undo
85 1 r1/.hg/store/undo.backup.fncache
85 1 r1/.hg/store/undo.backup.fncache
86 1 r1/.hg/store/undo.backupfiles
86 1 r1/.hg/store/undo.backupfiles
87 1 r1/.hg/store/undo.phaseroots
87 1 r1/.hg/store/undo.phaseroots
88
88
89 $ nlinksdir r2/.hg/store
89 $ nlinksdir r2/.hg/store
90 2 r2/.hg/store/00changelog.i
90 2 r2/.hg/store/00changelog.i
91 2 r2/.hg/store/00manifest.i
91 2 r2/.hg/store/00manifest.i
92 2 r2/.hg/store/data/d1/f2.i
92 2 r2/.hg/store/data/d1/f2.i
93 2 r2/.hg/store/data/f1.i
93 2 r2/.hg/store/data/f1.i
94 2 r2/.hg/store/fncache
94 2 r2/.hg/store/fncache
95
95
96 Repo r3 should not be hardlinked:
96 Repo r3 should not be hardlinked:
97
97
98 $ nlinksdir r3/.hg/store
98 $ nlinksdir r3/.hg/store
99 1 r3/.hg/store/00changelog.i
99 1 r3/.hg/store/00changelog.i
100 1 r3/.hg/store/00manifest.i
100 1 r3/.hg/store/00manifest.i
101 1 r3/.hg/store/data/d1/f2.i
101 1 r3/.hg/store/data/d1/f2.i
102 1 r3/.hg/store/data/f1.i
102 1 r3/.hg/store/data/f1.i
103 1 r3/.hg/store/fncache
103 1 r3/.hg/store/fncache
104 1 r3/.hg/store/phaseroots
104 1 r3/.hg/store/phaseroots
105 1 r3/.hg/store/undo
105 1 r3/.hg/store/undo
106 1 r3/.hg/store/undo.backupfiles
106 1 r3/.hg/store/undo.backupfiles
107 1 r3/.hg/store/undo.phaseroots
107 1 r3/.hg/store/undo.phaseroots
108
108
109
109
110 Create a non-inlined filelog in r3:
110 Create a non-inlined filelog in r3:
111
111
112 $ cd r3/d1
112 $ cd r3/d1
113 >>> f = open('data1', 'wb')
113 >>> f = open('data1', 'wb')
114 >>> for x in range(10000):
114 >>> for x in range(10000):
115 ... f.write("%s\n" % str(x))
115 ... f.write("%s\n" % str(x))
116 >>> f.close()
116 >>> f.close()
117 $ for j in 0 1 2 3 4 5 6 7 8 9; do
117 $ for j in 0 1 2 3 4 5 6 7 8 9; do
118 > cat data1 >> f2
118 > cat data1 >> f2
119 > hg commit -m$j
119 > hg commit -m$j
120 > done
120 > done
121 $ cd ../..
121 $ cd ../..
122
122
123 $ nlinksdir r3/.hg/store
123 $ nlinksdir r3/.hg/store
124 1 r3/.hg/store/00changelog.i
124 1 r3/.hg/store/00changelog.i
125 1 r3/.hg/store/00manifest.i
125 1 r3/.hg/store/00manifest.i
126 1 r3/.hg/store/data/d1/f2.d
126 1 r3/.hg/store/data/d1/f2.d
127 1 r3/.hg/store/data/d1/f2.i
127 1 r3/.hg/store/data/d1/f2.i
128 1 r3/.hg/store/data/f1.i
128 1 r3/.hg/store/data/f1.i
129 1 r3/.hg/store/fncache
129 1 r3/.hg/store/fncache
130 1 r3/.hg/store/phaseroots
130 1 r3/.hg/store/phaseroots
131 1 r3/.hg/store/undo
131 1 r3/.hg/store/undo
132 1 r3/.hg/store/undo.backup.fncache
132 1 r3/.hg/store/undo.backup.fncache
133 1 r3/.hg/store/undo.backup.phaseroots
133 1 r3/.hg/store/undo.backup.phaseroots
134 1 r3/.hg/store/undo.backupfiles
134 1 r3/.hg/store/undo.backupfiles
135 1 r3/.hg/store/undo.phaseroots
135 1 r3/.hg/store/undo.phaseroots
136
136
137 Push to repo r1 should break up most hardlinks in r2:
137 Push to repo r1 should break up most hardlinks in r2:
138
138
139 $ hg -R r2 verify
139 $ hg -R r2 verify
140 checking changesets
140 checking changesets
141 checking manifests
141 checking manifests
142 crosschecking files in changesets and manifests
142 crosschecking files in changesets and manifests
143 checking files
143 checking files
144 2 files, 2 changesets, 2 total revisions
144 2 files, 2 changesets, 2 total revisions
145
145
146 $ cd r3
146 $ cd r3
147 $ hg push
147 $ hg push
148 pushing to $TESTTMP/r1 (glob)
148 pushing to $TESTTMP/r1 (glob)
149 searching for changes
149 searching for changes
150 adding changesets
150 adding changesets
151 adding manifests
151 adding manifests
152 adding file changes
152 adding file changes
153 added 10 changesets with 10 changes to 1 files
153 added 10 changesets with 10 changes to 1 files
154
154
155 $ cd ..
155 $ cd ..
156
156
157 $ nlinksdir r2/.hg/store
157 $ nlinksdir r2/.hg/store
158 1 r2/.hg/store/00changelog.i
158 1 r2/.hg/store/00changelog.i
159 1 r2/.hg/store/00manifest.i
159 1 r2/.hg/store/00manifest.i
160 1 r2/.hg/store/data/d1/f2.i
160 1 r2/.hg/store/data/d1/f2.i
161 2 r2/.hg/store/data/f1.i
161 2 r2/.hg/store/data/f1.i
162 2 r2/.hg/store/fncache
162 1 r2/.hg/store/fncache
163
163
164 $ hg -R r2 verify
164 $ hg -R r2 verify
165 checking changesets
165 checking changesets
166 checking manifests
166 checking manifests
167 crosschecking files in changesets and manifests
167 crosschecking files in changesets and manifests
168 checking files
168 checking files
169 2 files, 2 changesets, 2 total revisions
169 2 files, 2 changesets, 2 total revisions
170
170
171
171
172 $ cd r1
172 $ cd r1
173 $ hg up
173 $ hg up
174 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
174 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
175
175
176 Committing a change to f1 in r1 must break up hardlink f1.i in r2:
176 Committing a change to f1 in r1 must break up hardlink f1.i in r2:
177
177
178 $ echo c1c1 >> f1
178 $ echo c1c1 >> f1
179 $ hg ci -m00
179 $ hg ci -m00
180 $ cd ..
180 $ cd ..
181
181
182 $ nlinksdir r2/.hg/store
182 $ nlinksdir r2/.hg/store
183 1 r2/.hg/store/00changelog.i
183 1 r2/.hg/store/00changelog.i
184 1 r2/.hg/store/00manifest.i
184 1 r2/.hg/store/00manifest.i
185 1 r2/.hg/store/data/d1/f2.i
185 1 r2/.hg/store/data/d1/f2.i
186 1 r2/.hg/store/data/f1.i
186 1 r2/.hg/store/data/f1.i
187 2 r2/.hg/store/fncache
187 1 r2/.hg/store/fncache
188
188
189
189
190 $ cd r3
190 $ cd r3
191 $ hg tip --template '{rev}:{node|short}\n'
191 $ hg tip --template '{rev}:{node|short}\n'
192 11:a6451b6bc41f
192 11:a6451b6bc41f
193 $ echo bla > f1
193 $ echo bla > f1
194 $ hg ci -m1
194 $ hg ci -m1
195 $ cd ..
195 $ cd ..
196
196
197 Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
197 Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'):
198
198
199 $ linkcp r3 r4
199 $ linkcp r3 r4
200
200
201 r4 has hardlinks in the working dir (not just inside .hg):
201 r4 has hardlinks in the working dir (not just inside .hg):
202
202
203 $ nlinksdir r4
203 $ nlinksdir r4
204 2 r4/.hg/00changelog.i
204 2 r4/.hg/00changelog.i
205 2 r4/.hg/branch
205 2 r4/.hg/branch
206 2 r4/.hg/cache/branch2-served
206 2 r4/.hg/cache/branch2-served
207 2 r4/.hg/cache/rbc-names-v1
207 2 r4/.hg/cache/rbc-names-v1
208 2 r4/.hg/cache/rbc-revs-v1
208 2 r4/.hg/cache/rbc-revs-v1
209 2 r4/.hg/dirstate
209 2 r4/.hg/dirstate
210 2 r4/.hg/hgrc
210 2 r4/.hg/hgrc
211 2 r4/.hg/last-message.txt
211 2 r4/.hg/last-message.txt
212 2 r4/.hg/requires
212 2 r4/.hg/requires
213 2 r4/.hg/store/00changelog.i
213 2 r4/.hg/store/00changelog.i
214 2 r4/.hg/store/00manifest.i
214 2 r4/.hg/store/00manifest.i
215 2 r4/.hg/store/data/d1/f2.d
215 2 r4/.hg/store/data/d1/f2.d
216 2 r4/.hg/store/data/d1/f2.i
216 2 r4/.hg/store/data/d1/f2.i
217 2 r4/.hg/store/data/f1.i
217 2 r4/.hg/store/data/f1.i
218 2 r4/.hg/store/fncache
218 2 r4/.hg/store/fncache
219 2 r4/.hg/store/phaseroots
219 2 r4/.hg/store/phaseroots
220 2 r4/.hg/store/undo
220 2 r4/.hg/store/undo
221 2 r4/.hg/store/undo.backup.fncache
221 2 r4/.hg/store/undo.backup.fncache
222 2 r4/.hg/store/undo.backup.phaseroots
222 2 r4/.hg/store/undo.backup.phaseroots
223 2 r4/.hg/store/undo.backupfiles
223 2 r4/.hg/store/undo.backupfiles
224 2 r4/.hg/store/undo.phaseroots
224 2 r4/.hg/store/undo.phaseroots
225 2 r4/.hg/undo.bookmarks
225 2 r4/.hg/undo.bookmarks
226 2 r4/.hg/undo.branch
226 2 r4/.hg/undo.branch
227 2 r4/.hg/undo.desc
227 2 r4/.hg/undo.desc
228 2 r4/.hg/undo.dirstate
228 2 r4/.hg/undo.dirstate
229 2 r4/d1/data1
229 2 r4/d1/data1
230 2 r4/d1/f2
230 2 r4/d1/f2
231 2 r4/f1
231 2 r4/f1
232
232
233 Update back to revision 11 in r4 should break hardlink of file f1:
233 Update back to revision 11 in r4 should break hardlink of file f1:
234
234
235 $ hg -R r4 up 11
235 $ hg -R r4 up 11
236 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
236 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
237
237
238 $ nlinksdir r4
238 $ nlinksdir r4
239 2 r4/.hg/00changelog.i
239 2 r4/.hg/00changelog.i
240 1 r4/.hg/branch
240 1 r4/.hg/branch
241 2 r4/.hg/cache/branch2-served
241 2 r4/.hg/cache/branch2-served
242 2 r4/.hg/cache/rbc-names-v1
242 2 r4/.hg/cache/rbc-names-v1
243 2 r4/.hg/cache/rbc-revs-v1
243 2 r4/.hg/cache/rbc-revs-v1
244 1 r4/.hg/dirstate
244 1 r4/.hg/dirstate
245 2 r4/.hg/hgrc
245 2 r4/.hg/hgrc
246 2 r4/.hg/last-message.txt
246 2 r4/.hg/last-message.txt
247 2 r4/.hg/requires
247 2 r4/.hg/requires
248 2 r4/.hg/store/00changelog.i
248 2 r4/.hg/store/00changelog.i
249 2 r4/.hg/store/00manifest.i
249 2 r4/.hg/store/00manifest.i
250 2 r4/.hg/store/data/d1/f2.d
250 2 r4/.hg/store/data/d1/f2.d
251 2 r4/.hg/store/data/d1/f2.i
251 2 r4/.hg/store/data/d1/f2.i
252 2 r4/.hg/store/data/f1.i
252 2 r4/.hg/store/data/f1.i
253 2 r4/.hg/store/fncache
253 2 r4/.hg/store/fncache
254 2 r4/.hg/store/phaseroots
254 2 r4/.hg/store/phaseroots
255 2 r4/.hg/store/undo
255 2 r4/.hg/store/undo
256 2 r4/.hg/store/undo.backup.fncache
256 2 r4/.hg/store/undo.backup.fncache
257 2 r4/.hg/store/undo.backup.phaseroots
257 2 r4/.hg/store/undo.backup.phaseroots
258 2 r4/.hg/store/undo.backupfiles
258 2 r4/.hg/store/undo.backupfiles
259 2 r4/.hg/store/undo.phaseroots
259 2 r4/.hg/store/undo.phaseroots
260 2 r4/.hg/undo.bookmarks
260 2 r4/.hg/undo.bookmarks
261 2 r4/.hg/undo.branch
261 2 r4/.hg/undo.branch
262 2 r4/.hg/undo.desc
262 2 r4/.hg/undo.desc
263 2 r4/.hg/undo.dirstate
263 2 r4/.hg/undo.dirstate
264 2 r4/d1/data1
264 2 r4/d1/data1
265 2 r4/d1/f2
265 2 r4/d1/f2
266 1 r4/f1
266 1 r4/f1
267
267
268
268
269 Test hardlinking outside hg:
269 Test hardlinking outside hg:
270
270
271 $ mkdir x
271 $ mkdir x
272 $ echo foo > x/a
272 $ echo foo > x/a
273
273
274 $ linkcp x y
274 $ linkcp x y
275 $ echo bar >> y/a
275 $ echo bar >> y/a
276
276
277 No diff if hardlink:
277 No diff if hardlink:
278
278
279 $ diff x/a y/a
279 $ diff x/a y/a
280
280
281 Test mq hardlinking:
281 Test mq hardlinking:
282
282
283 $ echo "[extensions]" >> $HGRCPATH
283 $ echo "[extensions]" >> $HGRCPATH
284 $ echo "mq=" >> $HGRCPATH
284 $ echo "mq=" >> $HGRCPATH
285
285
286 $ hg init a
286 $ hg init a
287 $ cd a
287 $ cd a
288
288
289 $ hg qimport -n foo - << EOF
289 $ hg qimport -n foo - << EOF
290 > # HG changeset patch
290 > # HG changeset patch
291 > # Date 1 0
291 > # Date 1 0
292 > diff -r 2588a8b53d66 a
292 > diff -r 2588a8b53d66 a
293 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
293 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
294 > +++ b/a Wed Jul 23 15:54:29 2008 +0200
294 > +++ b/a Wed Jul 23 15:54:29 2008 +0200
295 > @@ -0,0 +1,1 @@
295 > @@ -0,0 +1,1 @@
296 > +a
296 > +a
297 > EOF
297 > EOF
298 adding foo to series file
298 adding foo to series file
299
299
300 $ hg qpush
300 $ hg qpush
301 applying foo
301 applying foo
302 now at: foo
302 now at: foo
303
303
304 $ cd ..
304 $ cd ..
305 $ linkcp a b
305 $ linkcp a b
306 $ cd b
306 $ cd b
307
307
308 $ hg qimport -n bar - << EOF
308 $ hg qimport -n bar - << EOF
309 > # HG changeset patch
309 > # HG changeset patch
310 > # Date 2 0
310 > # Date 2 0
311 > diff -r 2588a8b53d66 a
311 > diff -r 2588a8b53d66 a
312 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
312 > --- /dev/null Thu Jan 01 00:00:00 1970 +0000
313 > +++ b/b Wed Jul 23 15:54:29 2008 +0200
313 > +++ b/b Wed Jul 23 15:54:29 2008 +0200
314 > @@ -0,0 +1,1 @@
314 > @@ -0,0 +1,1 @@
315 > +b
315 > +b
316 > EOF
316 > EOF
317 adding bar to series file
317 adding bar to series file
318
318
319 $ hg qpush
319 $ hg qpush
320 applying bar
320 applying bar
321 now at: bar
321 now at: bar
322
322
323 $ cat .hg/patches/status
323 $ cat .hg/patches/status
324 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
324 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
325 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
325 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar
326
326
327 $ cat .hg/patches/series
327 $ cat .hg/patches/series
328 foo
328 foo
329 bar
329 bar
330
330
331 $ cat ../a/.hg/patches/status
331 $ cat ../a/.hg/patches/status
332 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
332 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo
333
333
334 $ cat ../a/.hg/patches/series
334 $ cat ../a/.hg/patches/series
335 foo
335 foo
336
336
337 Test tags hardlinking:
337 Test tags hardlinking:
338
338
339 $ hg qdel -r qbase:qtip
339 $ hg qdel -r qbase:qtip
340 patch foo finalized without changeset message
340 patch foo finalized without changeset message
341 patch bar finalized without changeset message
341 patch bar finalized without changeset message
342
342
343 $ hg tag -l lfoo
343 $ hg tag -l lfoo
344 $ hg tag foo
344 $ hg tag foo
345
345
346 $ cd ..
346 $ cd ..
347 $ linkcp b c
347 $ linkcp b c
348 $ cd c
348 $ cd c
349
349
350 $ hg tag -l -r 0 lbar
350 $ hg tag -l -r 0 lbar
351 $ hg tag -r 0 bar
351 $ hg tag -r 0 bar
352
352
353 $ cat .hgtags
353 $ cat .hgtags
354 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
354 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
355 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
355 430ed4828a74fa4047bc816a25500f7472ab4bfe bar
356
356
357 $ cat .hg/localtags
357 $ cat .hg/localtags
358 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
358 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
359 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
359 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar
360
360
361 $ cat ../b/.hgtags
361 $ cat ../b/.hgtags
362 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
362 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo
363
363
364 $ cat ../b/.hg/localtags
364 $ cat ../b/.hg/localtags
365 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
365 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo
366
366
367 $ cd ..
367 $ cd ..
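The link counts checked throughout this test come from util.nlinks, which
reports the filesystem hardlink count of each file. The only expectations
that change in this test are the fncache counts after the push and the
commit, dropping from 2 to 1 now that transaction backups are no longer
hardlinked (the point of this changeset). A minimal stand-alone equivalent
of the nlinks.py helper above, using only the standard library (a sketch,
assuming POSIX-style st_nlink semantics rather than util.nlinks itself):

  import os
  import sys

  # Print "<hardlink count> <path>" for each file name read from stdin,
  # mirroring what nlinks.py does via util.nlinks.
  for name in sorted(sys.stdin.read().splitlines()):
      if name:
          print os.lstat(name).st_nlink, name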