util: use absolute_import
Gregory Szorc
r27358:ac839ee4 default
@@ -1,2484 +1,2504
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 import i18n
16 from __future__ import absolute_import
17 _ = i18n._
17
18 import error, osutil, encoding, parsers
18 import bz2
19 import errno, shutil, sys, tempfile, traceback
19 import calendar
20 import collections
21 import datetime
22 import errno
23 import gc
24 import hashlib
25 import imp
26 import os
20 import re as remod
27 import re as remod
21 import os, time, datetime, calendar, textwrap, signal, collections
28 import shutil
22 import imp, socket, urllib
29 import signal
23 import gc
30 import socket
24 import bz2
31 import subprocess
32 import sys
33 import tempfile
34 import textwrap
35 import time
36 import traceback
37 import urllib
25 import zlib
38 import zlib
26 import hashlib
39
40 from . import (
41 encoding,
42 error,
43 i18n,
44 osutil,
45 parsers,
46 )
27
47
28 if os.name == 'nt':
48 if os.name == 'nt':
29 import windows as platform
49 from . import windows as platform
30 else:
50 else:
31 import posix as platform
51 from . import posix as platform
32
52
33 md5 = hashlib.md5
53 md5 = hashlib.md5
34 sha1 = hashlib.sha1
54 sha1 = hashlib.sha1
35 sha512 = hashlib.sha512
55 sha512 = hashlib.sha512
56 _ = i18n._
36
57
37 cachestat = platform.cachestat
58 cachestat = platform.cachestat
38 checkexec = platform.checkexec
59 checkexec = platform.checkexec
39 checklink = platform.checklink
60 checklink = platform.checklink
40 copymode = platform.copymode
61 copymode = platform.copymode
41 executablepath = platform.executablepath
62 executablepath = platform.executablepath
42 expandglobs = platform.expandglobs
63 expandglobs = platform.expandglobs
43 explainexit = platform.explainexit
64 explainexit = platform.explainexit
44 findexe = platform.findexe
65 findexe = platform.findexe
45 gethgcmd = platform.gethgcmd
66 gethgcmd = platform.gethgcmd
46 getuser = platform.getuser
67 getuser = platform.getuser
47 groupmembers = platform.groupmembers
68 groupmembers = platform.groupmembers
48 groupname = platform.groupname
69 groupname = platform.groupname
49 hidewindow = platform.hidewindow
70 hidewindow = platform.hidewindow
50 isexec = platform.isexec
71 isexec = platform.isexec
51 isowner = platform.isowner
72 isowner = platform.isowner
52 localpath = platform.localpath
73 localpath = platform.localpath
53 lookupreg = platform.lookupreg
74 lookupreg = platform.lookupreg
54 makedir = platform.makedir
75 makedir = platform.makedir
55 nlinks = platform.nlinks
76 nlinks = platform.nlinks
56 normpath = platform.normpath
77 normpath = platform.normpath
57 normcase = platform.normcase
78 normcase = platform.normcase
58 normcasespec = platform.normcasespec
79 normcasespec = platform.normcasespec
59 normcasefallback = platform.normcasefallback
80 normcasefallback = platform.normcasefallback
60 openhardlinks = platform.openhardlinks
81 openhardlinks = platform.openhardlinks
61 oslink = platform.oslink
82 oslink = platform.oslink
62 parsepatchoutput = platform.parsepatchoutput
83 parsepatchoutput = platform.parsepatchoutput
63 pconvert = platform.pconvert
84 pconvert = platform.pconvert
64 poll = platform.poll
85 poll = platform.poll
65 popen = platform.popen
86 popen = platform.popen
66 posixfile = platform.posixfile
87 posixfile = platform.posixfile
67 quotecommand = platform.quotecommand
88 quotecommand = platform.quotecommand
68 readpipe = platform.readpipe
89 readpipe = platform.readpipe
69 rename = platform.rename
90 rename = platform.rename
70 removedirs = platform.removedirs
91 removedirs = platform.removedirs
71 samedevice = platform.samedevice
92 samedevice = platform.samedevice
72 samefile = platform.samefile
93 samefile = platform.samefile
73 samestat = platform.samestat
94 samestat = platform.samestat
74 setbinary = platform.setbinary
95 setbinary = platform.setbinary
75 setflags = platform.setflags
96 setflags = platform.setflags
76 setsignalhandler = platform.setsignalhandler
97 setsignalhandler = platform.setsignalhandler
77 shellquote = platform.shellquote
98 shellquote = platform.shellquote
78 spawndetached = platform.spawndetached
99 spawndetached = platform.spawndetached
79 split = platform.split
100 split = platform.split
80 sshargs = platform.sshargs
101 sshargs = platform.sshargs
81 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
102 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
82 statisexec = platform.statisexec
103 statisexec = platform.statisexec
83 statislink = platform.statislink
104 statislink = platform.statislink
84 termwidth = platform.termwidth
105 termwidth = platform.termwidth
85 testpid = platform.testpid
106 testpid = platform.testpid
86 umask = platform.umask
107 umask = platform.umask
87 unlink = platform.unlink
108 unlink = platform.unlink
88 unlinkpath = platform.unlinkpath
109 unlinkpath = platform.unlinkpath
89 username = platform.username
110 username = platform.username
90
111
91 # Python compatibility
112 # Python compatibility
92
113
93 _notset = object()
114 _notset = object()
94
115
95 # disable Python's problematic floating point timestamps (issue4836)
116 # disable Python's problematic floating point timestamps (issue4836)
96 # (Python hypocritically says you shouldn't change this behavior in
117 # (Python hypocritically says you shouldn't change this behavior in
97 # libraries, and sure enough Mercurial is not a library.)
118 # libraries, and sure enough Mercurial is not a library.)
98 os.stat_float_times(False)
119 os.stat_float_times(False)
99
120
100 def safehasattr(thing, attr):
121 def safehasattr(thing, attr):
101 return getattr(thing, attr, _notset) is not _notset
122 return getattr(thing, attr, _notset) is not _notset
102
123
103 DIGESTS = {
124 DIGESTS = {
104 'md5': md5,
125 'md5': md5,
105 'sha1': sha1,
126 'sha1': sha1,
106 'sha512': sha512,
127 'sha512': sha512,
107 }
128 }
108 # List of digest types from strongest to weakest
129 # List of digest types from strongest to weakest
109 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
130 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
110
131
111 for k in DIGESTS_BY_STRENGTH:
132 for k in DIGESTS_BY_STRENGTH:
112 assert k in DIGESTS
133 assert k in DIGESTS
113
134
114 class digester(object):
135 class digester(object):
115 """helper to compute digests.
136 """helper to compute digests.
116
137
117 This helper can be used to compute one or more digests given their name.
138 This helper can be used to compute one or more digests given their name.
118
139
119 >>> d = digester(['md5', 'sha1'])
140 >>> d = digester(['md5', 'sha1'])
120 >>> d.update('foo')
141 >>> d.update('foo')
121 >>> [k for k in sorted(d)]
142 >>> [k for k in sorted(d)]
122 ['md5', 'sha1']
143 ['md5', 'sha1']
123 >>> d['md5']
144 >>> d['md5']
124 'acbd18db4cc2f85cedef654fccc4a4d8'
145 'acbd18db4cc2f85cedef654fccc4a4d8'
125 >>> d['sha1']
146 >>> d['sha1']
126 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
147 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
127 >>> digester.preferred(['md5', 'sha1'])
148 >>> digester.preferred(['md5', 'sha1'])
128 'sha1'
149 'sha1'
129 """
150 """
130
151
131 def __init__(self, digests, s=''):
152 def __init__(self, digests, s=''):
132 self._hashes = {}
153 self._hashes = {}
133 for k in digests:
154 for k in digests:
134 if k not in DIGESTS:
155 if k not in DIGESTS:
135 raise Abort(_('unknown digest type: %s') % k)
156 raise Abort(_('unknown digest type: %s') % k)
136 self._hashes[k] = DIGESTS[k]()
157 self._hashes[k] = DIGESTS[k]()
137 if s:
158 if s:
138 self.update(s)
159 self.update(s)
139
160
140 def update(self, data):
161 def update(self, data):
141 for h in self._hashes.values():
162 for h in self._hashes.values():
142 h.update(data)
163 h.update(data)
143
164
144 def __getitem__(self, key):
165 def __getitem__(self, key):
145 if key not in DIGESTS:
166 if key not in DIGESTS:
146 raise Abort(_('unknown digest type: %s') % key)
167 raise Abort(_('unknown digest type: %s') % key)
147 return self._hashes[key].hexdigest()
168 return self._hashes[key].hexdigest()
148
169
149 def __iter__(self):
170 def __iter__(self):
150 return iter(self._hashes)
171 return iter(self._hashes)
151
172
152 @staticmethod
173 @staticmethod
153 def preferred(supported):
174 def preferred(supported):
154 """returns the strongest digest type in both supported and DIGESTS."""
175 """returns the strongest digest type in both supported and DIGESTS."""
155
176
156 for k in DIGESTS_BY_STRENGTH:
177 for k in DIGESTS_BY_STRENGTH:
157 if k in supported:
178 if k in supported:
158 return k
179 return k
159 return None
180 return None
160
181
161 class digestchecker(object):
182 class digestchecker(object):
162 """file handle wrapper that additionally checks content against a given
183 """file handle wrapper that additionally checks content against a given
163 size and digests.
184 size and digests.
164
185
165 d = digestchecker(fh, size, {'md5': '...'})
186 d = digestchecker(fh, size, {'md5': '...'})
166
187
167 When multiple digests are given, all of them are validated.
188 When multiple digests are given, all of them are validated.
168 """
189 """
169
190
170 def __init__(self, fh, size, digests):
191 def __init__(self, fh, size, digests):
171 self._fh = fh
192 self._fh = fh
172 self._size = size
193 self._size = size
173 self._got = 0
194 self._got = 0
174 self._digests = dict(digests)
195 self._digests = dict(digests)
175 self._digester = digester(self._digests.keys())
196 self._digester = digester(self._digests.keys())
176
197
177 def read(self, length=-1):
198 def read(self, length=-1):
178 content = self._fh.read(length)
199 content = self._fh.read(length)
179 self._digester.update(content)
200 self._digester.update(content)
180 self._got += len(content)
201 self._got += len(content)
181 return content
202 return content
182
203
183 def validate(self):
204 def validate(self):
184 if self._size != self._got:
205 if self._size != self._got:
185 raise Abort(_('size mismatch: expected %d, got %d') %
206 raise Abort(_('size mismatch: expected %d, got %d') %
186 (self._size, self._got))
207 (self._size, self._got))
187 for k, v in self._digests.items():
208 for k, v in self._digests.items():
188 if v != self._digester[k]:
209 if v != self._digester[k]:
189 # i18n: first parameter is a digest name
210 # i18n: first parameter is a digest name
190 raise Abort(_('%s mismatch: expected %s, got %s') %
211 raise Abort(_('%s mismatch: expected %s, got %s') %
191 (k, v, self._digester[k]))
212 (k, v, self._digester[k]))
192
213
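# A minimal usage sketch for digestchecker, assuming an in-memory file
# object; the md5 value is the digest of 'foo' quoted in the digester
# docstring above.
import io
_fh = io.BytesIO('foo')
_checker = digestchecker(_fh, 3, {'md5': 'acbd18db4cc2f85cedef654fccc4a4d8'})
_checker.read()
_checker.validate()  # raises Abort on a size or digest mismatch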
193 try:
214 try:
194 buffer = buffer
215 buffer = buffer
195 except NameError:
216 except NameError:
196 if sys.version_info[0] < 3:
217 if sys.version_info[0] < 3:
197 def buffer(sliceable, offset=0):
218 def buffer(sliceable, offset=0):
198 return sliceable[offset:]
219 return sliceable[offset:]
199 else:
220 else:
200 def buffer(sliceable, offset=0):
221 def buffer(sliceable, offset=0):
201 return memoryview(sliceable)[offset:]
222 return memoryview(sliceable)[offset:]
202
223
203 import subprocess
204 closefds = os.name == 'posix'
224 closefds = os.name == 'posix'
205
225
206 _chunksize = 4096
226 _chunksize = 4096
207
227
208 class bufferedinputpipe(object):
228 class bufferedinputpipe(object):
209 """a manually buffered input pipe
229 """a manually buffered input pipe
210
230
211 Python will not let us use buffered IO and lazy reading with 'polling' at
231 Python will not let us use buffered IO and lazy reading with 'polling' at
212 the same time. We cannot probe the buffer state and select will not detect
232 the same time. We cannot probe the buffer state and select will not detect
213 that data are ready to read if they are already buffered.
233 that data are ready to read if they are already buffered.
214
234
215 This class lets us work around that by implementing its own buffering
235 This class lets us work around that by implementing its own buffering
216 (allowing efficient readline) while offering a way to know if the buffer is
236 (allowing efficient readline) while offering a way to know if the buffer is
217 empty from the output (allowing collaboration of the buffer with polling).
237 empty from the output (allowing collaboration of the buffer with polling).
218
238
219 This class lives in the 'util' module because it makes use of the 'os'
239 This class lives in the 'util' module because it makes use of the 'os'
220 module from the python stdlib.
240 module from the python stdlib.
221 """
241 """
222
242
223 def __init__(self, input):
243 def __init__(self, input):
224 self._input = input
244 self._input = input
225 self._buffer = []
245 self._buffer = []
226 self._eof = False
246 self._eof = False
227 self._lenbuf = 0
247 self._lenbuf = 0
228
248
229 @property
249 @property
230 def hasbuffer(self):
250 def hasbuffer(self):
231 """True is any data is currently buffered
251 """True is any data is currently buffered
232
252
233 This will be used externally as a pre-step for polling IO. If there is
253 This will be used externally as a pre-step for polling IO. If there is
234 already data then no polling should be set in place."""
254 already data then no polling should be set in place."""
235 return bool(self._buffer)
255 return bool(self._buffer)
236
256
237 @property
257 @property
238 def closed(self):
258 def closed(self):
239 return self._input.closed
259 return self._input.closed
240
260
241 def fileno(self):
261 def fileno(self):
242 return self._input.fileno()
262 return self._input.fileno()
243
263
244 def close(self):
264 def close(self):
245 return self._input.close()
265 return self._input.close()
246
266
247 def read(self, size):
267 def read(self, size):
248 while (not self._eof) and (self._lenbuf < size):
268 while (not self._eof) and (self._lenbuf < size):
249 self._fillbuffer()
269 self._fillbuffer()
250 return self._frombuffer(size)
270 return self._frombuffer(size)
251
271
252 def readline(self, *args, **kwargs):
272 def readline(self, *args, **kwargs):
253 if 1 < len(self._buffer):
273 if 1 < len(self._buffer):
254 # this should not happen because both read and readline end with a
274 # this should not happen because both read and readline end with a
255 # _frombuffer call that collapses it.
275 # _frombuffer call that collapses it.
256 self._buffer = [''.join(self._buffer)]
276 self._buffer = [''.join(self._buffer)]
257 self._lenbuf = len(self._buffer[0])
277 self._lenbuf = len(self._buffer[0])
258 lfi = -1
278 lfi = -1
259 if self._buffer:
279 if self._buffer:
260 lfi = self._buffer[-1].find('\n')
280 lfi = self._buffer[-1].find('\n')
261 while (not self._eof) and lfi < 0:
281 while (not self._eof) and lfi < 0:
262 self._fillbuffer()
282 self._fillbuffer()
263 if self._buffer:
283 if self._buffer:
264 lfi = self._buffer[-1].find('\n')
284 lfi = self._buffer[-1].find('\n')
265 size = lfi + 1
285 size = lfi + 1
266 if lfi < 0: # end of file
286 if lfi < 0: # end of file
267 size = self._lenbuf
287 size = self._lenbuf
268 elif 1 < len(self._buffer):
288 elif 1 < len(self._buffer):
269 # we need to take previous chunks into account
289 # we need to take previous chunks into account
270 size += self._lenbuf - len(self._buffer[-1])
290 size += self._lenbuf - len(self._buffer[-1])
271 return self._frombuffer(size)
291 return self._frombuffer(size)
272
292
273 def _frombuffer(self, size):
293 def _frombuffer(self, size):
274 """return at most 'size' data from the buffer
294 """return at most 'size' data from the buffer
275
295
276 The data are removed from the buffer."""
296 The data are removed from the buffer."""
277 if size == 0 or not self._buffer:
297 if size == 0 or not self._buffer:
278 return ''
298 return ''
279 buf = self._buffer[0]
299 buf = self._buffer[0]
280 if 1 < len(self._buffer):
300 if 1 < len(self._buffer):
281 buf = ''.join(self._buffer)
301 buf = ''.join(self._buffer)
282
302
283 data = buf[:size]
303 data = buf[:size]
284 buf = buf[len(data):]
304 buf = buf[len(data):]
285 if buf:
305 if buf:
286 self._buffer = [buf]
306 self._buffer = [buf]
287 self._lenbuf = len(buf)
307 self._lenbuf = len(buf)
288 else:
308 else:
289 self._buffer = []
309 self._buffer = []
290 self._lenbuf = 0
310 self._lenbuf = 0
291 return data
311 return data
292
312
293 def _fillbuffer(self):
313 def _fillbuffer(self):
294 """read data to the buffer"""
314 """read data to the buffer"""
295 data = os.read(self._input.fileno(), _chunksize)
315 data = os.read(self._input.fileno(), _chunksize)
296 if not data:
316 if not data:
297 self._eof = True
317 self._eof = True
298 else:
318 else:
299 self._lenbuf += len(data)
319 self._lenbuf += len(data)
300 self._buffer.append(data)
320 self._buffer.append(data)
301
321
302 def popen2(cmd, env=None, newlines=False):
322 def popen2(cmd, env=None, newlines=False):
303 # Setting bufsize to -1 lets the system decide the buffer size.
323 # Setting bufsize to -1 lets the system decide the buffer size.
304 # The default for bufsize is 0, meaning unbuffered. This leads to
324 # The default for bufsize is 0, meaning unbuffered. This leads to
305 # poor performance on Mac OS X: http://bugs.python.org/issue4194
325 # poor performance on Mac OS X: http://bugs.python.org/issue4194
306 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
326 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
307 close_fds=closefds,
327 close_fds=closefds,
308 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
328 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
309 universal_newlines=newlines,
329 universal_newlines=newlines,
310 env=env)
330 env=env)
311 return p.stdin, p.stdout
331 return p.stdin, p.stdout
312
332
313 def popen3(cmd, env=None, newlines=False):
333 def popen3(cmd, env=None, newlines=False):
314 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
334 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
315 return stdin, stdout, stderr
335 return stdin, stdout, stderr
316
336
317 def popen4(cmd, env=None, newlines=False, bufsize=-1):
337 def popen4(cmd, env=None, newlines=False, bufsize=-1):
318 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
338 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
319 close_fds=closefds,
339 close_fds=closefds,
320 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
340 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
321 stderr=subprocess.PIPE,
341 stderr=subprocess.PIPE,
322 universal_newlines=newlines,
342 universal_newlines=newlines,
323 env=env)
343 env=env)
324 return p.stdin, p.stdout, p.stderr, p
344 return p.stdin, p.stdout, p.stderr, p
325
345
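# A hedged sketch of how these pipes combine with bufferedinputpipe and the
# platform poll() helper: only poll when nothing is already buffered. The
# command string below is illustrative.
_in, _out = popen2('hg serve --stdio')
_pipe = bufferedinputpipe(_out)
if not _pipe.hasbuffer:
    poll([_pipe.fileno()])  # wait until the child has produced output
_banner = _pipe.readline()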
326 def version():
346 def version():
327 """Return version information if available."""
347 """Return version information if available."""
328 try:
348 try:
329 import __version__
349 from . import __version__
330 return __version__.version
350 return __version__.version
331 except ImportError:
351 except ImportError:
332 return 'unknown'
352 return 'unknown'
333
353
334 def versiontuple(v=None, n=4):
354 def versiontuple(v=None, n=4):
335 """Parses a Mercurial version string into an N-tuple.
355 """Parses a Mercurial version string into an N-tuple.
336
356
337 The version string to be parsed is specified with the ``v`` argument.
357 The version string to be parsed is specified with the ``v`` argument.
338 If it isn't defined, the current Mercurial version string will be parsed.
358 If it isn't defined, the current Mercurial version string will be parsed.
339
359
340 ``n`` can be 2, 3, or 4. Here is how some version strings map to
360 ``n`` can be 2, 3, or 4. Here is how some version strings map to
341 returned values:
361 returned values:
342
362
343 >>> v = '3.6.1+190-df9b73d2d444'
363 >>> v = '3.6.1+190-df9b73d2d444'
344 >>> versiontuple(v, 2)
364 >>> versiontuple(v, 2)
345 (3, 6)
365 (3, 6)
346 >>> versiontuple(v, 3)
366 >>> versiontuple(v, 3)
347 (3, 6, 1)
367 (3, 6, 1)
348 >>> versiontuple(v, 4)
368 >>> versiontuple(v, 4)
349 (3, 6, 1, '190-df9b73d2d444')
369 (3, 6, 1, '190-df9b73d2d444')
350
370
351 >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
371 >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
352 (3, 6, 1, '190-df9b73d2d444+20151118')
372 (3, 6, 1, '190-df9b73d2d444+20151118')
353
373
354 >>> v = '3.6'
374 >>> v = '3.6'
355 >>> versiontuple(v, 2)
375 >>> versiontuple(v, 2)
356 (3, 6)
376 (3, 6)
357 >>> versiontuple(v, 3)
377 >>> versiontuple(v, 3)
358 (3, 6, None)
378 (3, 6, None)
359 >>> versiontuple(v, 4)
379 >>> versiontuple(v, 4)
360 (3, 6, None, None)
380 (3, 6, None, None)
361 """
381 """
362 if not v:
382 if not v:
363 v = version()
383 v = version()
364 parts = v.split('+', 1)
384 parts = v.split('+', 1)
365 if len(parts) == 1:
385 if len(parts) == 1:
366 vparts, extra = parts[0], None
386 vparts, extra = parts[0], None
367 else:
387 else:
368 vparts, extra = parts
388 vparts, extra = parts
369
389
370 vints = []
390 vints = []
371 for i in vparts.split('.'):
391 for i in vparts.split('.'):
372 try:
392 try:
373 vints.append(int(i))
393 vints.append(int(i))
374 except ValueError:
394 except ValueError:
375 break
395 break
376 # (3, 6) -> (3, 6, None)
396 # (3, 6) -> (3, 6, None)
377 while len(vints) < 3:
397 while len(vints) < 3:
378 vints.append(None)
398 vints.append(None)
379
399
380 if n == 2:
400 if n == 2:
381 return (vints[0], vints[1])
401 return (vints[0], vints[1])
382 if n == 3:
402 if n == 3:
383 return (vints[0], vints[1], vints[2])
403 return (vints[0], vints[1], vints[2])
384 if n == 4:
404 if n == 4:
385 return (vints[0], vints[1], vints[2], extra)
405 return (vints[0], vints[1], vints[2], extra)
386
406
387 # used by parsedate
407 # used by parsedate
388 defaultdateformats = (
408 defaultdateformats = (
389 '%Y-%m-%d %H:%M:%S',
409 '%Y-%m-%d %H:%M:%S',
390 '%Y-%m-%d %I:%M:%S%p',
410 '%Y-%m-%d %I:%M:%S%p',
391 '%Y-%m-%d %H:%M',
411 '%Y-%m-%d %H:%M',
392 '%Y-%m-%d %I:%M%p',
412 '%Y-%m-%d %I:%M%p',
393 '%Y-%m-%d',
413 '%Y-%m-%d',
394 '%m-%d',
414 '%m-%d',
395 '%m/%d',
415 '%m/%d',
396 '%m/%d/%y',
416 '%m/%d/%y',
397 '%m/%d/%Y',
417 '%m/%d/%Y',
398 '%a %b %d %H:%M:%S %Y',
418 '%a %b %d %H:%M:%S %Y',
399 '%a %b %d %I:%M:%S%p %Y',
419 '%a %b %d %I:%M:%S%p %Y',
400 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
420 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
401 '%b %d %H:%M:%S %Y',
421 '%b %d %H:%M:%S %Y',
402 '%b %d %I:%M:%S%p %Y',
422 '%b %d %I:%M:%S%p %Y',
403 '%b %d %H:%M:%S',
423 '%b %d %H:%M:%S',
404 '%b %d %I:%M:%S%p',
424 '%b %d %I:%M:%S%p',
405 '%b %d %H:%M',
425 '%b %d %H:%M',
406 '%b %d %I:%M%p',
426 '%b %d %I:%M%p',
407 '%b %d %Y',
427 '%b %d %Y',
408 '%b %d',
428 '%b %d',
409 '%H:%M:%S',
429 '%H:%M:%S',
410 '%I:%M:%S%p',
430 '%I:%M:%S%p',
411 '%H:%M',
431 '%H:%M',
412 '%I:%M%p',
432 '%I:%M%p',
413 )
433 )
414
434
415 extendeddateformats = defaultdateformats + (
435 extendeddateformats = defaultdateformats + (
416 "%Y",
436 "%Y",
417 "%Y-%m",
437 "%Y-%m",
418 "%b",
438 "%b",
419 "%b %Y",
439 "%b %Y",
420 )
440 )
421
441
422 def cachefunc(func):
442 def cachefunc(func):
423 '''cache the result of function calls'''
443 '''cache the result of function calls'''
424 # XXX doesn't handle keyword args
444 # XXX doesn't handle keyword args
425 if func.func_code.co_argcount == 0:
445 if func.func_code.co_argcount == 0:
426 cache = []
446 cache = []
427 def f():
447 def f():
428 if len(cache) == 0:
448 if len(cache) == 0:
429 cache.append(func())
449 cache.append(func())
430 return cache[0]
450 return cache[0]
431 return f
451 return f
432 cache = {}
452 cache = {}
433 if func.func_code.co_argcount == 1:
453 if func.func_code.co_argcount == 1:
434 # we gain a small amount of time because
454 # we gain a small amount of time because
435 # we don't need to pack/unpack the list
455 # we don't need to pack/unpack the list
436 def f(arg):
456 def f(arg):
437 if arg not in cache:
457 if arg not in cache:
438 cache[arg] = func(arg)
458 cache[arg] = func(arg)
439 return cache[arg]
459 return cache[arg]
440 else:
460 else:
441 def f(*args):
461 def f(*args):
442 if args not in cache:
462 if args not in cache:
443 cache[args] = func(*args)
463 cache[args] = func(*args)
444 return cache[args]
464 return cache[args]
445
465
446 return f
466 return f
447
467
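# A small sketch of cachefunc's memoization on a single-argument function
# (the function below is made up for illustration).
_calls = []
def _square(x):
    _calls.append(x)
    return x * x
_cached = cachefunc(_square)
assert _cached(3) == 9 and _cached(3) == 9
assert _calls == [3]  # the wrapped function ran only once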
448 class sortdict(dict):
468 class sortdict(dict):
449 '''a simple sorted dictionary'''
469 '''a simple sorted dictionary'''
450 def __init__(self, data=None):
470 def __init__(self, data=None):
451 self._list = []
471 self._list = []
452 if data:
472 if data:
453 self.update(data)
473 self.update(data)
454 def copy(self):
474 def copy(self):
455 return sortdict(self)
475 return sortdict(self)
456 def __setitem__(self, key, val):
476 def __setitem__(self, key, val):
457 if key in self:
477 if key in self:
458 self._list.remove(key)
478 self._list.remove(key)
459 self._list.append(key)
479 self._list.append(key)
460 dict.__setitem__(self, key, val)
480 dict.__setitem__(self, key, val)
461 def __iter__(self):
481 def __iter__(self):
462 return self._list.__iter__()
482 return self._list.__iter__()
463 def update(self, src):
483 def update(self, src):
464 if isinstance(src, dict):
484 if isinstance(src, dict):
465 src = src.iteritems()
485 src = src.iteritems()
466 for k, v in src:
486 for k, v in src:
467 self[k] = v
487 self[k] = v
468 def clear(self):
488 def clear(self):
469 dict.clear(self)
489 dict.clear(self)
470 self._list = []
490 self._list = []
471 def items(self):
491 def items(self):
472 return [(k, self[k]) for k in self._list]
492 return [(k, self[k]) for k in self._list]
473 def __delitem__(self, key):
493 def __delitem__(self, key):
474 dict.__delitem__(self, key)
494 dict.__delitem__(self, key)
475 self._list.remove(key)
495 self._list.remove(key)
476 def pop(self, key, *args, **kwargs):
496 def pop(self, key, *args, **kwargs):
477 dict.pop(self, key, *args, **kwargs)
497 dict.pop(self, key, *args, **kwargs)
478 try:
498 try:
479 self._list.remove(key)
499 self._list.remove(key)
480 except ValueError:
500 except ValueError:
481 pass
501 pass
482 def keys(self):
502 def keys(self):
483 return self._list
503 return self._list
484 def iterkeys(self):
504 def iterkeys(self):
485 return self._list.__iter__()
505 return self._list.__iter__()
486 def iteritems(self):
506 def iteritems(self):
487 for k in self._list:
507 for k in self._list:
488 yield k, self[k]
508 yield k, self[k]
489 def insert(self, index, key, val):
509 def insert(self, index, key, val):
490 self._list.insert(index, key)
510 self._list.insert(index, key)
491 dict.__setitem__(self, key, val)
511 dict.__setitem__(self, key, val)
492
512
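# sortdict keeps insertion order, and re-assigning an existing key moves it
# to the end (keys and values here are illustrative).
_d = sortdict([('b', 1), ('a', 2)])
assert _d.keys() == ['b', 'a']
_d['b'] = 3
assert _d.keys() == ['a', 'b']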
493 class lrucachedict(object):
513 class lrucachedict(object):
494 '''cache most recent gets from or sets to this dictionary'''
514 '''cache most recent gets from or sets to this dictionary'''
495 def __init__(self, maxsize):
515 def __init__(self, maxsize):
496 self._cache = {}
516 self._cache = {}
497 self._maxsize = maxsize
517 self._maxsize = maxsize
498 self._order = collections.deque()
518 self._order = collections.deque()
499
519
500 def __getitem__(self, key):
520 def __getitem__(self, key):
501 value = self._cache[key]
521 value = self._cache[key]
502 self._order.remove(key)
522 self._order.remove(key)
503 self._order.append(key)
523 self._order.append(key)
504 return value
524 return value
505
525
506 def __setitem__(self, key, value):
526 def __setitem__(self, key, value):
507 if key not in self._cache:
527 if key not in self._cache:
508 if len(self._cache) >= self._maxsize:
528 if len(self._cache) >= self._maxsize:
509 del self._cache[self._order.popleft()]
529 del self._cache[self._order.popleft()]
510 else:
530 else:
511 self._order.remove(key)
531 self._order.remove(key)
512 self._cache[key] = value
532 self._cache[key] = value
513 self._order.append(key)
533 self._order.append(key)
514
534
515 def __contains__(self, key):
535 def __contains__(self, key):
516 return key in self._cache
536 return key in self._cache
517
537
518 def clear(self):
538 def clear(self):
519 self._cache.clear()
539 self._cache.clear()
520 self._order = collections.deque()
540 self._order = collections.deque()
521
541
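# A sketch of the eviction policy with an illustrative maxsize of 2:
# reading 'a' refreshes it, so 'b' is the entry dropped when 'c' arrives.
_c = lrucachedict(2)
_c['a'] = 1
_c['b'] = 2
assert _c['a'] == 1
_c['c'] = 3
assert 'b' not in _c and 'a' in _c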
522 def lrucachefunc(func):
542 def lrucachefunc(func):
523 '''cache most recent results of function calls'''
543 '''cache most recent results of function calls'''
524 cache = {}
544 cache = {}
525 order = collections.deque()
545 order = collections.deque()
526 if func.func_code.co_argcount == 1:
546 if func.func_code.co_argcount == 1:
527 def f(arg):
547 def f(arg):
528 if arg not in cache:
548 if arg not in cache:
529 if len(cache) > 20:
549 if len(cache) > 20:
530 del cache[order.popleft()]
550 del cache[order.popleft()]
531 cache[arg] = func(arg)
551 cache[arg] = func(arg)
532 else:
552 else:
533 order.remove(arg)
553 order.remove(arg)
534 order.append(arg)
554 order.append(arg)
535 return cache[arg]
555 return cache[arg]
536 else:
556 else:
537 def f(*args):
557 def f(*args):
538 if args not in cache:
558 if args not in cache:
539 if len(cache) > 20:
559 if len(cache) > 20:
540 del cache[order.popleft()]
560 del cache[order.popleft()]
541 cache[args] = func(*args)
561 cache[args] = func(*args)
542 else:
562 else:
543 order.remove(args)
563 order.remove(args)
544 order.append(args)
564 order.append(args)
545 return cache[args]
565 return cache[args]
546
566
547 return f
567 return f
548
568
549 class propertycache(object):
569 class propertycache(object):
550 def __init__(self, func):
570 def __init__(self, func):
551 self.func = func
571 self.func = func
552 self.name = func.__name__
572 self.name = func.__name__
553 def __get__(self, obj, type=None):
573 def __get__(self, obj, type=None):
554 result = self.func(obj)
574 result = self.func(obj)
555 self.cachevalue(obj, result)
575 self.cachevalue(obj, result)
556 return result
576 return result
557
577
558 def cachevalue(self, obj, value):
578 def cachevalue(self, obj, value):
559 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
579 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
560 obj.__dict__[self.name] = value
580 obj.__dict__[self.name] = value
561
581
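# propertycache computes the value once and stores it in the instance
# __dict__, so later lookups bypass the descriptor entirely (the class
# below is a made-up example).
class _example(object):
    @propertycache
    def answer(self):
        return 42
_obj = _example()
assert _obj.answer == 42
assert 'answer' in _obj.__dict__  # cached; the function will not run again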
562 def pipefilter(s, cmd):
582 def pipefilter(s, cmd):
563 '''filter string S through command CMD, returning its output'''
583 '''filter string S through command CMD, returning its output'''
564 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
584 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
565 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
585 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
566 pout, perr = p.communicate(s)
586 pout, perr = p.communicate(s)
567 return pout
587 return pout
568
588
569 def tempfilter(s, cmd):
589 def tempfilter(s, cmd):
570 '''filter string S through a pair of temporary files with CMD.
590 '''filter string S through a pair of temporary files with CMD.
571 CMD is used as a template to create the real command to be run,
591 CMD is used as a template to create the real command to be run,
572 with the strings INFILE and OUTFILE replaced by the real names of
592 with the strings INFILE and OUTFILE replaced by the real names of
573 the temporary files generated.'''
593 the temporary files generated.'''
574 inname, outname = None, None
594 inname, outname = None, None
575 try:
595 try:
576 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
596 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
577 fp = os.fdopen(infd, 'wb')
597 fp = os.fdopen(infd, 'wb')
578 fp.write(s)
598 fp.write(s)
579 fp.close()
599 fp.close()
580 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
600 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
581 os.close(outfd)
601 os.close(outfd)
582 cmd = cmd.replace('INFILE', inname)
602 cmd = cmd.replace('INFILE', inname)
583 cmd = cmd.replace('OUTFILE', outname)
603 cmd = cmd.replace('OUTFILE', outname)
584 code = os.system(cmd)
604 code = os.system(cmd)
585 if sys.platform == 'OpenVMS' and code & 1:
605 if sys.platform == 'OpenVMS' and code & 1:
586 code = 0
606 code = 0
587 if code:
607 if code:
588 raise Abort(_("command '%s' failed: %s") %
608 raise Abort(_("command '%s' failed: %s") %
589 (cmd, explainexit(code)))
609 (cmd, explainexit(code)))
590 fp = open(outname, 'rb')
610 fp = open(outname, 'rb')
591 r = fp.read()
611 r = fp.read()
592 fp.close()
612 fp.close()
593 return r
613 return r
594 finally:
614 finally:
595 try:
615 try:
596 if inname:
616 if inname:
597 os.unlink(inname)
617 os.unlink(inname)
598 except OSError:
618 except OSError:
599 pass
619 pass
600 try:
620 try:
601 if outname:
621 if outname:
602 os.unlink(outname)
622 os.unlink(outname)
603 except OSError:
623 except OSError:
604 pass
624 pass
605
625
606 filtertable = {
626 filtertable = {
607 'tempfile:': tempfilter,
627 'tempfile:': tempfilter,
608 'pipe:': pipefilter,
628 'pipe:': pipefilter,
609 }
629 }
610
630
611 def filter(s, cmd):
631 def filter(s, cmd):
612 "filter a string through a command that transforms its input to its output"
632 "filter a string through a command that transforms its input to its output"
613 for name, fn in filtertable.iteritems():
633 for name, fn in filtertable.iteritems():
614 if cmd.startswith(name):
634 if cmd.startswith(name):
615 return fn(s, cmd[len(name):].lstrip())
635 return fn(s, cmd[len(name):].lstrip())
616 return pipefilter(s, cmd)
636 return pipefilter(s, cmd)
617
637
618 def binary(s):
638 def binary(s):
619 """return true if a string is binary data"""
639 """return true if a string is binary data"""
620 return bool(s and '\0' in s)
640 return bool(s and '\0' in s)
621
641
622 def increasingchunks(source, min=1024, max=65536):
642 def increasingchunks(source, min=1024, max=65536):
623 '''return no less than min bytes per chunk while data remains,
643 '''return no less than min bytes per chunk while data remains,
624 doubling min after each chunk until it reaches max'''
644 doubling min after each chunk until it reaches max'''
625 def log2(x):
645 def log2(x):
626 if not x:
646 if not x:
627 return 0
647 return 0
628 i = 0
648 i = 0
629 while x:
649 while x:
630 x >>= 1
650 x >>= 1
631 i += 1
651 i += 1
632 return i - 1
652 return i - 1
633
653
634 buf = []
654 buf = []
635 blen = 0
655 blen = 0
636 for chunk in source:
656 for chunk in source:
637 buf.append(chunk)
657 buf.append(chunk)
638 blen += len(chunk)
658 blen += len(chunk)
639 if blen >= min:
659 if blen >= min:
640 if min < max:
660 if min < max:
641 min = min << 1
661 min = min << 1
642 nmin = 1 << log2(blen)
662 nmin = 1 << log2(blen)
643 if nmin > min:
663 if nmin > min:
644 min = nmin
664 min = nmin
645 if min > max:
665 if min > max:
646 min = max
666 min = max
647 yield ''.join(buf)
667 yield ''.join(buf)
648 blen = 0
668 blen = 0
649 buf = []
669 buf = []
650 if buf:
670 if buf:
651 yield ''.join(buf)
671 yield ''.join(buf)
652
672
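# With the defaults, small input pieces are coalesced and the emitted
# chunks at least double in size until they reach the 64k ceiling (the
# piece sizes below are illustrative).
_pieces = ['x' * 512] * 10
assert [len(c) for c in increasingchunks(_pieces)] == [1024, 2048, 2048]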
653 Abort = error.Abort
673 Abort = error.Abort
654
674
655 def always(fn):
675 def always(fn):
656 return True
676 return True
657
677
658 def never(fn):
678 def never(fn):
659 return False
679 return False
660
680
661 def nogc(func):
681 def nogc(func):
662 """disable garbage collector
682 """disable garbage collector
663
683
664 Python's garbage collector triggers a GC each time a certain number of
684 Python's garbage collector triggers a GC each time a certain number of
665 container objects (the number being defined by gc.get_threshold()) are
685 container objects (the number being defined by gc.get_threshold()) are
666 allocated even when marked not to be tracked by the collector. Tracking has
686 allocated even when marked not to be tracked by the collector. Tracking has
667 no effect on when GCs are triggered, only on what objects the GC looks
687 no effect on when GCs are triggered, only on what objects the GC looks
668 into. As a workaround, disable GC while building complex (huge)
688 into. As a workaround, disable GC while building complex (huge)
669 containers.
689 containers.
670
690
671 This garbage collector issue has been fixed in 2.7.
691 This garbage collector issue has been fixed in 2.7.
672 """
692 """
673 def wrapper(*args, **kwargs):
693 def wrapper(*args, **kwargs):
674 gcenabled = gc.isenabled()
694 gcenabled = gc.isenabled()
675 gc.disable()
695 gc.disable()
676 try:
696 try:
677 return func(*args, **kwargs)
697 return func(*args, **kwargs)
678 finally:
698 finally:
679 if gcenabled:
699 if gcenabled:
680 gc.enable()
700 gc.enable()
681 return wrapper
701 return wrapper
682
702
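# Typical use is as a decorator around code that allocates many container
# objects at once (the function is a made-up example).
@nogc
def _buildmap(items):
    return dict((i, i) for i in items)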
683 def pathto(root, n1, n2):
703 def pathto(root, n1, n2):
684 '''return the relative path from one place to another.
704 '''return the relative path from one place to another.
685 root should use os.sep to separate directories
705 root should use os.sep to separate directories
686 n1 should use os.sep to separate directories
706 n1 should use os.sep to separate directories
687 n2 should use "/" to separate directories
707 n2 should use "/" to separate directories
688 returns an os.sep-separated path.
708 returns an os.sep-separated path.
689
709
690 If n1 is a relative path, it's assumed it's
710 If n1 is a relative path, it's assumed it's
691 relative to root.
711 relative to root.
692 n2 should always be relative to root.
712 n2 should always be relative to root.
693 '''
713 '''
694 if not n1:
714 if not n1:
695 return localpath(n2)
715 return localpath(n2)
696 if os.path.isabs(n1):
716 if os.path.isabs(n1):
697 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
717 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
698 return os.path.join(root, localpath(n2))
718 return os.path.join(root, localpath(n2))
699 n2 = '/'.join((pconvert(root), n2))
719 n2 = '/'.join((pconvert(root), n2))
700 a, b = splitpath(n1), n2.split('/')
720 a, b = splitpath(n1), n2.split('/')
701 a.reverse()
721 a.reverse()
702 b.reverse()
722 b.reverse()
703 while a and b and a[-1] == b[-1]:
723 while a and b and a[-1] == b[-1]:
704 a.pop()
724 a.pop()
705 b.pop()
725 b.pop()
706 b.reverse()
726 b.reverse()
707 return os.sep.join((['..'] * len(a)) + b) or '.'
727 return os.sep.join((['..'] * len(a)) + b) or '.'
708
728
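# A worked example on a POSIX layout (the paths are made up; on Windows the
# separators in the result would use os.sep instead of '/').
assert pathto('/repo', 'a/b', 'a/c/d.txt') == '../c/d.txt'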
709 def mainfrozen():
729 def mainfrozen():
710 """return True if we are a frozen executable.
730 """return True if we are a frozen executable.
711
731
712 The code supports py2exe (most common, Windows only) and tools/freeze
732 The code supports py2exe (most common, Windows only) and tools/freeze
713 (portable, not much used).
733 (portable, not much used).
714 """
734 """
715 return (safehasattr(sys, "frozen") or # new py2exe
735 return (safehasattr(sys, "frozen") or # new py2exe
716 safehasattr(sys, "importers") or # old py2exe
736 safehasattr(sys, "importers") or # old py2exe
717 imp.is_frozen("__main__")) # tools/freeze
737 imp.is_frozen("__main__")) # tools/freeze
718
738
719 # the location of data files matching the source code
739 # the location of data files matching the source code
720 if mainfrozen():
740 if mainfrozen():
721 # executable version (py2exe) doesn't support __file__
741 # executable version (py2exe) doesn't support __file__
722 datapath = os.path.dirname(sys.executable)
742 datapath = os.path.dirname(sys.executable)
723 else:
743 else:
724 datapath = os.path.dirname(__file__)
744 datapath = os.path.dirname(__file__)
725
745
726 i18n.setdatapath(datapath)
746 i18n.setdatapath(datapath)
727
747
728 _hgexecutable = None
748 _hgexecutable = None
729
749
730 def hgexecutable():
750 def hgexecutable():
731 """return location of the 'hg' executable.
751 """return location of the 'hg' executable.
732
752
733 Defaults to $HG or 'hg' in the search path.
753 Defaults to $HG or 'hg' in the search path.
734 """
754 """
735 if _hgexecutable is None:
755 if _hgexecutable is None:
736 hg = os.environ.get('HG')
756 hg = os.environ.get('HG')
737 mainmod = sys.modules['__main__']
757 mainmod = sys.modules['__main__']
738 if hg:
758 if hg:
739 _sethgexecutable(hg)
759 _sethgexecutable(hg)
740 elif mainfrozen():
760 elif mainfrozen():
741 _sethgexecutable(sys.executable)
761 _sethgexecutable(sys.executable)
742 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
762 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
743 _sethgexecutable(mainmod.__file__)
763 _sethgexecutable(mainmod.__file__)
744 else:
764 else:
745 exe = findexe('hg') or os.path.basename(sys.argv[0])
765 exe = findexe('hg') or os.path.basename(sys.argv[0])
746 _sethgexecutable(exe)
766 _sethgexecutable(exe)
747 return _hgexecutable
767 return _hgexecutable
748
768
749 def _sethgexecutable(path):
769 def _sethgexecutable(path):
750 """set location of the 'hg' executable"""
770 """set location of the 'hg' executable"""
751 global _hgexecutable
771 global _hgexecutable
752 _hgexecutable = path
772 _hgexecutable = path
753
773
754 def _isstdout(f):
774 def _isstdout(f):
755 fileno = getattr(f, 'fileno', None)
775 fileno = getattr(f, 'fileno', None)
756 return fileno and fileno() == sys.__stdout__.fileno()
776 return fileno and fileno() == sys.__stdout__.fileno()
757
777
758 def system(cmd, environ=None, cwd=None, onerr=None, errprefix=None, out=None):
778 def system(cmd, environ=None, cwd=None, onerr=None, errprefix=None, out=None):
759 '''enhanced shell command execution.
779 '''enhanced shell command execution.
760 run with environment maybe modified, maybe in different dir.
780 run with environment maybe modified, maybe in different dir.
761
781
762 if command fails and onerr is None, return status, else raise onerr
782 if command fails and onerr is None, return status, else raise onerr
763 object as exception.
783 object as exception.
764
784
765 if out is specified, it is assumed to be a file-like object that has a
785 if out is specified, it is assumed to be a file-like object that has a
766 write() method. stdout and stderr will be redirected to out.'''
786 write() method. stdout and stderr will be redirected to out.'''
767 if environ is None:
787 if environ is None:
768 environ = {}
788 environ = {}
769 try:
789 try:
770 sys.stdout.flush()
790 sys.stdout.flush()
771 except Exception:
791 except Exception:
772 pass
792 pass
773 def py2shell(val):
793 def py2shell(val):
774 'convert python object into string that is useful to shell'
794 'convert python object into string that is useful to shell'
775 if val is None or val is False:
795 if val is None or val is False:
776 return '0'
796 return '0'
777 if val is True:
797 if val is True:
778 return '1'
798 return '1'
779 return str(val)
799 return str(val)
780 origcmd = cmd
800 origcmd = cmd
781 cmd = quotecommand(cmd)
801 cmd = quotecommand(cmd)
782 if sys.platform == 'plan9' and (sys.version_info[0] == 2
802 if sys.platform == 'plan9' and (sys.version_info[0] == 2
783 and sys.version_info[1] < 7):
803 and sys.version_info[1] < 7):
784 # subprocess kludge to work around issues in half-baked Python
804 # subprocess kludge to work around issues in half-baked Python
785 # ports, notably bichued/python:
805 # ports, notably bichued/python:
786 if not cwd is None:
806 if not cwd is None:
787 os.chdir(cwd)
807 os.chdir(cwd)
788 rc = os.system(cmd)
808 rc = os.system(cmd)
789 else:
809 else:
790 env = dict(os.environ)
810 env = dict(os.environ)
791 env.update((k, py2shell(v)) for k, v in environ.iteritems())
811 env.update((k, py2shell(v)) for k, v in environ.iteritems())
792 env['HG'] = hgexecutable()
812 env['HG'] = hgexecutable()
793 if out is None or _isstdout(out):
813 if out is None or _isstdout(out):
794 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
814 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
795 env=env, cwd=cwd)
815 env=env, cwd=cwd)
796 else:
816 else:
797 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
817 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
798 env=env, cwd=cwd, stdout=subprocess.PIPE,
818 env=env, cwd=cwd, stdout=subprocess.PIPE,
799 stderr=subprocess.STDOUT)
819 stderr=subprocess.STDOUT)
800 while True:
820 while True:
801 line = proc.stdout.readline()
821 line = proc.stdout.readline()
802 if not line:
822 if not line:
803 break
823 break
804 out.write(line)
824 out.write(line)
805 proc.wait()
825 proc.wait()
806 rc = proc.returncode
826 rc = proc.returncode
807 if sys.platform == 'OpenVMS' and rc & 1:
827 if sys.platform == 'OpenVMS' and rc & 1:
808 rc = 0
828 rc = 0
809 if rc and onerr:
829 if rc and onerr:
810 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
830 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
811 explainexit(rc)[0])
831 explainexit(rc)[0])
812 if errprefix:
832 if errprefix:
813 errmsg = '%s: %s' % (errprefix, errmsg)
833 errmsg = '%s: %s' % (errprefix, errmsg)
814 raise onerr(errmsg)
834 raise onerr(errmsg)
815 return rc
835 return rc
816
836
817 def checksignature(func):
837 def checksignature(func):
818 '''wrap a function with code to check for calling errors'''
838 '''wrap a function with code to check for calling errors'''
819 def check(*args, **kwargs):
839 def check(*args, **kwargs):
820 try:
840 try:
821 return func(*args, **kwargs)
841 return func(*args, **kwargs)
822 except TypeError:
842 except TypeError:
823 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
843 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
824 raise error.SignatureError
844 raise error.SignatureError
825 raise
845 raise
826
846
827 return check
847 return check
828
848
829 def copyfile(src, dest, hardlink=False):
849 def copyfile(src, dest, hardlink=False):
830 "copy a file, preserving mode and atime/mtime"
850 "copy a file, preserving mode and atime/mtime"
831 if os.path.lexists(dest):
851 if os.path.lexists(dest):
832 unlink(dest)
852 unlink(dest)
833 # hardlinks are problematic on CIFS, quietly ignore this flag
853 # hardlinks are problematic on CIFS, quietly ignore this flag
834 # until we find a way to work around it cleanly (issue4546)
854 # until we find a way to work around it cleanly (issue4546)
835 if False and hardlink:
855 if False and hardlink:
836 try:
856 try:
837 oslink(src, dest)
857 oslink(src, dest)
838 return
858 return
839 except (IOError, OSError):
859 except (IOError, OSError):
840 pass # fall back to normal copy
860 pass # fall back to normal copy
841 if os.path.islink(src):
861 if os.path.islink(src):
842 os.symlink(os.readlink(src), dest)
862 os.symlink(os.readlink(src), dest)
843 else:
863 else:
844 try:
864 try:
845 shutil.copyfile(src, dest)
865 shutil.copyfile(src, dest)
846 shutil.copymode(src, dest)
866 shutil.copymode(src, dest)
847 except shutil.Error as inst:
867 except shutil.Error as inst:
848 raise Abort(str(inst))
868 raise Abort(str(inst))
849
869
850 def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
870 def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
851 """Copy a directory tree using hardlinks if possible."""
871 """Copy a directory tree using hardlinks if possible."""
852 num = 0
872 num = 0
853
873
854 if hardlink is None:
874 if hardlink is None:
855 hardlink = (os.stat(src).st_dev ==
875 hardlink = (os.stat(src).st_dev ==
856 os.stat(os.path.dirname(dst)).st_dev)
876 os.stat(os.path.dirname(dst)).st_dev)
857 if hardlink:
877 if hardlink:
858 topic = _('linking')
878 topic = _('linking')
859 else:
879 else:
860 topic = _('copying')
880 topic = _('copying')
861
881
862 if os.path.isdir(src):
882 if os.path.isdir(src):
863 os.mkdir(dst)
883 os.mkdir(dst)
864 for name, kind in osutil.listdir(src):
884 for name, kind in osutil.listdir(src):
865 srcname = os.path.join(src, name)
885 srcname = os.path.join(src, name)
866 dstname = os.path.join(dst, name)
886 dstname = os.path.join(dst, name)
867 def nprog(t, pos):
887 def nprog(t, pos):
868 if pos is not None:
888 if pos is not None:
869 return progress(t, pos + num)
889 return progress(t, pos + num)
870 hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
890 hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
871 num += n
891 num += n
872 else:
892 else:
873 if hardlink:
893 if hardlink:
874 try:
894 try:
875 oslink(src, dst)
895 oslink(src, dst)
876 except (IOError, OSError):
896 except (IOError, OSError):
877 hardlink = False
897 hardlink = False
878 shutil.copy(src, dst)
898 shutil.copy(src, dst)
879 else:
899 else:
880 shutil.copy(src, dst)
900 shutil.copy(src, dst)
881 num += 1
901 num += 1
882 progress(topic, num)
902 progress(topic, num)
883 progress(topic, None)
903 progress(topic, None)
884
904
885 return hardlink, num
905 return hardlink, num
886
906
887 _winreservednames = '''con prn aux nul
907 _winreservednames = '''con prn aux nul
888 com1 com2 com3 com4 com5 com6 com7 com8 com9
908 com1 com2 com3 com4 com5 com6 com7 com8 com9
889 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
909 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
890 _winreservedchars = ':*?"<>|'
910 _winreservedchars = ':*?"<>|'
891 def checkwinfilename(path):
911 def checkwinfilename(path):
892 r'''Check that the base-relative path is a valid filename on Windows.
912 r'''Check that the base-relative path is a valid filename on Windows.
893 Returns None if the path is ok, or a UI string describing the problem.
913 Returns None if the path is ok, or a UI string describing the problem.
894
914
895 >>> checkwinfilename("just/a/normal/path")
915 >>> checkwinfilename("just/a/normal/path")
896 >>> checkwinfilename("foo/bar/con.xml")
916 >>> checkwinfilename("foo/bar/con.xml")
897 "filename contains 'con', which is reserved on Windows"
917 "filename contains 'con', which is reserved on Windows"
898 >>> checkwinfilename("foo/con.xml/bar")
918 >>> checkwinfilename("foo/con.xml/bar")
899 "filename contains 'con', which is reserved on Windows"
919 "filename contains 'con', which is reserved on Windows"
900 >>> checkwinfilename("foo/bar/xml.con")
920 >>> checkwinfilename("foo/bar/xml.con")
901 >>> checkwinfilename("foo/bar/AUX/bla.txt")
921 >>> checkwinfilename("foo/bar/AUX/bla.txt")
902 "filename contains 'AUX', which is reserved on Windows"
922 "filename contains 'AUX', which is reserved on Windows"
903 >>> checkwinfilename("foo/bar/bla:.txt")
923 >>> checkwinfilename("foo/bar/bla:.txt")
904 "filename contains ':', which is reserved on Windows"
924 "filename contains ':', which is reserved on Windows"
905 >>> checkwinfilename("foo/bar/b\07la.txt")
925 >>> checkwinfilename("foo/bar/b\07la.txt")
906 "filename contains '\\x07', which is invalid on Windows"
926 "filename contains '\\x07', which is invalid on Windows"
907 >>> checkwinfilename("foo/bar/bla ")
927 >>> checkwinfilename("foo/bar/bla ")
908 "filename ends with ' ', which is not allowed on Windows"
928 "filename ends with ' ', which is not allowed on Windows"
909 >>> checkwinfilename("../bar")
929 >>> checkwinfilename("../bar")
910 >>> checkwinfilename("foo\\")
930 >>> checkwinfilename("foo\\")
911 "filename ends with '\\', which is invalid on Windows"
931 "filename ends with '\\', which is invalid on Windows"
912 >>> checkwinfilename("foo\\/bar")
932 >>> checkwinfilename("foo\\/bar")
913 "directory name ends with '\\', which is invalid on Windows"
933 "directory name ends with '\\', which is invalid on Windows"
914 '''
934 '''
915 if path.endswith('\\'):
935 if path.endswith('\\'):
916 return _("filename ends with '\\', which is invalid on Windows")
936 return _("filename ends with '\\', which is invalid on Windows")
917 if '\\/' in path:
937 if '\\/' in path:
918 return _("directory name ends with '\\', which is invalid on Windows")
938 return _("directory name ends with '\\', which is invalid on Windows")
919 for n in path.replace('\\', '/').split('/'):
939 for n in path.replace('\\', '/').split('/'):
920 if not n:
940 if not n:
921 continue
941 continue
922 for c in n:
942 for c in n:
923 if c in _winreservedchars:
943 if c in _winreservedchars:
924 return _("filename contains '%s', which is reserved "
944 return _("filename contains '%s', which is reserved "
925 "on Windows") % c
945 "on Windows") % c
926 if ord(c) <= 31:
946 if ord(c) <= 31:
927 return _("filename contains %r, which is invalid "
947 return _("filename contains %r, which is invalid "
928 "on Windows") % c
948 "on Windows") % c
929 base = n.split('.')[0]
949 base = n.split('.')[0]
930 if base and base.lower() in _winreservednames:
950 if base and base.lower() in _winreservednames:
931 return _("filename contains '%s', which is reserved "
951 return _("filename contains '%s', which is reserved "
932 "on Windows") % base
952 "on Windows") % base
933 t = n[-1]
953 t = n[-1]
934 if t in '. ' and n not in '..':
954 if t in '. ' and n not in '..':
935 return _("filename ends with '%s', which is not allowed "
955 return _("filename ends with '%s', which is not allowed "
936 "on Windows") % t
956 "on Windows") % t
937
957
938 if os.name == 'nt':
958 if os.name == 'nt':
939 checkosfilename = checkwinfilename
959 checkosfilename = checkwinfilename
940 else:
960 else:
941 checkosfilename = platform.checkosfilename
961 checkosfilename = platform.checkosfilename
942
962
943 def makelock(info, pathname):
963 def makelock(info, pathname):
944 try:
964 try:
945 return os.symlink(info, pathname)
965 return os.symlink(info, pathname)
946 except OSError as why:
966 except OSError as why:
947 if why.errno == errno.EEXIST:
967 if why.errno == errno.EEXIST:
948 raise
968 raise
949 except AttributeError: # no symlink in os
969 except AttributeError: # no symlink in os
950 pass
970 pass
951
971
952 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
972 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
953 os.write(ld, info)
973 os.write(ld, info)
954 os.close(ld)
974 os.close(ld)
955
975
956 def readlock(pathname):
976 def readlock(pathname):
957 try:
977 try:
958 return os.readlink(pathname)
978 return os.readlink(pathname)
959 except OSError as why:
979 except OSError as why:
960 if why.errno not in (errno.EINVAL, errno.ENOSYS):
980 if why.errno not in (errno.EINVAL, errno.ENOSYS):
961 raise
981 raise
962 except AttributeError: # no symlink in os
982 except AttributeError: # no symlink in os
963 pass
983 pass
964 fp = posixfile(pathname)
984 fp = posixfile(pathname)
965 r = fp.read()
985 r = fp.read()
966 fp.close()
986 fp.close()
967 return r
987 return r
968
988
969 def fstat(fp):
989 def fstat(fp):
970 '''stat file object that may not have fileno method.'''
990 '''stat file object that may not have fileno method.'''
971 try:
991 try:
972 return os.fstat(fp.fileno())
992 return os.fstat(fp.fileno())
973 except AttributeError:
993 except AttributeError:
974 return os.stat(fp.name)
994 return os.stat(fp.name)
975
995
976 # File system features
996 # File system features
977
997
978 def checkcase(path):
998 def checkcase(path):
979 """
999 """
980 Return true if the given path is on a case-sensitive filesystem
1000 Return true if the given path is on a case-sensitive filesystem
981
1001
982 Requires a path (like /foo/.hg) ending with a foldable final
1002 Requires a path (like /foo/.hg) ending with a foldable final
983 directory component.
1003 directory component.
984 """
1004 """
985 s1 = os.lstat(path)
1005 s1 = os.lstat(path)
986 d, b = os.path.split(path)
1006 d, b = os.path.split(path)
987 b2 = b.upper()
1007 b2 = b.upper()
988 if b == b2:
1008 if b == b2:
989 b2 = b.lower()
1009 b2 = b.lower()
990 if b == b2:
1010 if b == b2:
991 return True # no evidence against case sensitivity
1011 return True # no evidence against case sensitivity
992 p2 = os.path.join(d, b2)
1012 p2 = os.path.join(d, b2)
993 try:
1013 try:
994 s2 = os.lstat(p2)
1014 s2 = os.lstat(p2)
995 if s2 == s1:
1015 if s2 == s1:
996 return False
1016 return False
997 return True
1017 return True
998 except OSError:
1018 except OSError:
999 return True
1019 return True
1000
1020
1001 try:
1021 try:
1002 import re2
1022 import re2
1003 _re2 = None
1023 _re2 = None
1004 except ImportError:
1024 except ImportError:
1005 _re2 = False
1025 _re2 = False
1006
1026
1007 class _re(object):
1027 class _re(object):
1008 def _checkre2(self):
1028 def _checkre2(self):
1009 global _re2
1029 global _re2
1010 try:
1030 try:
1011 # check if match works, see issue3964
1031 # check if match works, see issue3964
1012 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
1032 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
1013 except ImportError:
1033 except ImportError:
1014 _re2 = False
1034 _re2 = False
1015
1035
1016 def compile(self, pat, flags=0):
1036 def compile(self, pat, flags=0):
1017 '''Compile a regular expression, using re2 if possible
1037 '''Compile a regular expression, using re2 if possible
1018
1038
1019 For best performance, use only re2-compatible regexp features. The
1039 For best performance, use only re2-compatible regexp features. The
1020 only flags from the re module that are re2-compatible are
1040 only flags from the re module that are re2-compatible are
1021 IGNORECASE and MULTILINE.'''
1041 IGNORECASE and MULTILINE.'''
1022 if _re2 is None:
1042 if _re2 is None:
1023 self._checkre2()
1043 self._checkre2()
1024 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
1044 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
1025 if flags & remod.IGNORECASE:
1045 if flags & remod.IGNORECASE:
1026 pat = '(?i)' + pat
1046 pat = '(?i)' + pat
1027 if flags & remod.MULTILINE:
1047 if flags & remod.MULTILINE:
1028 pat = '(?m)' + pat
1048 pat = '(?m)' + pat
1029 try:
1049 try:
1030 return re2.compile(pat)
1050 return re2.compile(pat)
1031 except re2.error:
1051 except re2.error:
1032 pass
1052 pass
1033 return remod.compile(pat, flags)
1053 return remod.compile(pat, flags)
1034
1054
1035 @propertycache
1055 @propertycache
1036 def escape(self):
1056 def escape(self):
1037 '''Return the version of escape corresponding to self.compile.
1057 '''Return the version of escape corresponding to self.compile.
1038
1058
1039 This is imperfect because whether re2 or re is used for a particular
1059 This is imperfect because whether re2 or re is used for a particular
1040 function depends on the flags, etc, but it's the best we can do.
1060 function depends on the flags, etc, but it's the best we can do.
1041 '''
1061 '''
1042 global _re2
1062 global _re2
1043 if _re2 is None:
1063 if _re2 is None:
1044 self._checkre2()
1064 self._checkre2()
1045 if _re2:
1065 if _re2:
1046 return re2.escape
1066 return re2.escape
1047 else:
1067 else:
1048 return remod.escape
1068 return remod.escape
1049
1069
1050 re = _re()
1070 re = _re()
1051
1071
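# Illustrative sketch (not part of the original util.py): the module-level
# `re` object above is used like the stdlib module but transparently prefers
# re2 when it is importable and the flags are compatible.  The pattern and
# the input text are made up for the example.
def _demorecompile():
    # IGNORECASE is one of the two flags the re2 path accepts; anything else
    # silently falls back to the stdlib 're' module (imported as remod)
    pat = re.compile(r'bookmark\s+(\w+)', remod.IGNORECASE)
    m = pat.search('Bookmark feature')
    return m and m.group(1)  # -> 'feature'
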
1052 _fspathcache = {}
1072 _fspathcache = {}
1053 def fspath(name, root):
1073 def fspath(name, root):
1054 '''Get name in the case stored in the filesystem
1074 '''Get name in the case stored in the filesystem
1055
1075
1056 The name should be relative to root, and be normcase-ed for efficiency.
1076 The name should be relative to root, and be normcase-ed for efficiency.
1057
1077
1058 Note that this function is unnecessary, and should not be
1078 Note that this function is unnecessary, and should not be
1059 called, for case-sensitive filesystems (simply because it's expensive).
1079 called, for case-sensitive filesystems (simply because it's expensive).
1060
1080
1061 The root should be normcase-ed, too.
1081 The root should be normcase-ed, too.
1062 '''
1082 '''
1063 def _makefspathcacheentry(dir):
1083 def _makefspathcacheentry(dir):
1064 return dict((normcase(n), n) for n in os.listdir(dir))
1084 return dict((normcase(n), n) for n in os.listdir(dir))
1065
1085
1066 seps = os.sep
1086 seps = os.sep
1067 if os.altsep:
1087 if os.altsep:
1068 seps = seps + os.altsep
1088 seps = seps + os.altsep
1069 # Protect backslashes. This gets silly very quickly.
1089 # Protect backslashes. This gets silly very quickly.
1070     seps = seps.replace('\\', '\\\\')
1090     seps = seps.replace('\\', '\\\\')
1071 pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
1091 pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
1072 dir = os.path.normpath(root)
1092 dir = os.path.normpath(root)
1073 result = []
1093 result = []
1074 for part, sep in pattern.findall(name):
1094 for part, sep in pattern.findall(name):
1075 if sep:
1095 if sep:
1076 result.append(sep)
1096 result.append(sep)
1077 continue
1097 continue
1078
1098
1079 if dir not in _fspathcache:
1099 if dir not in _fspathcache:
1080 _fspathcache[dir] = _makefspathcacheentry(dir)
1100 _fspathcache[dir] = _makefspathcacheentry(dir)
1081 contents = _fspathcache[dir]
1101 contents = _fspathcache[dir]
1082
1102
1083 found = contents.get(part)
1103 found = contents.get(part)
1084 if not found:
1104 if not found:
1085 # retry "once per directory" per "dirstate.walk" which
1105 # retry "once per directory" per "dirstate.walk" which
1086             # may take place for each patch of "hg qpush", for example
1106             # may take place for each patch of "hg qpush", for example
1087 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
1107 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
1088 found = contents.get(part)
1108 found = contents.get(part)
1089
1109
1090 result.append(found or part)
1110 result.append(found or part)
1091 dir = os.path.join(dir, part)
1111 dir = os.path.join(dir, part)
1092
1112
1093 return ''.join(result)
1113 return ''.join(result)
1094
1114
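# Illustrative sketch (not part of the original util.py): recovering the
# on-disk spelling of a path on a case-insensitive filesystem.  The layout
# 'MixedCase/File.txt' below `root` is hypothetical; both arguments must
# already be normcase()-ed, as required by the docstring above.
def _demofspath(root):
    wanted = normcase(os.path.join('MixedCase', 'File.txt'))
    # returns 'MixedCase/File.txt' even though `wanted` was case-folded,
    # provided that file really exists below `root`
    return fspath(wanted, normcase(root))
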
1095 def checknlink(testfile):
1115 def checknlink(testfile):
1096 '''check whether hardlink count reporting works properly'''
1116 '''check whether hardlink count reporting works properly'''
1097
1117
1098 # testfile may be open, so we need a separate file for checking to
1118 # testfile may be open, so we need a separate file for checking to
1099 # work around issue2543 (or testfile may get lost on Samba shares)
1119 # work around issue2543 (or testfile may get lost on Samba shares)
1100 f1 = testfile + ".hgtmp1"
1120 f1 = testfile + ".hgtmp1"
1101 if os.path.lexists(f1):
1121 if os.path.lexists(f1):
1102 return False
1122 return False
1103 try:
1123 try:
1104 posixfile(f1, 'w').close()
1124 posixfile(f1, 'w').close()
1105 except IOError:
1125 except IOError:
1106 return False
1126 return False
1107
1127
1108 f2 = testfile + ".hgtmp2"
1128 f2 = testfile + ".hgtmp2"
1109 fd = None
1129 fd = None
1110 try:
1130 try:
1111 oslink(f1, f2)
1131 oslink(f1, f2)
1112 # nlinks() may behave differently for files on Windows shares if
1132 # nlinks() may behave differently for files on Windows shares if
1113 # the file is open.
1133 # the file is open.
1114 fd = posixfile(f2)
1134 fd = posixfile(f2)
1115 return nlinks(f2) > 1
1135 return nlinks(f2) > 1
1116 except OSError:
1136 except OSError:
1117 return False
1137 return False
1118 finally:
1138 finally:
1119 if fd is not None:
1139 if fd is not None:
1120 fd.close()
1140 fd.close()
1121 for f in (f1, f2):
1141 for f in (f1, f2):
1122 try:
1142 try:
1123 os.unlink(f)
1143 os.unlink(f)
1124 except OSError:
1144 except OSError:
1125 pass
1145 pass
1126
1146
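# Illustrative sketch (not part of the original util.py): checknlink() is
# normally pointed at a file inside .hg/store; a throwaway temporary
# directory stands in for it here.
def _demochecknlink():
    import tempfile
    probe = os.path.join(tempfile.mkdtemp(), 'probe')
    # True when hardlink counts are reported reliably (most local POSIX
    # filesystems), False on e.g. some Windows/Samba shares
    return checknlink(probe)
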
1127 def endswithsep(path):
1147 def endswithsep(path):
1128 '''Check path ends with os.sep or os.altsep.'''
1148 '''Check path ends with os.sep or os.altsep.'''
1129 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
1149 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
1130
1150
1131 def splitpath(path):
1151 def splitpath(path):
1132 '''Split path by os.sep.
1152 '''Split path by os.sep.
1133 Note that this function does not use os.altsep because this is
1153 Note that this function does not use os.altsep because this is
1134     an alternative to a simple "xxx.split(os.sep)".
1154     an alternative to a simple "xxx.split(os.sep)".
1135     It is recommended to use os.path.normpath() before using this
1155     It is recommended to use os.path.normpath() before using this
1136     function if needed.'''
1156     function if needed.'''
1137 return path.split(os.sep)
1157 return path.split(os.sep)
1138
1158
1139 def gui():
1159 def gui():
1140 '''Are we running in a GUI?'''
1160 '''Are we running in a GUI?'''
1141 if sys.platform == 'darwin':
1161 if sys.platform == 'darwin':
1142 if 'SSH_CONNECTION' in os.environ:
1162 if 'SSH_CONNECTION' in os.environ:
1143 # handle SSH access to a box where the user is logged in
1163 # handle SSH access to a box where the user is logged in
1144 return False
1164 return False
1145 elif getattr(osutil, 'isgui', None):
1165 elif getattr(osutil, 'isgui', None):
1146 # check if a CoreGraphics session is available
1166 # check if a CoreGraphics session is available
1147 return osutil.isgui()
1167 return osutil.isgui()
1148 else:
1168 else:
1149 # pure build; use a safe default
1169 # pure build; use a safe default
1150 return True
1170 return True
1151 else:
1171 else:
1152 return os.name == "nt" or os.environ.get("DISPLAY")
1172 return os.name == "nt" or os.environ.get("DISPLAY")
1153
1173
1154 def mktempcopy(name, emptyok=False, createmode=None):
1174 def mktempcopy(name, emptyok=False, createmode=None):
1155 """Create a temporary file with the same contents from name
1175 """Create a temporary file with the same contents from name
1156
1176
1157 The permission bits are copied from the original file.
1177 The permission bits are copied from the original file.
1158
1178
1159 If the temporary file is going to be truncated immediately, you
1179 If the temporary file is going to be truncated immediately, you
1160 can use emptyok=True as an optimization.
1180 can use emptyok=True as an optimization.
1161
1181
1162 Returns the name of the temporary file.
1182 Returns the name of the temporary file.
1163 """
1183 """
1164 d, fn = os.path.split(name)
1184 d, fn = os.path.split(name)
1165 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1185 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1166 os.close(fd)
1186 os.close(fd)
1167 # Temporary files are created with mode 0600, which is usually not
1187 # Temporary files are created with mode 0600, which is usually not
1168 # what we want. If the original file already exists, just copy
1188 # what we want. If the original file already exists, just copy
1169 # its mode. Otherwise, manually obey umask.
1189 # its mode. Otherwise, manually obey umask.
1170 copymode(name, temp, createmode)
1190 copymode(name, temp, createmode)
1171 if emptyok:
1191 if emptyok:
1172 return temp
1192 return temp
1173 try:
1193 try:
1174 try:
1194 try:
1175 ifp = posixfile(name, "rb")
1195 ifp = posixfile(name, "rb")
1176 except IOError as inst:
1196 except IOError as inst:
1177 if inst.errno == errno.ENOENT:
1197 if inst.errno == errno.ENOENT:
1178 return temp
1198 return temp
1179 if not getattr(inst, 'filename', None):
1199 if not getattr(inst, 'filename', None):
1180 inst.filename = name
1200 inst.filename = name
1181 raise
1201 raise
1182 ofp = posixfile(temp, "wb")
1202 ofp = posixfile(temp, "wb")
1183 for chunk in filechunkiter(ifp):
1203 for chunk in filechunkiter(ifp):
1184 ofp.write(chunk)
1204 ofp.write(chunk)
1185 ifp.close()
1205 ifp.close()
1186 ofp.close()
1206 ofp.close()
1187 except: # re-raises
1207 except: # re-raises
1188 try: os.unlink(temp)
1208 try: os.unlink(temp)
1189 except OSError: pass
1209 except OSError: pass
1190 raise
1210 raise
1191 return temp
1211 return temp
1192
1212
1193 class atomictempfile(object):
1213 class atomictempfile(object):
1194 '''writable file object that atomically updates a file
1214 '''writable file object that atomically updates a file
1195
1215
1196 All writes will go to a temporary copy of the original file. Call
1216 All writes will go to a temporary copy of the original file. Call
1197 close() when you are done writing, and atomictempfile will rename
1217 close() when you are done writing, and atomictempfile will rename
1198 the temporary copy to the original name, making the changes
1218 the temporary copy to the original name, making the changes
1199 visible. If the object is destroyed without being closed, all your
1219 visible. If the object is destroyed without being closed, all your
1200 writes are discarded.
1220 writes are discarded.
1201 '''
1221 '''
1202 def __init__(self, name, mode='w+b', createmode=None):
1222 def __init__(self, name, mode='w+b', createmode=None):
1203 self.__name = name # permanent name
1223 self.__name = name # permanent name
1204 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1224 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1205 createmode=createmode)
1225 createmode=createmode)
1206 self._fp = posixfile(self._tempname, mode)
1226 self._fp = posixfile(self._tempname, mode)
1207
1227
1208 # delegated methods
1228 # delegated methods
1209 self.write = self._fp.write
1229 self.write = self._fp.write
1210 self.seek = self._fp.seek
1230 self.seek = self._fp.seek
1211 self.tell = self._fp.tell
1231 self.tell = self._fp.tell
1212 self.fileno = self._fp.fileno
1232 self.fileno = self._fp.fileno
1213
1233
1214 def close(self):
1234 def close(self):
1215 if not self._fp.closed:
1235 if not self._fp.closed:
1216 self._fp.close()
1236 self._fp.close()
1217 rename(self._tempname, localpath(self.__name))
1237 rename(self._tempname, localpath(self.__name))
1218
1238
1219 def discard(self):
1239 def discard(self):
1220 if not self._fp.closed:
1240 if not self._fp.closed:
1221 try:
1241 try:
1222 os.unlink(self._tempname)
1242 os.unlink(self._tempname)
1223 except OSError:
1243 except OSError:
1224 pass
1244 pass
1225 self._fp.close()
1245 self._fp.close()
1226
1246
1227 def __del__(self):
1247 def __del__(self):
1228 if safehasattr(self, '_fp'): # constructor actually did something
1248 if safehasattr(self, '_fp'): # constructor actually did something
1229 self.discard()
1249 self.discard()
1230
1250
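# Illustrative sketch (not part of the original util.py): the usual
# write-then-rename pattern with atomictempfile.  The content and the target
# path are assumptions for demonstration only.
def _demoatomicwrite(path):
    f = atomictempfile(path)
    try:
        f.write('all or nothing\n')
        f.close()    # the rename makes the new content visible atomically
    except:          # re-raises, mirroring the style used elsewhere here
        f.discard()  # the temp file is removed, the original is untouched
        raise
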
1231 def makedirs(name, mode=None, notindexed=False):
1251 def makedirs(name, mode=None, notindexed=False):
1232 """recursive directory creation with parent mode inheritance"""
1252 """recursive directory creation with parent mode inheritance"""
1233 try:
1253 try:
1234 makedir(name, notindexed)
1254 makedir(name, notindexed)
1235 except OSError as err:
1255 except OSError as err:
1236 if err.errno == errno.EEXIST:
1256 if err.errno == errno.EEXIST:
1237 return
1257 return
1238 if err.errno != errno.ENOENT or not name:
1258 if err.errno != errno.ENOENT or not name:
1239 raise
1259 raise
1240 parent = os.path.dirname(os.path.abspath(name))
1260 parent = os.path.dirname(os.path.abspath(name))
1241 if parent == name:
1261 if parent == name:
1242 raise
1262 raise
1243 makedirs(parent, mode, notindexed)
1263 makedirs(parent, mode, notindexed)
1244 makedir(name, notindexed)
1264 makedir(name, notindexed)
1245 if mode is not None:
1265 if mode is not None:
1246 os.chmod(name, mode)
1266 os.chmod(name, mode)
1247
1267
1248 def ensuredirs(name, mode=None, notindexed=False):
1268 def ensuredirs(name, mode=None, notindexed=False):
1249 """race-safe recursive directory creation
1269 """race-safe recursive directory creation
1250
1270
1251 Newly created directories are marked as "not to be indexed by
1271 Newly created directories are marked as "not to be indexed by
1252 the content indexing service", if ``notindexed`` is specified
1272 the content indexing service", if ``notindexed`` is specified
1253 for "write" mode access.
1273 for "write" mode access.
1254 """
1274 """
1255 if os.path.isdir(name):
1275 if os.path.isdir(name):
1256 return
1276 return
1257 parent = os.path.dirname(os.path.abspath(name))
1277 parent = os.path.dirname(os.path.abspath(name))
1258 if parent != name:
1278 if parent != name:
1259 ensuredirs(parent, mode, notindexed)
1279 ensuredirs(parent, mode, notindexed)
1260 try:
1280 try:
1261 makedir(name, notindexed)
1281 makedir(name, notindexed)
1262 except OSError as err:
1282 except OSError as err:
1263 if err.errno == errno.EEXIST and os.path.isdir(name):
1283 if err.errno == errno.EEXIST and os.path.isdir(name):
1264 # someone else seems to have won a directory creation race
1284 # someone else seems to have won a directory creation race
1265 return
1285 return
1266 raise
1286 raise
1267 if mode is not None:
1287 if mode is not None:
1268 os.chmod(name, mode)
1288 os.chmod(name, mode)
1269
1289
1270 def readfile(path):
1290 def readfile(path):
1271 fp = open(path, 'rb')
1291 fp = open(path, 'rb')
1272 try:
1292 try:
1273 return fp.read()
1293 return fp.read()
1274 finally:
1294 finally:
1275 fp.close()
1295 fp.close()
1276
1296
1277 def writefile(path, text):
1297 def writefile(path, text):
1278 fp = open(path, 'wb')
1298 fp = open(path, 'wb')
1279 try:
1299 try:
1280 fp.write(text)
1300 fp.write(text)
1281 finally:
1301 finally:
1282 fp.close()
1302 fp.close()
1283
1303
1284 def appendfile(path, text):
1304 def appendfile(path, text):
1285 fp = open(path, 'ab')
1305 fp = open(path, 'ab')
1286 try:
1306 try:
1287 fp.write(text)
1307 fp.write(text)
1288 finally:
1308 finally:
1289 fp.close()
1309 fp.close()
1290
1310
1291 class chunkbuffer(object):
1311 class chunkbuffer(object):
1292 """Allow arbitrary sized chunks of data to be efficiently read from an
1312 """Allow arbitrary sized chunks of data to be efficiently read from an
1293 iterator over chunks of arbitrary size."""
1313 iterator over chunks of arbitrary size."""
1294
1314
1295 def __init__(self, in_iter):
1315 def __init__(self, in_iter):
1296 """in_iter is the iterator that's iterating over the input chunks.
1316 """in_iter is the iterator that's iterating over the input chunks.
1297         Chunks larger than 1 MB are split into pieces of at most 256 kB."""
1317         Chunks larger than 1 MB are split into pieces of at most 256 kB."""
1298 def splitbig(chunks):
1318 def splitbig(chunks):
1299 for chunk in chunks:
1319 for chunk in chunks:
1300 if len(chunk) > 2**20:
1320 if len(chunk) > 2**20:
1301 pos = 0
1321 pos = 0
1302 while pos < len(chunk):
1322 while pos < len(chunk):
1303 end = pos + 2 ** 18
1323 end = pos + 2 ** 18
1304 yield chunk[pos:end]
1324 yield chunk[pos:end]
1305 pos = end
1325 pos = end
1306 else:
1326 else:
1307 yield chunk
1327 yield chunk
1308 self.iter = splitbig(in_iter)
1328 self.iter = splitbig(in_iter)
1309 self._queue = collections.deque()
1329 self._queue = collections.deque()
1310 self._chunkoffset = 0
1330 self._chunkoffset = 0
1311
1331
1312 def read(self, l=None):
1332 def read(self, l=None):
1313 """Read L bytes of data from the iterator of chunks of data.
1333 """Read L bytes of data from the iterator of chunks of data.
1314 Returns less than L bytes if the iterator runs dry.
1334 Returns less than L bytes if the iterator runs dry.
1315
1335
1316 If size parameter is omitted, read everything"""
1336 If size parameter is omitted, read everything"""
1317 if l is None:
1337 if l is None:
1318 return ''.join(self.iter)
1338 return ''.join(self.iter)
1319
1339
1320 left = l
1340 left = l
1321 buf = []
1341 buf = []
1322 queue = self._queue
1342 queue = self._queue
1323 while left > 0:
1343 while left > 0:
1324 # refill the queue
1344 # refill the queue
1325 if not queue:
1345 if not queue:
1326 target = 2**18
1346 target = 2**18
1327 for chunk in self.iter:
1347 for chunk in self.iter:
1328 queue.append(chunk)
1348 queue.append(chunk)
1329 target -= len(chunk)
1349 target -= len(chunk)
1330 if target <= 0:
1350 if target <= 0:
1331 break
1351 break
1332 if not queue:
1352 if not queue:
1333 break
1353 break
1334
1354
1335 # The easy way to do this would be to queue.popleft(), modify the
1355 # The easy way to do this would be to queue.popleft(), modify the
1336 # chunk (if necessary), then queue.appendleft(). However, for cases
1356 # chunk (if necessary), then queue.appendleft(). However, for cases
1337 # where we read partial chunk content, this incurs 2 dequeue
1357 # where we read partial chunk content, this incurs 2 dequeue
1338 # mutations and creates a new str for the remaining chunk in the
1358 # mutations and creates a new str for the remaining chunk in the
1339 # queue. Our code below avoids this overhead.
1359 # queue. Our code below avoids this overhead.
1340
1360
1341 chunk = queue[0]
1361 chunk = queue[0]
1342 chunkl = len(chunk)
1362 chunkl = len(chunk)
1343 offset = self._chunkoffset
1363 offset = self._chunkoffset
1344
1364
1345 # Use full chunk.
1365 # Use full chunk.
1346 if offset == 0 and left >= chunkl:
1366 if offset == 0 and left >= chunkl:
1347 left -= chunkl
1367 left -= chunkl
1348 queue.popleft()
1368 queue.popleft()
1349 buf.append(chunk)
1369 buf.append(chunk)
1350 # self._chunkoffset remains at 0.
1370 # self._chunkoffset remains at 0.
1351 continue
1371 continue
1352
1372
1353 chunkremaining = chunkl - offset
1373 chunkremaining = chunkl - offset
1354
1374
1355 # Use all of unconsumed part of chunk.
1375 # Use all of unconsumed part of chunk.
1356 if left >= chunkremaining:
1376 if left >= chunkremaining:
1357 left -= chunkremaining
1377 left -= chunkremaining
1358 queue.popleft()
1378 queue.popleft()
1359 # offset == 0 is enabled by block above, so this won't merely
1379 # offset == 0 is enabled by block above, so this won't merely
1360 # copy via ``chunk[0:]``.
1380 # copy via ``chunk[0:]``.
1361 buf.append(chunk[offset:])
1381 buf.append(chunk[offset:])
1362 self._chunkoffset = 0
1382 self._chunkoffset = 0
1363
1383
1364 # Partial chunk needed.
1384 # Partial chunk needed.
1365 else:
1385 else:
1366 buf.append(chunk[offset:offset + left])
1386 buf.append(chunk[offset:offset + left])
1367 self._chunkoffset += left
1387 self._chunkoffset += left
1368 left -= chunkremaining
1388 left -= chunkremaining
1369
1389
1370 return ''.join(buf)
1390 return ''.join(buf)
1371
1391
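# Illustrative sketch (not part of the original util.py): reading fixed-size
# pieces out of an iterator of unevenly sized chunks.
def _demochunkbuffer():
    buf = chunkbuffer(iter(['abc', 'defgh', 'ij']))
    first = buf.read(4)    # -> 'abcd'
    second = buf.read(6)   # -> 'efghij' (spans the two remaining chunks)
    return first, second
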
1372 def filechunkiter(f, size=65536, limit=None):
1392 def filechunkiter(f, size=65536, limit=None):
1373 """Create a generator that produces the data in the file size
1393 """Create a generator that produces the data in the file size
1374 (default 65536) bytes at a time, up to optional limit (default is
1394 (default 65536) bytes at a time, up to optional limit (default is
1375 to read all data). Chunks may be less than size bytes if the
1395 to read all data). Chunks may be less than size bytes if the
1376 chunk is the last chunk in the file, or the file is a socket or
1396 chunk is the last chunk in the file, or the file is a socket or
1377 some other type of file that sometimes reads less data than is
1397 some other type of file that sometimes reads less data than is
1378 requested."""
1398 requested."""
1379 assert size >= 0
1399 assert size >= 0
1380 assert limit is None or limit >= 0
1400 assert limit is None or limit >= 0
1381 while True:
1401 while True:
1382 if limit is None:
1402 if limit is None:
1383 nbytes = size
1403 nbytes = size
1384 else:
1404 else:
1385 nbytes = min(limit, size)
1405 nbytes = min(limit, size)
1386 s = nbytes and f.read(nbytes)
1406 s = nbytes and f.read(nbytes)
1387 if not s:
1407 if not s:
1388 break
1408 break
1389 if limit:
1409 if limit:
1390 limit -= len(s)
1410 limit -= len(s)
1391 yield s
1411 yield s
1392
1412
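# Illustrative sketch (not part of the original util.py): streaming at most
# 1 MB out of an already opened file in 64 kB pieces.  The file object `fp`
# is assumed to have been opened elsewhere (e.g. with posixfile).
def _demofilechunkiter(fp):
    copied = 0
    for piece in filechunkiter(fp, size=65536, limit=1048576):
        copied += len(piece)
    return copied
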
1393 def makedate(timestamp=None):
1413 def makedate(timestamp=None):
1394 '''Return a unix timestamp (or the current time) as a (unixtime,
1414 '''Return a unix timestamp (or the current time) as a (unixtime,
1395 offset) tuple based off the local timezone.'''
1415 offset) tuple based off the local timezone.'''
1396 if timestamp is None:
1416 if timestamp is None:
1397 timestamp = time.time()
1417 timestamp = time.time()
1398 if timestamp < 0:
1418 if timestamp < 0:
1399 hint = _("check your clock")
1419 hint = _("check your clock")
1400 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
1420 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
1401 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
1421 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
1402 datetime.datetime.fromtimestamp(timestamp))
1422 datetime.datetime.fromtimestamp(timestamp))
1403 tz = delta.days * 86400 + delta.seconds
1423 tz = delta.days * 86400 + delta.seconds
1404 return timestamp, tz
1424 return timestamp, tz
1405
1425
1406 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1426 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1407 """represent a (unixtime, offset) tuple as a localized time.
1427 """represent a (unixtime, offset) tuple as a localized time.
1408 unixtime is seconds since the epoch, and offset is the time zone's
1428 unixtime is seconds since the epoch, and offset is the time zone's
1409     number of seconds away from UTC. The time zone is appended only if
1429     number of seconds away from UTC. The time zone is appended only if
1410     the format string contains "%1", "%2" or "%z"."""
1430     the format string contains "%1", "%2" or "%z"."""
1411 t, tz = date or makedate()
1431 t, tz = date or makedate()
1412 if t < 0:
1432 if t < 0:
1413 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
1433 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
1414 tz = 0
1434 tz = 0
1415 if "%1" in format or "%2" in format or "%z" in format:
1435 if "%1" in format or "%2" in format or "%z" in format:
1416 sign = (tz > 0) and "-" or "+"
1436 sign = (tz > 0) and "-" or "+"
1417 minutes = abs(tz) // 60
1437 minutes = abs(tz) // 60
1418 q, r = divmod(minutes, 60)
1438 q, r = divmod(minutes, 60)
1419 format = format.replace("%z", "%1%2")
1439 format = format.replace("%z", "%1%2")
1420 format = format.replace("%1", "%c%02d" % (sign, q))
1440 format = format.replace("%1", "%c%02d" % (sign, q))
1421 format = format.replace("%2", "%02d" % r)
1441 format = format.replace("%2", "%02d" % r)
1422 try:
1442 try:
1423 t = time.gmtime(float(t) - tz)
1443 t = time.gmtime(float(t) - tz)
1424 except ValueError:
1444 except ValueError:
1425 # time was out of range
1445 # time was out of range
1426 t = time.gmtime(sys.maxint)
1446 t = time.gmtime(sys.maxint)
1427 s = time.strftime(format, t)
1447 s = time.strftime(format, t)
1428 return s
1448 return s
1429
1449
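# Illustrative sketch (not part of the original util.py): rendering a
# (unixtime, offset) tuple with a locale-independent format.  86400 is one
# day past the epoch; a positive offset means west of UTC, hence the '-'.
def _demodatestr():
    assert datestr((86400, 0), '%Y-%m-%d %H:%M %1%2') == '1970-01-02 00:00 +0000'
    return datestr((86400, 18000), '%Y-%m-%d %H:%M %1%2')
    # -> '1970-01-01 19:00 -0500'
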
1430 def shortdate(date=None):
1450 def shortdate(date=None):
1431     """turn (timestamp, tzoff) tuple into iso 8601 date."""
1451     """turn (timestamp, tzoff) tuple into iso 8601 date."""
1432 return datestr(date, format='%Y-%m-%d')
1452 return datestr(date, format='%Y-%m-%d')
1433
1453
1434 def parsetimezone(tz):
1454 def parsetimezone(tz):
1435 """parse a timezone string and return an offset integer"""
1455 """parse a timezone string and return an offset integer"""
1436 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1456 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1437 sign = (tz[0] == "+") and 1 or -1
1457 sign = (tz[0] == "+") and 1 or -1
1438 hours = int(tz[1:3])
1458 hours = int(tz[1:3])
1439 minutes = int(tz[3:5])
1459 minutes = int(tz[3:5])
1440 return -sign * (hours * 60 + minutes) * 60
1460 return -sign * (hours * 60 + minutes) * 60
1441 if tz == "GMT" or tz == "UTC":
1461 if tz == "GMT" or tz == "UTC":
1442 return 0
1462 return 0
1443 return None
1463 return None
1444
1464
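# Illustrative sketch (not part of the original util.py): the sign convention
# of parsetimezone() follows "unixtime = localunixtime + offset", so zones
# east of UTC come back negative.
def _demoparsetimezone():
    assert parsetimezone('+0200') == -7200
    assert parsetimezone('-0430') == 16200
    assert parsetimezone('UTC') == 0
    assert parsetimezone('CET') is None    # unrecognized names give None
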
1445 def strdate(string, format, defaults=[]):
1465 def strdate(string, format, defaults=[]):
1446 """parse a localized time string and return a (unixtime, offset) tuple.
1466 """parse a localized time string and return a (unixtime, offset) tuple.
1447 if the string cannot be parsed, ValueError is raised."""
1467 if the string cannot be parsed, ValueError is raised."""
1448 # NOTE: unixtime = localunixtime + offset
1468 # NOTE: unixtime = localunixtime + offset
1449 offset, date = parsetimezone(string.split()[-1]), string
1469 offset, date = parsetimezone(string.split()[-1]), string
1450 if offset is not None:
1470 if offset is not None:
1451 date = " ".join(string.split()[:-1])
1471 date = " ".join(string.split()[:-1])
1452
1472
1453 # add missing elements from defaults
1473 # add missing elements from defaults
1454 usenow = False # default to using biased defaults
1474 usenow = False # default to using biased defaults
1455 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1475 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1456 found = [True for p in part if ("%"+p) in format]
1476 found = [True for p in part if ("%"+p) in format]
1457 if not found:
1477 if not found:
1458 date += "@" + defaults[part][usenow]
1478 date += "@" + defaults[part][usenow]
1459 format += "@%" + part[0]
1479 format += "@%" + part[0]
1460 else:
1480 else:
1461 # We've found a specific time element, less specific time
1481 # We've found a specific time element, less specific time
1462 # elements are relative to today
1482 # elements are relative to today
1463 usenow = True
1483 usenow = True
1464
1484
1465 timetuple = time.strptime(date, format)
1485 timetuple = time.strptime(date, format)
1466 localunixtime = int(calendar.timegm(timetuple))
1486 localunixtime = int(calendar.timegm(timetuple))
1467 if offset is None:
1487 if offset is None:
1468 # local timezone
1488 # local timezone
1469 unixtime = int(time.mktime(timetuple))
1489 unixtime = int(time.mktime(timetuple))
1470 offset = unixtime - localunixtime
1490 offset = unixtime - localunixtime
1471 else:
1491 else:
1472 unixtime = localunixtime + offset
1492 unixtime = localunixtime + offset
1473 return unixtime, offset
1493 return unixtime, offset
1474
1494
1475 def parsedate(date, formats=None, bias=None):
1495 def parsedate(date, formats=None, bias=None):
1476 """parse a localized date/time and return a (unixtime, offset) tuple.
1496 """parse a localized date/time and return a (unixtime, offset) tuple.
1477
1497
1478 The date may be a "unixtime offset" string or in one of the specified
1498 The date may be a "unixtime offset" string or in one of the specified
1479 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1499 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1480
1500
1481 >>> parsedate(' today ') == parsedate(\
1501 >>> parsedate(' today ') == parsedate(\
1482 datetime.date.today().strftime('%b %d'))
1502 datetime.date.today().strftime('%b %d'))
1483 True
1503 True
1484 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1504 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1485 datetime.timedelta(days=1)\
1505 datetime.timedelta(days=1)\
1486 ).strftime('%b %d'))
1506 ).strftime('%b %d'))
1487 True
1507 True
1488 >>> now, tz = makedate()
1508 >>> now, tz = makedate()
1489 >>> strnow, strtz = parsedate('now')
1509 >>> strnow, strtz = parsedate('now')
1490 >>> (strnow - now) < 1
1510 >>> (strnow - now) < 1
1491 True
1511 True
1492 >>> tz == strtz
1512 >>> tz == strtz
1493 True
1513 True
1494 """
1514 """
1495 if bias is None:
1515 if bias is None:
1496 bias = {}
1516 bias = {}
1497 if not date:
1517 if not date:
1498 return 0, 0
1518 return 0, 0
1499 if isinstance(date, tuple) and len(date) == 2:
1519 if isinstance(date, tuple) and len(date) == 2:
1500 return date
1520 return date
1501 if not formats:
1521 if not formats:
1502 formats = defaultdateformats
1522 formats = defaultdateformats
1503 date = date.strip()
1523 date = date.strip()
1504
1524
1505 if date == 'now' or date == _('now'):
1525 if date == 'now' or date == _('now'):
1506 return makedate()
1526 return makedate()
1507 if date == 'today' or date == _('today'):
1527 if date == 'today' or date == _('today'):
1508 date = datetime.date.today().strftime('%b %d')
1528 date = datetime.date.today().strftime('%b %d')
1509 elif date == 'yesterday' or date == _('yesterday'):
1529 elif date == 'yesterday' or date == _('yesterday'):
1510 date = (datetime.date.today() -
1530 date = (datetime.date.today() -
1511 datetime.timedelta(days=1)).strftime('%b %d')
1531 datetime.timedelta(days=1)).strftime('%b %d')
1512
1532
1513 try:
1533 try:
1514 when, offset = map(int, date.split(' '))
1534 when, offset = map(int, date.split(' '))
1515 except ValueError:
1535 except ValueError:
1516 # fill out defaults
1536 # fill out defaults
1517 now = makedate()
1537 now = makedate()
1518 defaults = {}
1538 defaults = {}
1519 for part in ("d", "mb", "yY", "HI", "M", "S"):
1539 for part in ("d", "mb", "yY", "HI", "M", "S"):
1520 # this piece is for rounding the specific end of unknowns
1540 # this piece is for rounding the specific end of unknowns
1521 b = bias.get(part)
1541 b = bias.get(part)
1522 if b is None:
1542 if b is None:
1523 if part[0] in "HMS":
1543 if part[0] in "HMS":
1524 b = "00"
1544 b = "00"
1525 else:
1545 else:
1526 b = "0"
1546 b = "0"
1527
1547
1528 # this piece is for matching the generic end to today's date
1548 # this piece is for matching the generic end to today's date
1529 n = datestr(now, "%" + part[0])
1549 n = datestr(now, "%" + part[0])
1530
1550
1531 defaults[part] = (b, n)
1551 defaults[part] = (b, n)
1532
1552
1533 for format in formats:
1553 for format in formats:
1534 try:
1554 try:
1535 when, offset = strdate(date, format, defaults)
1555 when, offset = strdate(date, format, defaults)
1536 except (ValueError, OverflowError):
1556 except (ValueError, OverflowError):
1537 pass
1557 pass
1538 else:
1558 else:
1539 break
1559 break
1540 else:
1560 else:
1541 raise Abort(_('invalid date: %r') % date)
1561 raise Abort(_('invalid date: %r') % date)
1542 # validate explicit (probably user-specified) date and
1562 # validate explicit (probably user-specified) date and
1543 # time zone offset. values must fit in signed 32 bits for
1563 # time zone offset. values must fit in signed 32 bits for
1544 # current 32-bit linux runtimes. timezones go from UTC-12
1564 # current 32-bit linux runtimes. timezones go from UTC-12
1545 # to UTC+14
1565 # to UTC+14
1546 if abs(when) > 0x7fffffff:
1566 if abs(when) > 0x7fffffff:
1547 raise Abort(_('date exceeds 32 bits: %d') % when)
1567 raise Abort(_('date exceeds 32 bits: %d') % when)
1548 if when < 0:
1568 if when < 0:
1549 raise Abort(_('negative date value: %d') % when)
1569 raise Abort(_('negative date value: %d') % when)
1550 if offset < -50400 or offset > 43200:
1570 if offset < -50400 or offset > 43200:
1551 raise Abort(_('impossible time zone offset: %d') % offset)
1571 raise Abort(_('impossible time zone offset: %d') % offset)
1552 return when, offset
1572 return when, offset
1553
1573
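# Illustrative sketch (not part of the original util.py): besides the
# doctested forms above, parsedate() accepts the internal "unixtime offset"
# notation, passes (unixtime, offset) tuples through, and maps empty dates
# to the epoch.
def _demoparsedate():
    assert parsedate('1000000000 -7200') == (1000000000, -7200)
    assert parsedate((1000000000, 0)) == (1000000000, 0)
    assert parsedate('') == (0, 0)
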
1554 def matchdate(date):
1574 def matchdate(date):
1555 """Return a function that matches a given date match specifier
1575 """Return a function that matches a given date match specifier
1556
1576
1557 Formats include:
1577 Formats include:
1558
1578
1559 '{date}' match a given date to the accuracy provided
1579 '{date}' match a given date to the accuracy provided
1560
1580
1561 '<{date}' on or before a given date
1581 '<{date}' on or before a given date
1562
1582
1563 '>{date}' on or after a given date
1583 '>{date}' on or after a given date
1564
1584
1565 >>> p1 = parsedate("10:29:59")
1585 >>> p1 = parsedate("10:29:59")
1566 >>> p2 = parsedate("10:30:00")
1586 >>> p2 = parsedate("10:30:00")
1567 >>> p3 = parsedate("10:30:59")
1587 >>> p3 = parsedate("10:30:59")
1568 >>> p4 = parsedate("10:31:00")
1588 >>> p4 = parsedate("10:31:00")
1569 >>> p5 = parsedate("Sep 15 10:30:00 1999")
1589 >>> p5 = parsedate("Sep 15 10:30:00 1999")
1570 >>> f = matchdate("10:30")
1590 >>> f = matchdate("10:30")
1571 >>> f(p1[0])
1591 >>> f(p1[0])
1572 False
1592 False
1573 >>> f(p2[0])
1593 >>> f(p2[0])
1574 True
1594 True
1575 >>> f(p3[0])
1595 >>> f(p3[0])
1576 True
1596 True
1577 >>> f(p4[0])
1597 >>> f(p4[0])
1578 False
1598 False
1579 >>> f(p5[0])
1599 >>> f(p5[0])
1580 False
1600 False
1581 """
1601 """
1582
1602
1583 def lower(date):
1603 def lower(date):
1584 d = {'mb': "1", 'd': "1"}
1604 d = {'mb': "1", 'd': "1"}
1585 return parsedate(date, extendeddateformats, d)[0]
1605 return parsedate(date, extendeddateformats, d)[0]
1586
1606
1587 def upper(date):
1607 def upper(date):
1588 d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
1608 d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
1589 for days in ("31", "30", "29"):
1609 for days in ("31", "30", "29"):
1590 try:
1610 try:
1591 d["d"] = days
1611 d["d"] = days
1592 return parsedate(date, extendeddateformats, d)[0]
1612 return parsedate(date, extendeddateformats, d)[0]
1593 except Abort:
1613 except Abort:
1594 pass
1614 pass
1595 d["d"] = "28"
1615 d["d"] = "28"
1596 return parsedate(date, extendeddateformats, d)[0]
1616 return parsedate(date, extendeddateformats, d)[0]
1597
1617
1598 date = date.strip()
1618 date = date.strip()
1599
1619
1600 if not date:
1620 if not date:
1601 raise Abort(_("dates cannot consist entirely of whitespace"))
1621 raise Abort(_("dates cannot consist entirely of whitespace"))
1602 elif date[0] == "<":
1622 elif date[0] == "<":
1603 if not date[1:]:
1623 if not date[1:]:
1604 raise Abort(_("invalid day spec, use '<DATE'"))
1624 raise Abort(_("invalid day spec, use '<DATE'"))
1605 when = upper(date[1:])
1625 when = upper(date[1:])
1606 return lambda x: x <= when
1626 return lambda x: x <= when
1607 elif date[0] == ">":
1627 elif date[0] == ">":
1608 if not date[1:]:
1628 if not date[1:]:
1609 raise Abort(_("invalid day spec, use '>DATE'"))
1629 raise Abort(_("invalid day spec, use '>DATE'"))
1610 when = lower(date[1:])
1630 when = lower(date[1:])
1611 return lambda x: x >= when
1631 return lambda x: x >= when
1612 elif date[0] == "-":
1632 elif date[0] == "-":
1613 try:
1633 try:
1614 days = int(date[1:])
1634 days = int(date[1:])
1615 except ValueError:
1635 except ValueError:
1616 raise Abort(_("invalid day spec: %s") % date[1:])
1636 raise Abort(_("invalid day spec: %s") % date[1:])
1617 if days < 0:
1637 if days < 0:
1618 raise Abort(_('%s must be nonnegative (see "hg help dates")')
1638 raise Abort(_('%s must be nonnegative (see "hg help dates")')
1619 % date[1:])
1639 % date[1:])
1620 when = makedate()[0] - days * 3600 * 24
1640 when = makedate()[0] - days * 3600 * 24
1621 return lambda x: x >= when
1641 return lambda x: x >= when
1622 elif " to " in date:
1642 elif " to " in date:
1623 a, b = date.split(" to ")
1643 a, b = date.split(" to ")
1624 start, stop = lower(a), upper(b)
1644 start, stop = lower(a), upper(b)
1625 return lambda x: x >= start and x <= stop
1645 return lambda x: x >= start and x <= stop
1626 else:
1646 else:
1627 start, stop = lower(date), upper(date)
1647 start, stop = lower(date), upper(date)
1628 return lambda x: x >= start and x <= stop
1648 return lambda x: x >= start and x <= stop
1629
1649
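# Illustrative sketch (not part of the original util.py): the relative form
# "-N" keeps everything from the last N days.
def _demomatchdate():
    lastweek = matchdate('-7')
    return lastweek(makedate()[0])    # the current time matches -> True
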
1630 def stringmatcher(pattern):
1650 def stringmatcher(pattern):
1631 """
1651 """
1632 accepts a string, possibly starting with 're:' or 'literal:' prefix.
1652 accepts a string, possibly starting with 're:' or 'literal:' prefix.
1633 returns the matcher name, pattern, and matcher function.
1653 returns the matcher name, pattern, and matcher function.
1634 missing or unknown prefixes are treated as literal matches.
1654 missing or unknown prefixes are treated as literal matches.
1635
1655
1636 helper for tests:
1656 helper for tests:
1637 >>> def test(pattern, *tests):
1657 >>> def test(pattern, *tests):
1638 ... kind, pattern, matcher = stringmatcher(pattern)
1658 ... kind, pattern, matcher = stringmatcher(pattern)
1639 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
1659 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
1640
1660
1641 exact matching (no prefix):
1661 exact matching (no prefix):
1642 >>> test('abcdefg', 'abc', 'def', 'abcdefg')
1662 >>> test('abcdefg', 'abc', 'def', 'abcdefg')
1643 ('literal', 'abcdefg', [False, False, True])
1663 ('literal', 'abcdefg', [False, False, True])
1644
1664
1645 regex matching ('re:' prefix)
1665 regex matching ('re:' prefix)
1646 >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
1666 >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
1647 ('re', 'a.+b', [False, False, True])
1667 ('re', 'a.+b', [False, False, True])
1648
1668
1649 force exact matches ('literal:' prefix)
1669 force exact matches ('literal:' prefix)
1650 >>> test('literal:re:foobar', 'foobar', 're:foobar')
1670 >>> test('literal:re:foobar', 'foobar', 're:foobar')
1651 ('literal', 're:foobar', [False, True])
1671 ('literal', 're:foobar', [False, True])
1652
1672
1653 unknown prefixes are ignored and treated as literals
1673 unknown prefixes are ignored and treated as literals
1654 >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
1674 >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
1655 ('literal', 'foo:bar', [False, False, True])
1675 ('literal', 'foo:bar', [False, False, True])
1656 """
1676 """
1657 if pattern.startswith('re:'):
1677 if pattern.startswith('re:'):
1658 pattern = pattern[3:]
1678 pattern = pattern[3:]
1659 try:
1679 try:
1660 regex = remod.compile(pattern)
1680 regex = remod.compile(pattern)
1661 except remod.error as e:
1681 except remod.error as e:
1662 raise error.ParseError(_('invalid regular expression: %s')
1682 raise error.ParseError(_('invalid regular expression: %s')
1663 % e)
1683 % e)
1664 return 're', pattern, regex.search
1684 return 're', pattern, regex.search
1665 elif pattern.startswith('literal:'):
1685 elif pattern.startswith('literal:'):
1666 pattern = pattern[8:]
1686 pattern = pattern[8:]
1667 return 'literal', pattern, pattern.__eq__
1687 return 'literal', pattern, pattern.__eq__
1668
1688
1669 def shortuser(user):
1689 def shortuser(user):
1670 """Return a short representation of a user name or email address."""
1690 """Return a short representation of a user name or email address."""
1671 f = user.find('@')
1691 f = user.find('@')
1672 if f >= 0:
1692 if f >= 0:
1673 user = user[:f]
1693 user = user[:f]
1674 f = user.find('<')
1694 f = user.find('<')
1675 if f >= 0:
1695 if f >= 0:
1676 user = user[f + 1:]
1696 user = user[f + 1:]
1677 f = user.find(' ')
1697 f = user.find(' ')
1678 if f >= 0:
1698 if f >= 0:
1679 user = user[:f]
1699 user = user[:f]
1680 f = user.find('.')
1700 f = user.find('.')
1681 if f >= 0:
1701 if f >= 0:
1682 user = user[:f]
1702 user = user[:f]
1683 return user
1703 return user
1684
1704
1685 def emailuser(user):
1705 def emailuser(user):
1686 """Return the user portion of an email address."""
1706 """Return the user portion of an email address."""
1687 f = user.find('@')
1707 f = user.find('@')
1688 if f >= 0:
1708 if f >= 0:
1689 user = user[:f]
1709 user = user[:f]
1690 f = user.find('<')
1710 f = user.find('<')
1691 if f >= 0:
1711 if f >= 0:
1692 user = user[f + 1:]
1712 user = user[f + 1:]
1693 return user
1713 return user
1694
1714
1695 def email(author):
1715 def email(author):
1696 '''get email of author.'''
1716 '''get email of author.'''
1697 r = author.find('>')
1717 r = author.find('>')
1698 if r == -1:
1718 if r == -1:
1699 r = None
1719 r = None
1700 return author[author.find('<') + 1:r]
1720 return author[author.find('<') + 1:r]
1701
1721
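# Illustrative sketch (not part of the original util.py): how the three
# helpers above slice up a typical "Name <address>" author string; the
# author value is made up for the example.
def _demoauthorhelpers():
    author = 'Jane Q. Public <jane.public@example.com>'
    assert email(author) == 'jane.public@example.com'
    assert emailuser(author) == 'jane.public'
    assert shortuser(author) == 'jane'
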
1702 def ellipsis(text, maxlength=400):
1722 def ellipsis(text, maxlength=400):
1703 """Trim string to at most maxlength (default: 400) columns in display."""
1723 """Trim string to at most maxlength (default: 400) columns in display."""
1704 return encoding.trim(text, maxlength, ellipsis='...')
1724 return encoding.trim(text, maxlength, ellipsis='...')
1705
1725
1706 def unitcountfn(*unittable):
1726 def unitcountfn(*unittable):
1707 '''return a function that renders a readable count of some quantity'''
1727 '''return a function that renders a readable count of some quantity'''
1708
1728
1709 def go(count):
1729 def go(count):
1710 for multiplier, divisor, format in unittable:
1730 for multiplier, divisor, format in unittable:
1711 if count >= divisor * multiplier:
1731 if count >= divisor * multiplier:
1712 return format % (count / float(divisor))
1732 return format % (count / float(divisor))
1713 return unittable[-1][2] % count
1733 return unittable[-1][2] % count
1714
1734
1715 return go
1735 return go
1716
1736
1717 bytecount = unitcountfn(
1737 bytecount = unitcountfn(
1718 (100, 1 << 30, _('%.0f GB')),
1738 (100, 1 << 30, _('%.0f GB')),
1719 (10, 1 << 30, _('%.1f GB')),
1739 (10, 1 << 30, _('%.1f GB')),
1720 (1, 1 << 30, _('%.2f GB')),
1740 (1, 1 << 30, _('%.2f GB')),
1721 (100, 1 << 20, _('%.0f MB')),
1741 (100, 1 << 20, _('%.0f MB')),
1722 (10, 1 << 20, _('%.1f MB')),
1742 (10, 1 << 20, _('%.1f MB')),
1723 (1, 1 << 20, _('%.2f MB')),
1743 (1, 1 << 20, _('%.2f MB')),
1724 (100, 1 << 10, _('%.0f KB')),
1744 (100, 1 << 10, _('%.0f KB')),
1725 (10, 1 << 10, _('%.1f KB')),
1745 (10, 1 << 10, _('%.1f KB')),
1726 (1, 1 << 10, _('%.2f KB')),
1746 (1, 1 << 10, _('%.2f KB')),
1727 (1, 1, _('%.0f bytes')),
1747 (1, 1, _('%.0f bytes')),
1728 )
1748 )
1729
1749
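# Illustrative sketch (not part of the original util.py): the table above is
# scanned top-down, so the first matching magnitude/precision wins.  The
# expected strings assume the untranslated (English) message catalog.
def _demobytecount():
    assert bytecount(100) == '100 bytes'
    assert bytecount(2048) == '2.00 KB'
    assert bytecount(200 * (1 << 20)) == '200 MB'
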
1730 def uirepr(s):
1750 def uirepr(s):
1731 # Avoid double backslash in Windows path repr()
1751 # Avoid double backslash in Windows path repr()
1732 return repr(s).replace('\\\\', '\\')
1752 return repr(s).replace('\\\\', '\\')
1733
1753
1734 # delay building the width-aware TextWrapper subclass (cached on first use)
1754 # delay building the width-aware TextWrapper subclass (cached on first use)
1735 def MBTextWrapper(**kwargs):
1755 def MBTextWrapper(**kwargs):
1736 class tw(textwrap.TextWrapper):
1756 class tw(textwrap.TextWrapper):
1737 """
1757 """
1738 Extend TextWrapper for width-awareness.
1758 Extend TextWrapper for width-awareness.
1739
1759
1740         Neither the number of 'bytes' in any encoding nor the number of
1760         Neither the number of 'bytes' in any encoding nor the number of
1741         'characters' is appropriate to calculate terminal columns for a string.
1761         'characters' is appropriate to calculate terminal columns for a string.
1742
1762
1743         The original TextWrapper implementation uses the built-in 'len()' directly,
1763         The original TextWrapper implementation uses the built-in 'len()' directly,
1744         so overriding is needed to use the width information of each character.
1764         so overriding is needed to use the width information of each character.
1745
1765
1746         In addition, characters classified as 'ambiguous' width are
1766         In addition, characters classified as 'ambiguous' width are
1747         treated as wide in East Asian locales, but as narrow elsewhere.
1767         treated as wide in East Asian locales, but as narrow elsewhere.
1748
1768
1749         This requires a user decision to determine the width of such characters.
1769         This requires a user decision to determine the width of such characters.
1750 """
1770 """
1751 def _cutdown(self, ucstr, space_left):
1771 def _cutdown(self, ucstr, space_left):
1752 l = 0
1772 l = 0
1753 colwidth = encoding.ucolwidth
1773 colwidth = encoding.ucolwidth
1754 for i in xrange(len(ucstr)):
1774 for i in xrange(len(ucstr)):
1755 l += colwidth(ucstr[i])
1775 l += colwidth(ucstr[i])
1756 if space_left < l:
1776 if space_left < l:
1757 return (ucstr[:i], ucstr[i:])
1777 return (ucstr[:i], ucstr[i:])
1758 return ucstr, ''
1778 return ucstr, ''
1759
1779
1760 # overriding of base class
1780 # overriding of base class
1761 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1781 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1762 space_left = max(width - cur_len, 1)
1782 space_left = max(width - cur_len, 1)
1763
1783
1764 if self.break_long_words:
1784 if self.break_long_words:
1765 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1785 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1766 cur_line.append(cut)
1786 cur_line.append(cut)
1767 reversed_chunks[-1] = res
1787 reversed_chunks[-1] = res
1768 elif not cur_line:
1788 elif not cur_line:
1769 cur_line.append(reversed_chunks.pop())
1789 cur_line.append(reversed_chunks.pop())
1770
1790
1771 # this overriding code is imported from TextWrapper of Python 2.6
1791 # this overriding code is imported from TextWrapper of Python 2.6
1772 # to calculate columns of string by 'encoding.ucolwidth()'
1792 # to calculate columns of string by 'encoding.ucolwidth()'
1773 def _wrap_chunks(self, chunks):
1793 def _wrap_chunks(self, chunks):
1774 colwidth = encoding.ucolwidth
1794 colwidth = encoding.ucolwidth
1775
1795
1776 lines = []
1796 lines = []
1777 if self.width <= 0:
1797 if self.width <= 0:
1778 raise ValueError("invalid width %r (must be > 0)" % self.width)
1798 raise ValueError("invalid width %r (must be > 0)" % self.width)
1779
1799
1780 # Arrange in reverse order so items can be efficiently popped
1800 # Arrange in reverse order so items can be efficiently popped
1781             # from a stack of chunks.
1801             # from a stack of chunks.
1782 chunks.reverse()
1802 chunks.reverse()
1783
1803
1784 while chunks:
1804 while chunks:
1785
1805
1786 # Start the list of chunks that will make up the current line.
1806 # Start the list of chunks that will make up the current line.
1787 # cur_len is just the length of all the chunks in cur_line.
1807 # cur_len is just the length of all the chunks in cur_line.
1788 cur_line = []
1808 cur_line = []
1789 cur_len = 0
1809 cur_len = 0
1790
1810
1791 # Figure out which static string will prefix this line.
1811 # Figure out which static string will prefix this line.
1792 if lines:
1812 if lines:
1793 indent = self.subsequent_indent
1813 indent = self.subsequent_indent
1794 else:
1814 else:
1795 indent = self.initial_indent
1815 indent = self.initial_indent
1796
1816
1797 # Maximum width for this line.
1817 # Maximum width for this line.
1798 width = self.width - len(indent)
1818 width = self.width - len(indent)
1799
1819
1800 # First chunk on line is whitespace -- drop it, unless this
1820 # First chunk on line is whitespace -- drop it, unless this
1801 # is the very beginning of the text (i.e. no lines started yet).
1821 # is the very beginning of the text (i.e. no lines started yet).
1802 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
1822 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
1803 del chunks[-1]
1823 del chunks[-1]
1804
1824
1805 while chunks:
1825 while chunks:
1806 l = colwidth(chunks[-1])
1826 l = colwidth(chunks[-1])
1807
1827
1808 # Can at least squeeze this chunk onto the current line.
1828 # Can at least squeeze this chunk onto the current line.
1809 if cur_len + l <= width:
1829 if cur_len + l <= width:
1810 cur_line.append(chunks.pop())
1830 cur_line.append(chunks.pop())
1811 cur_len += l
1831 cur_len += l
1812
1832
1813 # Nope, this line is full.
1833 # Nope, this line is full.
1814 else:
1834 else:
1815 break
1835 break
1816
1836
1817 # The current line is full, and the next chunk is too big to
1837 # The current line is full, and the next chunk is too big to
1818 # fit on *any* line (not just this one).
1838 # fit on *any* line (not just this one).
1819 if chunks and colwidth(chunks[-1]) > width:
1839 if chunks and colwidth(chunks[-1]) > width:
1820 self._handle_long_word(chunks, cur_line, cur_len, width)
1840 self._handle_long_word(chunks, cur_line, cur_len, width)
1821
1841
1822 # If the last chunk on this line is all whitespace, drop it.
1842 # If the last chunk on this line is all whitespace, drop it.
1823 if (self.drop_whitespace and
1843 if (self.drop_whitespace and
1824 cur_line and cur_line[-1].strip() == ''):
1844 cur_line and cur_line[-1].strip() == ''):
1825 del cur_line[-1]
1845 del cur_line[-1]
1826
1846
1827 # Convert current line back to a string and store it in list
1847 # Convert current line back to a string and store it in list
1828 # of all lines (return value).
1848 # of all lines (return value).
1829 if cur_line:
1849 if cur_line:
1830 lines.append(indent + ''.join(cur_line))
1850 lines.append(indent + ''.join(cur_line))
1831
1851
1832 return lines
1852 return lines
1833
1853
1834 global MBTextWrapper
1854 global MBTextWrapper
1835 MBTextWrapper = tw
1855 MBTextWrapper = tw
1836 return tw(**kwargs)
1856 return tw(**kwargs)
1837
1857
1838 def wrap(line, width, initindent='', hangindent=''):
1858 def wrap(line, width, initindent='', hangindent=''):
1839 maxindent = max(len(hangindent), len(initindent))
1859 maxindent = max(len(hangindent), len(initindent))
1840 if width <= maxindent:
1860 if width <= maxindent:
1841 # adjust for weird terminal size
1861 # adjust for weird terminal size
1842 width = max(78, maxindent + 1)
1862 width = max(78, maxindent + 1)
1843 line = line.decode(encoding.encoding, encoding.encodingmode)
1863 line = line.decode(encoding.encoding, encoding.encodingmode)
1844 initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
1864 initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
1845 hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
1865 hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
1846 wrapper = MBTextWrapper(width=width,
1866 wrapper = MBTextWrapper(width=width,
1847 initial_indent=initindent,
1867 initial_indent=initindent,
1848 subsequent_indent=hangindent)
1868 subsequent_indent=hangindent)
1849 return wrapper.fill(line).encode(encoding.encoding)
1869 return wrapper.fill(line).encode(encoding.encoding)
1850
1870
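# Illustrative sketch (not part of the original util.py): wrap() counts
# display columns rather than bytes, so East Asian wide characters take two
# columns each; plain ASCII behaves like textwrap with a hanging indent.
# The sample text and indents are made up for the example.
def _demowrap():
    text = 'changeset description that needs folding'
    return wrap(text, width=20, initindent='summary: ', hangindent=' ' * 9)
    # -> 'summary: changeset' followed by three continuation lines, each
    #    starting with the nine-space hanging indent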
1851 def iterlines(iterator):
1871 def iterlines(iterator):
1852 for chunk in iterator:
1872 for chunk in iterator:
1853 for line in chunk.splitlines():
1873 for line in chunk.splitlines():
1854 yield line
1874 yield line
1855
1875
1856 def expandpath(path):
1876 def expandpath(path):
1857 return os.path.expanduser(os.path.expandvars(path))
1877 return os.path.expanduser(os.path.expandvars(path))
1858
1878
1859 def hgcmd():
1879 def hgcmd():
1860 """Return the command used to execute current hg
1880 """Return the command used to execute current hg
1861
1881
1862 This is different from hgexecutable() because on Windows we want
1882 This is different from hgexecutable() because on Windows we want
1863 to avoid things opening new shell windows like batch files, so we
1883 to avoid things opening new shell windows like batch files, so we
1864 get either the python call or current executable.
1884 get either the python call or current executable.
1865 """
1885 """
1866 if mainfrozen():
1886 if mainfrozen():
1867 return [sys.executable]
1887 return [sys.executable]
1868 return gethgcmd()
1888 return gethgcmd()
1869
1889
1870 def rundetached(args, condfn):
1890 def rundetached(args, condfn):
1871 """Execute the argument list in a detached process.
1891 """Execute the argument list in a detached process.
1872
1892
1873 condfn is a callable which is called repeatedly and should return
1893 condfn is a callable which is called repeatedly and should return
1874 True once the child process is known to have started successfully.
1894 True once the child process is known to have started successfully.
1875 At this point, the child process PID is returned. If the child
1895 At this point, the child process PID is returned. If the child
1876 process fails to start or finishes before condfn() evaluates to
1896 process fails to start or finishes before condfn() evaluates to
1877 True, return -1.
1897 True, return -1.
1878 """
1898 """
1879 # Windows case is easier because the child process is either
1899 # Windows case is easier because the child process is either
1880 # successfully starting and validating the condition or exiting
1900 # successfully starting and validating the condition or exiting
1881 # on failure. We just poll on its PID. On Unix, if the child
1901 # on failure. We just poll on its PID. On Unix, if the child
1882 # process fails to start, it will be left in a zombie state until
1902 # process fails to start, it will be left in a zombie state until
1883 # the parent wait on it, which we cannot do since we expect a long
1903 # the parent wait on it, which we cannot do since we expect a long
1884 # running process on success. Instead we listen for SIGCHLD telling
1904 # running process on success. Instead we listen for SIGCHLD telling
1885 # us our child process terminated.
1905 # us our child process terminated.
1886 terminated = set()
1906 terminated = set()
1887 def handler(signum, frame):
1907 def handler(signum, frame):
1888 terminated.add(os.wait())
1908 terminated.add(os.wait())
1889 prevhandler = None
1909 prevhandler = None
1890 SIGCHLD = getattr(signal, 'SIGCHLD', None)
1910 SIGCHLD = getattr(signal, 'SIGCHLD', None)
1891 if SIGCHLD is not None:
1911 if SIGCHLD is not None:
1892 prevhandler = signal.signal(SIGCHLD, handler)
1912 prevhandler = signal.signal(SIGCHLD, handler)
1893 try:
1913 try:
1894 pid = spawndetached(args)
1914 pid = spawndetached(args)
1895 while not condfn():
1915 while not condfn():
1896 if ((pid in terminated or not testpid(pid))
1916 if ((pid in terminated or not testpid(pid))
1897 and not condfn()):
1917 and not condfn()):
1898 return -1
1918 return -1
1899 time.sleep(0.1)
1919 time.sleep(0.1)
1900 return pid
1920 return pid
1901 finally:
1921 finally:
1902 if prevhandler is not None:
1922 if prevhandler is not None:
1903 signal.signal(signal.SIGCHLD, prevhandler)
1923 signal.signal(signal.SIGCHLD, prevhandler)
1904
1924
1905 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
1925 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
1906 """Return the result of interpolating items in the mapping into string s.
1926 """Return the result of interpolating items in the mapping into string s.
1907
1927
1908 prefix is a single character string, or a two character string with
1928 prefix is a single character string, or a two character string with
1909 a backslash as the first character if the prefix needs to be escaped in
1929 a backslash as the first character if the prefix needs to be escaped in
1910 a regular expression.
1930 a regular expression.
1911
1931
1912 fn is an optional function that will be applied to the replacement text
1932 fn is an optional function that will be applied to the replacement text
1913 just before replacement.
1933 just before replacement.
1914
1934
1915 escape_prefix is an optional flag that allows using doubled prefix for
1935 escape_prefix is an optional flag that allows using doubled prefix for
1916 its escaping.
1936 its escaping.
1917 """
1937 """
1918 fn = fn or (lambda s: s)
1938 fn = fn or (lambda s: s)
1919 patterns = '|'.join(mapping.keys())
1939 patterns = '|'.join(mapping.keys())
1920 if escape_prefix:
1940 if escape_prefix:
1921 patterns += '|' + prefix
1941 patterns += '|' + prefix
1922 if len(prefix) > 1:
1942 if len(prefix) > 1:
1923 prefix_char = prefix[1:]
1943 prefix_char = prefix[1:]
1924 else:
1944 else:
1925 prefix_char = prefix
1945 prefix_char = prefix
1926 mapping[prefix_char] = prefix_char
1946 mapping[prefix_char] = prefix_char
1927 r = remod.compile(r'%s(%s)' % (prefix, patterns))
1947 r = remod.compile(r'%s(%s)' % (prefix, patterns))
1928 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1948 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1929
1949
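A small sketch of interpolate() with an illustrative mapping: the '%' prefix needs no regex escaping, and escape_prefix turns a doubled prefix into a literal one.

# minimal sketch -- mapping and strings are illustrative
from mercurial import util

mapping = {'a': 'alpha', 'b': 'beta'}
util.interpolate('%', mapping, 'got %a and %b')
# -> 'got alpha and beta'

util.interpolate('%', {'u': 'joe'}, '100%% for %u', escape_prefix=True)
# -> '100% for joe'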
1930 def getport(port):
1950 def getport(port):
1931 """Return the port for a given network service.
1951 """Return the port for a given network service.
1932
1952
1933 If port is an integer, it's returned as is. If it's a string, it's
1953 If port is an integer, it's returned as is. If it's a string, it's
1934 looked up using socket.getservbyname(). If there's no matching
1954 looked up using socket.getservbyname(). If there's no matching
1935 service, error.Abort is raised.
1955 service, error.Abort is raised.
1936 """
1956 """
1937 try:
1957 try:
1938 return int(port)
1958 return int(port)
1939 except ValueError:
1959 except ValueError:
1940 pass
1960 pass
1941
1961
1942 try:
1962 try:
1943 return socket.getservbyname(port)
1963 return socket.getservbyname(port)
1944 except socket.error:
1964 except socket.error:
1945 raise Abort(_("no port number associated with service '%s'") % port)
1965 raise Abort(_("no port number associated with service '%s'") % port)
1946
1966
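Illustrative getport() calls; the 'http' lookup assumes the usual services database is available.

# minimal sketch
from mercurial import util

util.getport(8000)    # -> 8000
util.getport('8000')  # -> 8000
util.getport('http')  # -> 80, via socket.getservbyname()
# unknown service names raise Abort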
1947 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1967 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1948 '0': False, 'no': False, 'false': False, 'off': False,
1968 '0': False, 'no': False, 'false': False, 'off': False,
1949 'never': False}
1969 'never': False}
1950
1970
1951 def parsebool(s):
1971 def parsebool(s):
1952 """Parse s into a boolean.
1972 """Parse s into a boolean.
1953
1973
1954 If s is not a valid boolean, returns None.
1974 If s is not a valid boolean, returns None.
1955 """
1975 """
1956 return _booleans.get(s.lower(), None)
1976 return _booleans.get(s.lower(), None)
1957
1977
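Illustrative parsebool() calls; the lookup is case-insensitive and anything outside the table returns None.

# minimal sketch
from mercurial import util

util.parsebool('on')     # -> True
util.parsebool('Never')  # -> False
util.parsebool('maybe')  # -> None, caller decides how to treat invalid input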
1958 _hexdig = '0123456789ABCDEFabcdef'
1978 _hexdig = '0123456789ABCDEFabcdef'
1959 _hextochr = dict((a + b, chr(int(a + b, 16)))
1979 _hextochr = dict((a + b, chr(int(a + b, 16)))
1960 for a in _hexdig for b in _hexdig)
1980 for a in _hexdig for b in _hexdig)
1961
1981
1962 def _urlunquote(s):
1982 def _urlunquote(s):
1963 """Decode HTTP/HTML % encoding.
1983 """Decode HTTP/HTML % encoding.
1964
1984
1965 >>> _urlunquote('abc%20def')
1985 >>> _urlunquote('abc%20def')
1966 'abc def'
1986 'abc def'
1967 """
1987 """
1968 res = s.split('%')
1988 res = s.split('%')
1969 # fastpath
1989 # fastpath
1970 if len(res) == 1:
1990 if len(res) == 1:
1971 return s
1991 return s
1972 s = res[0]
1992 s = res[0]
1973 for item in res[1:]:
1993 for item in res[1:]:
1974 try:
1994 try:
1975 s += _hextochr[item[:2]] + item[2:]
1995 s += _hextochr[item[:2]] + item[2:]
1976 except KeyError:
1996 except KeyError:
1977 s += '%' + item
1997 s += '%' + item
1978 except UnicodeDecodeError:
1998 except UnicodeDecodeError:
1979 s += unichr(int(item[:2], 16)) + item[2:]
1999 s += unichr(int(item[:2], 16)) + item[2:]
1980 return s
2000 return s
1981
2001
1982 class url(object):
2002 class url(object):
1983 r"""Reliable URL parser.
2003 r"""Reliable URL parser.
1984
2004
1985 This parses URLs and provides attributes for the following
2005 This parses URLs and provides attributes for the following
1986 components:
2006 components:
1987
2007
1988 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2008 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1989
2009
1990 Missing components are set to None. The only exception is
2010 Missing components are set to None. The only exception is
1991 fragment, which is set to '' if present but empty.
2011 fragment, which is set to '' if present but empty.
1992
2012
1993 If parsefragment is False, fragment is included in query. If
2013 If parsefragment is False, fragment is included in query. If
1994 parsequery is False, query is included in path. If both are
2014 parsequery is False, query is included in path. If both are
1995 False, both fragment and query are included in path.
2015 False, both fragment and query are included in path.
1996
2016
1997 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2017 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1998
2018
1999 Note that for backward compatibility reasons, bundle URLs do not
2019 Note that for backward compatibility reasons, bundle URLs do not
2000 take host names. That means 'bundle://../' has a path of '../'.
2020 take host names. That means 'bundle://../' has a path of '../'.
2001
2021
2002 Examples:
2022 Examples:
2003
2023
2004 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2024 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2005 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2025 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2006 >>> url('ssh://[::1]:2200//home/joe/repo')
2026 >>> url('ssh://[::1]:2200//home/joe/repo')
2007 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2027 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2008 >>> url('file:///home/joe/repo')
2028 >>> url('file:///home/joe/repo')
2009 <url scheme: 'file', path: '/home/joe/repo'>
2029 <url scheme: 'file', path: '/home/joe/repo'>
2010 >>> url('file:///c:/temp/foo/')
2030 >>> url('file:///c:/temp/foo/')
2011 <url scheme: 'file', path: 'c:/temp/foo/'>
2031 <url scheme: 'file', path: 'c:/temp/foo/'>
2012 >>> url('bundle:foo')
2032 >>> url('bundle:foo')
2013 <url scheme: 'bundle', path: 'foo'>
2033 <url scheme: 'bundle', path: 'foo'>
2014 >>> url('bundle://../foo')
2034 >>> url('bundle://../foo')
2015 <url scheme: 'bundle', path: '../foo'>
2035 <url scheme: 'bundle', path: '../foo'>
2016 >>> url(r'c:\foo\bar')
2036 >>> url(r'c:\foo\bar')
2017 <url path: 'c:\\foo\\bar'>
2037 <url path: 'c:\\foo\\bar'>
2018 >>> url(r'\\blah\blah\blah')
2038 >>> url(r'\\blah\blah\blah')
2019 <url path: '\\\\blah\\blah\\blah'>
2039 <url path: '\\\\blah\\blah\\blah'>
2020 >>> url(r'\\blah\blah\blah#baz')
2040 >>> url(r'\\blah\blah\blah#baz')
2021 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2041 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2022 >>> url(r'file:///C:\users\me')
2042 >>> url(r'file:///C:\users\me')
2023 <url scheme: 'file', path: 'C:\\users\\me'>
2043 <url scheme: 'file', path: 'C:\\users\\me'>
2024
2044
2025 Authentication credentials:
2045 Authentication credentials:
2026
2046
2027 >>> url('ssh://joe:xyz@x/repo')
2047 >>> url('ssh://joe:xyz@x/repo')
2028 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2048 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2029 >>> url('ssh://joe@x/repo')
2049 >>> url('ssh://joe@x/repo')
2030 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2050 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2031
2051
2032 Query strings and fragments:
2052 Query strings and fragments:
2033
2053
2034 >>> url('http://host/a?b#c')
2054 >>> url('http://host/a?b#c')
2035 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2055 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2036 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2056 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2037 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2057 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2038 """
2058 """
2039
2059
2040 _safechars = "!~*'()+"
2060 _safechars = "!~*'()+"
2041 _safepchars = "/!~*'()+:\\"
2061 _safepchars = "/!~*'()+:\\"
2042 _matchscheme = remod.compile(r'^[a-zA-Z0-9+.\-]+:').match
2062 _matchscheme = remod.compile(r'^[a-zA-Z0-9+.\-]+:').match
2043
2063
2044 def __init__(self, path, parsequery=True, parsefragment=True):
2064 def __init__(self, path, parsequery=True, parsefragment=True):
2045 # We slowly chomp away at path until we have only the path left
2065 # We slowly chomp away at path until we have only the path left
2046 self.scheme = self.user = self.passwd = self.host = None
2066 self.scheme = self.user = self.passwd = self.host = None
2047 self.port = self.path = self.query = self.fragment = None
2067 self.port = self.path = self.query = self.fragment = None
2048 self._localpath = True
2068 self._localpath = True
2049 self._hostport = ''
2069 self._hostport = ''
2050 self._origpath = path
2070 self._origpath = path
2051
2071
2052 if parsefragment and '#' in path:
2072 if parsefragment and '#' in path:
2053 path, self.fragment = path.split('#', 1)
2073 path, self.fragment = path.split('#', 1)
2054 if not path:
2074 if not path:
2055 path = None
2075 path = None
2056
2076
2057 # special case for Windows drive letters and UNC paths
2077 # special case for Windows drive letters and UNC paths
2058 if hasdriveletter(path) or path.startswith(r'\\'):
2078 if hasdriveletter(path) or path.startswith(r'\\'):
2059 self.path = path
2079 self.path = path
2060 return
2080 return
2061
2081
2062 # For compatibility reasons, we can't handle bundle paths as
2082 # For compatibility reasons, we can't handle bundle paths as
2063 # normal URLs
2083 # normal URLs
2064 if path.startswith('bundle:'):
2084 if path.startswith('bundle:'):
2065 self.scheme = 'bundle'
2085 self.scheme = 'bundle'
2066 path = path[7:]
2086 path = path[7:]
2067 if path.startswith('//'):
2087 if path.startswith('//'):
2068 path = path[2:]
2088 path = path[2:]
2069 self.path = path
2089 self.path = path
2070 return
2090 return
2071
2091
2072 if self._matchscheme(path):
2092 if self._matchscheme(path):
2073 parts = path.split(':', 1)
2093 parts = path.split(':', 1)
2074 if parts[0]:
2094 if parts[0]:
2075 self.scheme, path = parts
2095 self.scheme, path = parts
2076 self._localpath = False
2096 self._localpath = False
2077
2097
2078 if not path:
2098 if not path:
2079 path = None
2099 path = None
2080 if self._localpath:
2100 if self._localpath:
2081 self.path = ''
2101 self.path = ''
2082 return
2102 return
2083 else:
2103 else:
2084 if self._localpath:
2104 if self._localpath:
2085 self.path = path
2105 self.path = path
2086 return
2106 return
2087
2107
2088 if parsequery and '?' in path:
2108 if parsequery and '?' in path:
2089 path, self.query = path.split('?', 1)
2109 path, self.query = path.split('?', 1)
2090 if not path:
2110 if not path:
2091 path = None
2111 path = None
2092 if not self.query:
2112 if not self.query:
2093 self.query = None
2113 self.query = None
2094
2114
2095 # // is required to specify a host/authority
2115 # // is required to specify a host/authority
2096 if path and path.startswith('//'):
2116 if path and path.startswith('//'):
2097 parts = path[2:].split('/', 1)
2117 parts = path[2:].split('/', 1)
2098 if len(parts) > 1:
2118 if len(parts) > 1:
2099 self.host, path = parts
2119 self.host, path = parts
2100 else:
2120 else:
2101 self.host = parts[0]
2121 self.host = parts[0]
2102 path = None
2122 path = None
2103 if not self.host:
2123 if not self.host:
2104 self.host = None
2124 self.host = None
2105 # path of file:///d is /d
2125 # path of file:///d is /d
2106 # path of file:///d:/ is d:/, not /d:/
2126 # path of file:///d:/ is d:/, not /d:/
2107 if path and not hasdriveletter(path):
2127 if path and not hasdriveletter(path):
2108 path = '/' + path
2128 path = '/' + path
2109
2129
2110 if self.host and '@' in self.host:
2130 if self.host and '@' in self.host:
2111 self.user, self.host = self.host.rsplit('@', 1)
2131 self.user, self.host = self.host.rsplit('@', 1)
2112 if ':' in self.user:
2132 if ':' in self.user:
2113 self.user, self.passwd = self.user.split(':', 1)
2133 self.user, self.passwd = self.user.split(':', 1)
2114 if not self.host:
2134 if not self.host:
2115 self.host = None
2135 self.host = None
2116
2136
2117 # Don't split on colons in IPv6 addresses without ports
2137 # Don't split on colons in IPv6 addresses without ports
2118 if (self.host and ':' in self.host and
2138 if (self.host and ':' in self.host and
2119 not (self.host.startswith('[') and self.host.endswith(']'))):
2139 not (self.host.startswith('[') and self.host.endswith(']'))):
2120 self._hostport = self.host
2140 self._hostport = self.host
2121 self.host, self.port = self.host.rsplit(':', 1)
2141 self.host, self.port = self.host.rsplit(':', 1)
2122 if not self.host:
2142 if not self.host:
2123 self.host = None
2143 self.host = None
2124
2144
2125 if (self.host and self.scheme == 'file' and
2145 if (self.host and self.scheme == 'file' and
2126 self.host not in ('localhost', '127.0.0.1', '[::1]')):
2146 self.host not in ('localhost', '127.0.0.1', '[::1]')):
2127 raise Abort(_('file:// URLs can only refer to localhost'))
2147 raise Abort(_('file:// URLs can only refer to localhost'))
2128
2148
2129 self.path = path
2149 self.path = path
2130
2150
2131 # leave the query string escaped
2151 # leave the query string escaped
2132 for a in ('user', 'passwd', 'host', 'port',
2152 for a in ('user', 'passwd', 'host', 'port',
2133 'path', 'fragment'):
2153 'path', 'fragment'):
2134 v = getattr(self, a)
2154 v = getattr(self, a)
2135 if v is not None:
2155 if v is not None:
2136 setattr(self, a, _urlunquote(v))
2156 setattr(self, a, _urlunquote(v))
2137
2157
2138 def __repr__(self):
2158 def __repr__(self):
2139 attrs = []
2159 attrs = []
2140 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2160 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2141 'query', 'fragment'):
2161 'query', 'fragment'):
2142 v = getattr(self, a)
2162 v = getattr(self, a)
2143 if v is not None:
2163 if v is not None:
2144 attrs.append('%s: %r' % (a, v))
2164 attrs.append('%s: %r' % (a, v))
2145 return '<url %s>' % ', '.join(attrs)
2165 return '<url %s>' % ', '.join(attrs)
2146
2166
2147 def __str__(self):
2167 def __str__(self):
2148 r"""Join the URL's components back into a URL string.
2168 r"""Join the URL's components back into a URL string.
2149
2169
2150 Examples:
2170 Examples:
2151
2171
2152 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
2172 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
2153 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
2173 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
2154 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
2174 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
2155 'http://user:pw@host:80/?foo=bar&baz=42'
2175 'http://user:pw@host:80/?foo=bar&baz=42'
2156 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
2176 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
2157 'http://user:pw@host:80/?foo=bar%3dbaz'
2177 'http://user:pw@host:80/?foo=bar%3dbaz'
2158 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
2178 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
2159 'ssh://user:pw@[::1]:2200//home/joe#'
2179 'ssh://user:pw@[::1]:2200//home/joe#'
2160 >>> str(url('http://localhost:80//'))
2180 >>> str(url('http://localhost:80//'))
2161 'http://localhost:80//'
2181 'http://localhost:80//'
2162 >>> str(url('http://localhost:80/'))
2182 >>> str(url('http://localhost:80/'))
2163 'http://localhost:80/'
2183 'http://localhost:80/'
2164 >>> str(url('http://localhost:80'))
2184 >>> str(url('http://localhost:80'))
2165 'http://localhost:80/'
2185 'http://localhost:80/'
2166 >>> str(url('bundle:foo'))
2186 >>> str(url('bundle:foo'))
2167 'bundle:foo'
2187 'bundle:foo'
2168 >>> str(url('bundle://../foo'))
2188 >>> str(url('bundle://../foo'))
2169 'bundle:../foo'
2189 'bundle:../foo'
2170 >>> str(url('path'))
2190 >>> str(url('path'))
2171 'path'
2191 'path'
2172 >>> str(url('file:///tmp/foo/bar'))
2192 >>> str(url('file:///tmp/foo/bar'))
2173 'file:///tmp/foo/bar'
2193 'file:///tmp/foo/bar'
2174 >>> str(url('file:///c:/tmp/foo/bar'))
2194 >>> str(url('file:///c:/tmp/foo/bar'))
2175 'file:///c:/tmp/foo/bar'
2195 'file:///c:/tmp/foo/bar'
2176 >>> print url(r'bundle:foo\bar')
2196 >>> print url(r'bundle:foo\bar')
2177 bundle:foo\bar
2197 bundle:foo\bar
2178 >>> print url(r'file:///D:\data\hg')
2198 >>> print url(r'file:///D:\data\hg')
2179 file:///D:\data\hg
2199 file:///D:\data\hg
2180 """
2200 """
2181 if self._localpath:
2201 if self._localpath:
2182 s = self.path
2202 s = self.path
2183 if self.scheme == 'bundle':
2203 if self.scheme == 'bundle':
2184 s = 'bundle:' + s
2204 s = 'bundle:' + s
2185 if self.fragment:
2205 if self.fragment:
2186 s += '#' + self.fragment
2206 s += '#' + self.fragment
2187 return s
2207 return s
2188
2208
2189 s = self.scheme + ':'
2209 s = self.scheme + ':'
2190 if self.user or self.passwd or self.host:
2210 if self.user or self.passwd or self.host:
2191 s += '//'
2211 s += '//'
2192 elif self.scheme and (not self.path or self.path.startswith('/')
2212 elif self.scheme and (not self.path or self.path.startswith('/')
2193 or hasdriveletter(self.path)):
2213 or hasdriveletter(self.path)):
2194 s += '//'
2214 s += '//'
2195 if hasdriveletter(self.path):
2215 if hasdriveletter(self.path):
2196 s += '/'
2216 s += '/'
2197 if self.user:
2217 if self.user:
2198 s += urllib.quote(self.user, safe=self._safechars)
2218 s += urllib.quote(self.user, safe=self._safechars)
2199 if self.passwd:
2219 if self.passwd:
2200 s += ':' + urllib.quote(self.passwd, safe=self._safechars)
2220 s += ':' + urllib.quote(self.passwd, safe=self._safechars)
2201 if self.user or self.passwd:
2221 if self.user or self.passwd:
2202 s += '@'
2222 s += '@'
2203 if self.host:
2223 if self.host:
2204 if not (self.host.startswith('[') and self.host.endswith(']')):
2224 if not (self.host.startswith('[') and self.host.endswith(']')):
2205 s += urllib.quote(self.host)
2225 s += urllib.quote(self.host)
2206 else:
2226 else:
2207 s += self.host
2227 s += self.host
2208 if self.port:
2228 if self.port:
2209 s += ':' + urllib.quote(self.port)
2229 s += ':' + urllib.quote(self.port)
2210 if self.host:
2230 if self.host:
2211 s += '/'
2231 s += '/'
2212 if self.path:
2232 if self.path:
2213 # TODO: similar to the query string, we should not unescape the
2233 # TODO: similar to the query string, we should not unescape the
2214 # path when we store it; the path might contain '%2f' = '/',
2234 # path when we store it; the path might contain '%2f' = '/',
2215 # which we should *not* escape.
2235 # which we should *not* escape.
2216 s += urllib.quote(self.path, safe=self._safepchars)
2236 s += urllib.quote(self.path, safe=self._safepchars)
2217 if self.query:
2237 if self.query:
2218 # we store the query in escaped form.
2238 # we store the query in escaped form.
2219 s += '?' + self.query
2239 s += '?' + self.query
2220 if self.fragment is not None:
2240 if self.fragment is not None:
2221 s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
2241 s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
2222 return s
2242 return s
2223
2243
2224 def authinfo(self):
2244 def authinfo(self):
2225 user, passwd = self.user, self.passwd
2245 user, passwd = self.user, self.passwd
2226 try:
2246 try:
2227 self.user, self.passwd = None, None
2247 self.user, self.passwd = None, None
2228 s = str(self)
2248 s = str(self)
2229 finally:
2249 finally:
2230 self.user, self.passwd = user, passwd
2250 self.user, self.passwd = user, passwd
2231 if not self.user:
2251 if not self.user:
2232 return (s, None)
2252 return (s, None)
2233 # authinfo[1] is passed to the urllib2 password manager, and its
2253 # authinfo[1] is passed to the urllib2 password manager, and its
2234 # URIs must not contain credentials. The host is passed in the
2254 # URIs must not contain credentials. The host is passed in the
2235 # URIs list because Python < 2.4.3 uses only that to search for
2255 # URIs list because Python < 2.4.3 uses only that to search for
2236 # a password.
2256 # a password.
2237 return (s, (None, (s, self.host),
2257 return (s, (None, (s, self.host),
2238 self.user, self.passwd or ''))
2258 self.user, self.passwd or ''))
2239
2259
2240 def isabs(self):
2260 def isabs(self):
2241 if self.scheme and self.scheme != 'file':
2261 if self.scheme and self.scheme != 'file':
2242 return True # remote URL
2262 return True # remote URL
2243 if hasdriveletter(self.path):
2263 if hasdriveletter(self.path):
2244 return True # absolute for our purposes - can't be joined()
2264 return True # absolute for our purposes - can't be joined()
2245 if self.path.startswith(r'\\'):
2265 if self.path.startswith(r'\\'):
2246 return True # Windows UNC path
2266 return True # Windows UNC path
2247 if self.path.startswith('/'):
2267 if self.path.startswith('/'):
2248 return True # POSIX-style
2268 return True # POSIX-style
2249 return False
2269 return False
2250
2270
2251 def localpath(self):
2271 def localpath(self):
2252 if self.scheme == 'file' or self.scheme == 'bundle':
2272 if self.scheme == 'file' or self.scheme == 'bundle':
2253 path = self.path or '/'
2273 path = self.path or '/'
2254 # For Windows, we need to promote hosts containing drive
2274 # For Windows, we need to promote hosts containing drive
2255 # letters to paths with drive letters.
2275 # letters to paths with drive letters.
2256 if hasdriveletter(self._hostport):
2276 if hasdriveletter(self._hostport):
2257 path = self._hostport + '/' + self.path
2277 path = self._hostport + '/' + self.path
2258 elif (self.host is not None and self.path
2278 elif (self.host is not None and self.path
2259 and not hasdriveletter(path)):
2279 and not hasdriveletter(path)):
2260 path = '/' + path
2280 path = '/' + path
2261 return path
2281 return path
2262 return self._origpath
2282 return self._origpath
2263
2283
2264 def islocal(self):
2284 def islocal(self):
2265 '''whether localpath will return something that posixfile can open'''
2285 '''whether localpath will return something that posixfile can open'''
2266 return (not self.scheme or self.scheme == 'file'
2286 return (not self.scheme or self.scheme == 'file'
2267 or self.scheme == 'bundle')
2287 or self.scheme == 'bundle')
2268
2288
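The doctests above cover parsing and str(); authinfo() is not doctested, so here is a small sketch: it returns the URL with credentials removed plus a tuple shaped for a urllib2 password manager. The concrete URL is illustrative.

# minimal sketch -- URL is illustrative
from mercurial import util

u = util.url('http://joe:secret@example.com/repo')
cleanurl, auth = u.authinfo()
# cleanurl == 'http://example.com/repo'
# auth == (None, ('http://example.com/repo', 'example.com'), 'joe', 'secret')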
2269 def hasscheme(path):
2289 def hasscheme(path):
2270 return bool(url(path).scheme)
2290 return bool(url(path).scheme)
2271
2291
2272 def hasdriveletter(path):
2292 def hasdriveletter(path):
2273 return path and path[1:2] == ':' and path[0:1].isalpha()
2293 return path and path[1:2] == ':' and path[0:1].isalpha()
2274
2294
2275 def urllocalpath(path):
2295 def urllocalpath(path):
2276 return url(path, parsequery=False, parsefragment=False).localpath()
2296 return url(path, parsequery=False, parsefragment=False).localpath()
2277
2297
2278 def hidepassword(u):
2298 def hidepassword(u):
2279 '''hide user credential in a url string'''
2299 '''hide user credential in a url string'''
2280 u = url(u)
2300 u = url(u)
2281 if u.passwd:
2301 if u.passwd:
2282 u.passwd = '***'
2302 u.passwd = '***'
2283 return str(u)
2303 return str(u)
2284
2304
2285 def removeauth(u):
2305 def removeauth(u):
2286 '''remove all authentication information from a url string'''
2306 '''remove all authentication information from a url string'''
2287 u = url(u)
2307 u = url(u)
2288 u.user = u.passwd = None
2308 u.user = u.passwd = None
2289 return str(u)
2309 return str(u)
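Illustrative round trips through hidepassword() and removeauth(); the URL is an assumption.

# minimal sketch
from mercurial import util

util.hidepassword('http://joe:secret@example.com/repo')
# -> 'http://joe:***@example.com/repo'
util.removeauth('http://joe:secret@example.com/repo')
# -> 'http://example.com/repo'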
2290
2310
2291 def isatty(fd):
2311 def isatty(fd):
2292 try:
2312 try:
2293 return fd.isatty()
2313 return fd.isatty()
2294 except AttributeError:
2314 except AttributeError:
2295 return False
2315 return False
2296
2316
2297 timecount = unitcountfn(
2317 timecount = unitcountfn(
2298 (1, 1e3, _('%.0f s')),
2318 (1, 1e3, _('%.0f s')),
2299 (100, 1, _('%.1f s')),
2319 (100, 1, _('%.1f s')),
2300 (10, 1, _('%.2f s')),
2320 (10, 1, _('%.2f s')),
2301 (1, 1, _('%.3f s')),
2321 (1, 1, _('%.3f s')),
2302 (100, 0.001, _('%.1f ms')),
2322 (100, 0.001, _('%.1f ms')),
2303 (10, 0.001, _('%.2f ms')),
2323 (10, 0.001, _('%.2f ms')),
2304 (1, 0.001, _('%.3f ms')),
2324 (1, 0.001, _('%.3f ms')),
2305 (100, 0.000001, _('%.1f us')),
2325 (100, 0.000001, _('%.1f us')),
2306 (10, 0.000001, _('%.2f us')),
2326 (10, 0.000001, _('%.2f us')),
2307 (1, 0.000001, _('%.3f us')),
2327 (1, 0.000001, _('%.3f us')),
2308 (100, 0.000000001, _('%.1f ns')),
2328 (100, 0.000000001, _('%.1f ns')),
2309 (10, 0.000000001, _('%.2f ns')),
2329 (10, 0.000000001, _('%.2f ns')),
2310 (1, 0.000000001, _('%.3f ns')),
2330 (1, 0.000000001, _('%.3f ns')),
2311 )
2331 )
2312
2332
2313 _timenesting = [0]
2333 _timenesting = [0]
2314
2334
2315 def timed(func):
2335 def timed(func):
2316 '''Report the execution time of a function call to stderr.
2336 '''Report the execution time of a function call to stderr.
2317
2337
2318 During development, use as a decorator when you need to measure
2338 During development, use as a decorator when you need to measure
2319 the cost of a function, e.g. as follows:
2339 the cost of a function, e.g. as follows:
2320
2340
2321 @util.timed
2341 @util.timed
2322 def foo(a, b, c):
2342 def foo(a, b, c):
2323 pass
2343 pass
2324 '''
2344 '''
2325
2345
2326 def wrapper(*args, **kwargs):
2346 def wrapper(*args, **kwargs):
2327 start = time.time()
2347 start = time.time()
2328 indent = 2
2348 indent = 2
2329 _timenesting[0] += indent
2349 _timenesting[0] += indent
2330 try:
2350 try:
2331 return func(*args, **kwargs)
2351 return func(*args, **kwargs)
2332 finally:
2352 finally:
2333 elapsed = time.time() - start
2353 elapsed = time.time() - start
2334 _timenesting[0] -= indent
2354 _timenesting[0] -= indent
2335 sys.stderr.write('%s%s: %s\n' %
2355 sys.stderr.write('%s%s: %s\n' %
2336 (' ' * _timenesting[0], func.__name__,
2356 (' ' * _timenesting[0], func.__name__,
2337 timecount(elapsed)))
2357 timecount(elapsed)))
2338 return wrapper
2358 return wrapper
2339
2359
2340 _sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
2360 _sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
2341 ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2361 ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2342
2362
2343 def sizetoint(s):
2363 def sizetoint(s):
2344 '''Convert a space specifier to a byte count.
2364 '''Convert a space specifier to a byte count.
2345
2365
2346 >>> sizetoint('30')
2366 >>> sizetoint('30')
2347 30
2367 30
2348 >>> sizetoint('2.2kb')
2368 >>> sizetoint('2.2kb')
2349 2252
2369 2252
2350 >>> sizetoint('6M')
2370 >>> sizetoint('6M')
2351 6291456
2371 6291456
2352 '''
2372 '''
2353 t = s.strip().lower()
2373 t = s.strip().lower()
2354 try:
2374 try:
2355 for k, u in _sizeunits:
2375 for k, u in _sizeunits:
2356 if t.endswith(k):
2376 if t.endswith(k):
2357 return int(float(t[:-len(k)]) * u)
2377 return int(float(t[:-len(k)]) * u)
2358 return int(t)
2378 return int(t)
2359 except ValueError:
2379 except ValueError:
2360 raise error.ParseError(_("couldn't parse size: %s") % s)
2380 raise error.ParseError(_("couldn't parse size: %s") % s)
2361
2381
2362 class hooks(object):
2382 class hooks(object):
2363 '''A collection of hook functions that can be used to extend a
2383 '''A collection of hook functions that can be used to extend a
2364 function's behavior. Hooks are called in lexicographic order,
2384 function's behavior. Hooks are called in lexicographic order,
2365 based on the names of their sources.'''
2385 based on the names of their sources.'''
2366
2386
2367 def __init__(self):
2387 def __init__(self):
2368 self._hooks = []
2388 self._hooks = []
2369
2389
2370 def add(self, source, hook):
2390 def add(self, source, hook):
2371 self._hooks.append((source, hook))
2391 self._hooks.append((source, hook))
2372
2392
2373 def __call__(self, *args):
2393 def __call__(self, *args):
2374 self._hooks.sort(key=lambda x: x[0])
2394 self._hooks.sort(key=lambda x: x[0])
2375 results = []
2395 results = []
2376 for source, hook in self._hooks:
2396 for source, hook in self._hooks:
2377 results.append(hook(*args))
2397 results.append(hook(*args))
2378 return results
2398 return results
2379
2399
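A minimal sketch of the hooks class described above; the source names are illustrative and only matter for ordering.

# minimal sketch -- source names are illustrative
from mercurial import util

h = util.hooks()
h.add('b-ext', lambda x: x * 2)
h.add('a-ext', lambda x: x + 1)
h(10)
# -> [11, 20]; hooks run sorted by source name, so 'a-ext' fires first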
2380 def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout):
2400 def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout):
2381 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
2401 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
2382 Skips the 'skip' last entries. By default it will flush stdout first.
2402 Skips the 'skip' last entries. By default it will flush stdout first.
2383 It can be used everywhere and intentionally does not require a ui object.
2403 It can be used everywhere and intentionally does not require a ui object.
2384 Not to be used in production code but very convenient while developing.
2404 Not to be used in production code but very convenient while developing.
2385 '''
2405 '''
2386 if otherf:
2406 if otherf:
2387 otherf.flush()
2407 otherf.flush()
2388 f.write('%s at:\n' % msg)
2408 f.write('%s at:\n' % msg)
2389 entries = [('%s:%s' % (fn, ln), func)
2409 entries = [('%s:%s' % (fn, ln), func)
2390 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]]
2410 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]]
2391 if entries:
2411 if entries:
2392 fnmax = max(len(entry[0]) for entry in entries)
2412 fnmax = max(len(entry[0]) for entry in entries)
2393 for fnln, func in entries:
2413 for fnln, func in entries:
2394 f.write(' %-*s in %s\n' % (fnmax, fnln, func))
2414 f.write(' %-*s in %s\n' % (fnmax, fnln, func))
2395 f.flush()
2415 f.flush()
2396
2416
2397 class dirs(object):
2417 class dirs(object):
2398 '''a multiset of directory names from a dirstate or manifest'''
2418 '''a multiset of directory names from a dirstate or manifest'''
2399
2419
2400 def __init__(self, map, skip=None):
2420 def __init__(self, map, skip=None):
2401 self._dirs = {}
2421 self._dirs = {}
2402 addpath = self.addpath
2422 addpath = self.addpath
2403 if safehasattr(map, 'iteritems') and skip is not None:
2423 if safehasattr(map, 'iteritems') and skip is not None:
2404 for f, s in map.iteritems():
2424 for f, s in map.iteritems():
2405 if s[0] != skip:
2425 if s[0] != skip:
2406 addpath(f)
2426 addpath(f)
2407 else:
2427 else:
2408 for f in map:
2428 for f in map:
2409 addpath(f)
2429 addpath(f)
2410
2430
2411 def addpath(self, path):
2431 def addpath(self, path):
2412 dirs = self._dirs
2432 dirs = self._dirs
2413 for base in finddirs(path):
2433 for base in finddirs(path):
2414 if base in dirs:
2434 if base in dirs:
2415 dirs[base] += 1
2435 dirs[base] += 1
2416 return
2436 return
2417 dirs[base] = 1
2437 dirs[base] = 1
2418
2438
2419 def delpath(self, path):
2439 def delpath(self, path):
2420 dirs = self._dirs
2440 dirs = self._dirs
2421 for base in finddirs(path):
2441 for base in finddirs(path):
2422 if dirs[base] > 1:
2442 if dirs[base] > 1:
2423 dirs[base] -= 1
2443 dirs[base] -= 1
2424 return
2444 return
2425 del dirs[base]
2445 del dirs[base]
2426
2446
2427 def __iter__(self):
2447 def __iter__(self):
2428 return self._dirs.iterkeys()
2448 return self._dirs.iterkeys()
2429
2449
2430 def __contains__(self, d):
2450 def __contains__(self, d):
2431 return d in self._dirs
2451 return d in self._dirs
2432
2452
2433 if safehasattr(parsers, 'dirs'):
2453 if safehasattr(parsers, 'dirs'):
2434 dirs = parsers.dirs
2454 dirs = parsers.dirs
2435
2455
2436 def finddirs(path):
2456 def finddirs(path):
2437 pos = path.rfind('/')
2457 pos = path.rfind('/')
2438 while pos != -1:
2458 while pos != -1:
2439 yield path[:pos]
2459 yield path[:pos]
2440 pos = path.rfind('/', 0, pos)
2460 pos = path.rfind('/', 0, pos)
2441
2461
2442 # compression utility
2462 # compression utility
2443
2463
2444 class nocompress(object):
2464 class nocompress(object):
2445 def compress(self, x):
2465 def compress(self, x):
2446 return x
2466 return x
2447 def flush(self):
2467 def flush(self):
2448 return ""
2468 return ""
2449
2469
2450 compressors = {
2470 compressors = {
2451 None: nocompress,
2471 None: nocompress,
2452 # lambda to prevent early import
2472 # lambda to prevent early import
2453 'BZ': lambda: bz2.BZ2Compressor(),
2473 'BZ': lambda: bz2.BZ2Compressor(),
2454 'GZ': lambda: zlib.compressobj(),
2474 'GZ': lambda: zlib.compressobj(),
2455 }
2475 }
2456 # also support the old form by courtesy
2476 # also support the old form by courtesy
2457 compressors['UN'] = compressors[None]
2477 compressors['UN'] = compressors[None]
2458
2478
2459 def _makedecompressor(decompcls):
2479 def _makedecompressor(decompcls):
2460 def generator(f):
2480 def generator(f):
2461 d = decompcls()
2481 d = decompcls()
2462 for chunk in filechunkiter(f):
2482 for chunk in filechunkiter(f):
2463 yield d.decompress(chunk)
2483 yield d.decompress(chunk)
2464 def func(fh):
2484 def func(fh):
2465 return chunkbuffer(generator(fh))
2485 return chunkbuffer(generator(fh))
2466 return func
2486 return func
2467
2487
2468 def _bz2():
2488 def _bz2():
2469 d = bz2.BZ2Decompressor()
2489 d = bz2.BZ2Decompressor()
2470 # Bzip2 streams start with BZ, but we stripped it.
2490 # Bzip2 streams start with BZ, but we stripped it.
2471 # we put it back for good measure.
2491 # we put it back for good measure.
2472 d.decompress('BZ')
2492 d.decompress('BZ')
2473 return d
2493 return d
2474
2494
2475 decompressors = {None: lambda fh: fh,
2495 decompressors = {None: lambda fh: fh,
2476 '_truncatedBZ': _makedecompressor(_bz2),
2496 '_truncatedBZ': _makedecompressor(_bz2),
2477 'BZ': _makedecompressor(lambda: bz2.BZ2Decompressor()),
2497 'BZ': _makedecompressor(lambda: bz2.BZ2Decompressor()),
2478 'GZ': _makedecompressor(lambda: zlib.decompressobj()),
2498 'GZ': _makedecompressor(lambda: zlib.decompressobj()),
2479 }
2499 }
2480 # also support the old form by courtesy
2500 # also support the old form by courtesy
2481 decompressors['UN'] = decompressors[None]
2501 decompressors['UN'] = decompressors[None]
2482
2502
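A minimal round-trip sketch through the 'GZ' entries of the tables above; the payload is illustrative, and the decompressor side expects a file-like object yielding the compressed bytes.

# minimal sketch -- payload is illustrative
import cStringIO
from mercurial import util

payload = 'some payload'
comp = util.compressors['GZ']()
data = comp.compress(payload) + comp.flush()

fh = cStringIO.StringIO(data)
util.decompressors['GZ'](fh).read(len(payload))
# -> 'some payload'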
2483 # convenient shortcut
2503 # convenient shortcut
2484 dst = debugstacktrace
2504 dst = debugstacktrace
@@ -1,211 +1,210
1 #require test-repo
1 #require test-repo
2
2
3 $ cd "$TESTDIR"/..
3 $ cd "$TESTDIR"/..
4
4
5 $ hg files 'set:(**.py)' | xargs python contrib/check-py3-compat.py
5 $ hg files 'set:(**.py)' | xargs python contrib/check-py3-compat.py
6 contrib/casesmash.py not using absolute_import
6 contrib/casesmash.py not using absolute_import
7 contrib/check-code.py not using absolute_import
7 contrib/check-code.py not using absolute_import
8 contrib/check-code.py requires print_function
8 contrib/check-code.py requires print_function
9 contrib/check-config.py not using absolute_import
9 contrib/check-config.py not using absolute_import
10 contrib/check-config.py requires print_function
10 contrib/check-config.py requires print_function
11 contrib/debugcmdserver.py not using absolute_import
11 contrib/debugcmdserver.py not using absolute_import
12 contrib/debugcmdserver.py requires print_function
12 contrib/debugcmdserver.py requires print_function
13 contrib/debugshell.py not using absolute_import
13 contrib/debugshell.py not using absolute_import
14 contrib/fixpax.py not using absolute_import
14 contrib/fixpax.py not using absolute_import
15 contrib/fixpax.py requires print_function
15 contrib/fixpax.py requires print_function
16 contrib/hgclient.py not using absolute_import
16 contrib/hgclient.py not using absolute_import
17 contrib/hgclient.py requires print_function
17 contrib/hgclient.py requires print_function
18 contrib/hgfixes/fix_bytes.py not using absolute_import
18 contrib/hgfixes/fix_bytes.py not using absolute_import
19 contrib/hgfixes/fix_bytesmod.py not using absolute_import
19 contrib/hgfixes/fix_bytesmod.py not using absolute_import
20 contrib/hgfixes/fix_leftover_imports.py not using absolute_import
20 contrib/hgfixes/fix_leftover_imports.py not using absolute_import
21 contrib/import-checker.py not using absolute_import
21 contrib/import-checker.py not using absolute_import
22 contrib/import-checker.py requires print_function
22 contrib/import-checker.py requires print_function
23 contrib/memory.py not using absolute_import
23 contrib/memory.py not using absolute_import
24 contrib/perf.py not using absolute_import
24 contrib/perf.py not using absolute_import
25 contrib/python-hook-examples.py not using absolute_import
25 contrib/python-hook-examples.py not using absolute_import
26 contrib/revsetbenchmarks.py not using absolute_import
26 contrib/revsetbenchmarks.py not using absolute_import
27 contrib/revsetbenchmarks.py requires print_function
27 contrib/revsetbenchmarks.py requires print_function
28 contrib/showstack.py not using absolute_import
28 contrib/showstack.py not using absolute_import
29 contrib/synthrepo.py not using absolute_import
29 contrib/synthrepo.py not using absolute_import
30 contrib/win32/hgwebdir_wsgi.py not using absolute_import
30 contrib/win32/hgwebdir_wsgi.py not using absolute_import
31 doc/check-seclevel.py not using absolute_import
31 doc/check-seclevel.py not using absolute_import
32 doc/gendoc.py not using absolute_import
32 doc/gendoc.py not using absolute_import
33 doc/hgmanpage.py not using absolute_import
33 doc/hgmanpage.py not using absolute_import
34 hgext/__init__.py not using absolute_import
34 hgext/__init__.py not using absolute_import
35 hgext/acl.py not using absolute_import
35 hgext/acl.py not using absolute_import
36 hgext/blackbox.py not using absolute_import
36 hgext/blackbox.py not using absolute_import
37 hgext/bugzilla.py not using absolute_import
37 hgext/bugzilla.py not using absolute_import
38 hgext/censor.py not using absolute_import
38 hgext/censor.py not using absolute_import
39 hgext/children.py not using absolute_import
39 hgext/children.py not using absolute_import
40 hgext/churn.py not using absolute_import
40 hgext/churn.py not using absolute_import
41 hgext/clonebundles.py not using absolute_import
41 hgext/clonebundles.py not using absolute_import
42 hgext/color.py not using absolute_import
42 hgext/color.py not using absolute_import
43 hgext/convert/__init__.py not using absolute_import
43 hgext/convert/__init__.py not using absolute_import
44 hgext/convert/bzr.py not using absolute_import
44 hgext/convert/bzr.py not using absolute_import
45 hgext/convert/common.py not using absolute_import
45 hgext/convert/common.py not using absolute_import
46 hgext/convert/convcmd.py not using absolute_import
46 hgext/convert/convcmd.py not using absolute_import
47 hgext/convert/cvs.py not using absolute_import
47 hgext/convert/cvs.py not using absolute_import
48 hgext/convert/cvsps.py not using absolute_import
48 hgext/convert/cvsps.py not using absolute_import
49 hgext/convert/darcs.py not using absolute_import
49 hgext/convert/darcs.py not using absolute_import
50 hgext/convert/filemap.py not using absolute_import
50 hgext/convert/filemap.py not using absolute_import
51 hgext/convert/git.py not using absolute_import
51 hgext/convert/git.py not using absolute_import
52 hgext/convert/gnuarch.py not using absolute_import
52 hgext/convert/gnuarch.py not using absolute_import
53 hgext/convert/hg.py not using absolute_import
53 hgext/convert/hg.py not using absolute_import
54 hgext/convert/monotone.py not using absolute_import
54 hgext/convert/monotone.py not using absolute_import
55 hgext/convert/p4.py not using absolute_import
55 hgext/convert/p4.py not using absolute_import
56 hgext/convert/subversion.py not using absolute_import
56 hgext/convert/subversion.py not using absolute_import
57 hgext/convert/transport.py not using absolute_import
57 hgext/convert/transport.py not using absolute_import
58 hgext/eol.py not using absolute_import
58 hgext/eol.py not using absolute_import
59 hgext/extdiff.py not using absolute_import
59 hgext/extdiff.py not using absolute_import
60 hgext/factotum.py not using absolute_import
60 hgext/factotum.py not using absolute_import
61 hgext/fetch.py not using absolute_import
61 hgext/fetch.py not using absolute_import
62 hgext/gpg.py not using absolute_import
62 hgext/gpg.py not using absolute_import
63 hgext/graphlog.py not using absolute_import
63 hgext/graphlog.py not using absolute_import
64 hgext/hgcia.py not using absolute_import
64 hgext/hgcia.py not using absolute_import
65 hgext/hgk.py not using absolute_import
65 hgext/hgk.py not using absolute_import
66 hgext/highlight/__init__.py not using absolute_import
66 hgext/highlight/__init__.py not using absolute_import
67 hgext/highlight/highlight.py not using absolute_import
67 hgext/highlight/highlight.py not using absolute_import
68 hgext/histedit.py not using absolute_import
68 hgext/histedit.py not using absolute_import
69 hgext/keyword.py not using absolute_import
69 hgext/keyword.py not using absolute_import
70 hgext/largefiles/__init__.py not using absolute_import
70 hgext/largefiles/__init__.py not using absolute_import
71 hgext/largefiles/basestore.py not using absolute_import
71 hgext/largefiles/basestore.py not using absolute_import
72 hgext/largefiles/lfcommands.py not using absolute_import
72 hgext/largefiles/lfcommands.py not using absolute_import
73 hgext/largefiles/lfutil.py not using absolute_import
73 hgext/largefiles/lfutil.py not using absolute_import
74 hgext/largefiles/localstore.py not using absolute_import
74 hgext/largefiles/localstore.py not using absolute_import
75 hgext/largefiles/overrides.py not using absolute_import
75 hgext/largefiles/overrides.py not using absolute_import
76 hgext/largefiles/proto.py not using absolute_import
76 hgext/largefiles/proto.py not using absolute_import
77 hgext/largefiles/remotestore.py not using absolute_import
77 hgext/largefiles/remotestore.py not using absolute_import
78 hgext/largefiles/reposetup.py not using absolute_import
78 hgext/largefiles/reposetup.py not using absolute_import
79 hgext/largefiles/uisetup.py not using absolute_import
79 hgext/largefiles/uisetup.py not using absolute_import
80 hgext/largefiles/wirestore.py not using absolute_import
80 hgext/largefiles/wirestore.py not using absolute_import
81 hgext/mq.py not using absolute_import
81 hgext/mq.py not using absolute_import
82 hgext/notify.py not using absolute_import
82 hgext/notify.py not using absolute_import
83 hgext/pager.py not using absolute_import
83 hgext/pager.py not using absolute_import
84 hgext/patchbomb.py not using absolute_import
84 hgext/patchbomb.py not using absolute_import
85 hgext/purge.py not using absolute_import
85 hgext/purge.py not using absolute_import
86 hgext/rebase.py not using absolute_import
86 hgext/rebase.py not using absolute_import
87 hgext/record.py not using absolute_import
87 hgext/record.py not using absolute_import
88 hgext/relink.py not using absolute_import
88 hgext/relink.py not using absolute_import
89 hgext/schemes.py not using absolute_import
89 hgext/schemes.py not using absolute_import
90 hgext/share.py not using absolute_import
90 hgext/share.py not using absolute_import
91 hgext/shelve.py not using absolute_import
91 hgext/shelve.py not using absolute_import
92 hgext/strip.py not using absolute_import
92 hgext/strip.py not using absolute_import
93 hgext/transplant.py not using absolute_import
93 hgext/transplant.py not using absolute_import
94 hgext/win32mbcs.py not using absolute_import
94 hgext/win32mbcs.py not using absolute_import
95 hgext/win32text.py not using absolute_import
95 hgext/win32text.py not using absolute_import
96 hgext/zeroconf/Zeroconf.py not using absolute_import
96 hgext/zeroconf/Zeroconf.py not using absolute_import
97 hgext/zeroconf/Zeroconf.py requires print_function
97 hgext/zeroconf/Zeroconf.py requires print_function
98 hgext/zeroconf/__init__.py not using absolute_import
98 hgext/zeroconf/__init__.py not using absolute_import
99 i18n/check-translation.py not using absolute_import
99 i18n/check-translation.py not using absolute_import
100 i18n/polib.py not using absolute_import
100 i18n/polib.py not using absolute_import
101 mercurial/byterange.py not using absolute_import
101 mercurial/byterange.py not using absolute_import
102 mercurial/cmdutil.py not using absolute_import
102 mercurial/cmdutil.py not using absolute_import
103 mercurial/commands.py not using absolute_import
103 mercurial/commands.py not using absolute_import
104 mercurial/context.py not using absolute_import
104 mercurial/context.py not using absolute_import
105 mercurial/dirstate.py not using absolute_import
105 mercurial/dirstate.py not using absolute_import
106 mercurial/dispatch.py requires print_function
106 mercurial/dispatch.py requires print_function
107 mercurial/exchange.py not using absolute_import
107 mercurial/exchange.py not using absolute_import
108 mercurial/help.py not using absolute_import
108 mercurial/help.py not using absolute_import
109 mercurial/httpclient/__init__.py not using absolute_import
109 mercurial/httpclient/__init__.py not using absolute_import
110 mercurial/httpclient/_readers.py not using absolute_import
110 mercurial/httpclient/_readers.py not using absolute_import
111 mercurial/httpclient/socketutil.py not using absolute_import
111 mercurial/httpclient/socketutil.py not using absolute_import
112 mercurial/httpconnection.py not using absolute_import
112 mercurial/httpconnection.py not using absolute_import
113 mercurial/keepalive.py not using absolute_import
113 mercurial/keepalive.py not using absolute_import
114 mercurial/keepalive.py requires print_function
114 mercurial/keepalive.py requires print_function
115 mercurial/localrepo.py not using absolute_import
115 mercurial/localrepo.py not using absolute_import
116 mercurial/lsprof.py requires print_function
116 mercurial/lsprof.py requires print_function
117 mercurial/lsprofcalltree.py not using absolute_import
117 mercurial/lsprofcalltree.py not using absolute_import
118 mercurial/lsprofcalltree.py requires print_function
118 mercurial/lsprofcalltree.py requires print_function
119 mercurial/mail.py requires print_function
119 mercurial/mail.py requires print_function
120 mercurial/manifest.py not using absolute_import
120 mercurial/manifest.py not using absolute_import
121 mercurial/mdiff.py not using absolute_import
121 mercurial/mdiff.py not using absolute_import
122 mercurial/patch.py not using absolute_import
122 mercurial/patch.py not using absolute_import
123 mercurial/pvec.py not using absolute_import
123 mercurial/pvec.py not using absolute_import
124 mercurial/py3kcompat.py not using absolute_import
124 mercurial/py3kcompat.py not using absolute_import
125 mercurial/revlog.py not using absolute_import
125 mercurial/revlog.py not using absolute_import
126 mercurial/scmposix.py not using absolute_import
126 mercurial/scmposix.py not using absolute_import
127 mercurial/scmutil.py not using absolute_import
127 mercurial/scmutil.py not using absolute_import
128 mercurial/scmwindows.py not using absolute_import
128 mercurial/scmwindows.py not using absolute_import
129 mercurial/similar.py not using absolute_import
129 mercurial/similar.py not using absolute_import
130 mercurial/store.py not using absolute_import
130 mercurial/store.py not using absolute_import
131 mercurial/util.py not using absolute_import
132 mercurial/windows.py not using absolute_import
131 mercurial/windows.py not using absolute_import
133 setup.py not using absolute_import
132 setup.py not using absolute_import
134 tests/filterpyflakes.py requires print_function
133 tests/filterpyflakes.py requires print_function
135 tests/generate-working-copy-states.py requires print_function
134 tests/generate-working-copy-states.py requires print_function
136 tests/get-with-headers.py requires print_function
135 tests/get-with-headers.py requires print_function
137 tests/heredoctest.py requires print_function
136 tests/heredoctest.py requires print_function
138 tests/hypothesishelpers.py not using absolute_import
137 tests/hypothesishelpers.py not using absolute_import
139 tests/hypothesishelpers.py requires print_function
138 tests/hypothesishelpers.py requires print_function
140 tests/killdaemons.py not using absolute_import
139 tests/killdaemons.py not using absolute_import
141 tests/md5sum.py not using absolute_import
140 tests/md5sum.py not using absolute_import
142 tests/mockblackbox.py not using absolute_import
141 tests/mockblackbox.py not using absolute_import
143 tests/printenv.py not using absolute_import
142 tests/printenv.py not using absolute_import
144 tests/readlink.py not using absolute_import
143 tests/readlink.py not using absolute_import
145 tests/readlink.py requires print_function
144 tests/readlink.py requires print_function
146 tests/revlog-formatv0.py not using absolute_import
145 tests/revlog-formatv0.py not using absolute_import
147 tests/run-tests.py not using absolute_import
146 tests/run-tests.py not using absolute_import
148 tests/seq.py not using absolute_import
147 tests/seq.py not using absolute_import
149 tests/seq.py requires print_function
148 tests/seq.py requires print_function
150 tests/silenttestrunner.py not using absolute_import
149 tests/silenttestrunner.py not using absolute_import
151 tests/silenttestrunner.py requires print_function
150 tests/silenttestrunner.py requires print_function
152 tests/sitecustomize.py not using absolute_import
151 tests/sitecustomize.py not using absolute_import
153 tests/svn-safe-append.py not using absolute_import
152 tests/svn-safe-append.py not using absolute_import
154 tests/svnxml.py not using absolute_import
153 tests/svnxml.py not using absolute_import
155 tests/test-ancestor.py requires print_function
154 tests/test-ancestor.py requires print_function
156 tests/test-atomictempfile.py not using absolute_import
155 tests/test-atomictempfile.py not using absolute_import
157 tests/test-batching.py not using absolute_import
156 tests/test-batching.py not using absolute_import
158 tests/test-batching.py requires print_function
157 tests/test-batching.py requires print_function
159 tests/test-bdiff.py not using absolute_import
158 tests/test-bdiff.py not using absolute_import
160 tests/test-bdiff.py requires print_function
159 tests/test-bdiff.py requires print_function
161 tests/test-context.py not using absolute_import
160 tests/test-context.py not using absolute_import
162 tests/test-context.py requires print_function
161 tests/test-context.py requires print_function
163 tests/test-demandimport.py not using absolute_import
162 tests/test-demandimport.py not using absolute_import
164 tests/test-demandimport.py requires print_function
163 tests/test-demandimport.py requires print_function
165 tests/test-dispatch.py not using absolute_import
164 tests/test-dispatch.py not using absolute_import
166 tests/test-dispatch.py requires print_function
165 tests/test-dispatch.py requires print_function
167 tests/test-doctest.py not using absolute_import
166 tests/test-doctest.py not using absolute_import
168 tests/test-duplicateoptions.py not using absolute_import
167 tests/test-duplicateoptions.py not using absolute_import
169 tests/test-duplicateoptions.py requires print_function
168 tests/test-duplicateoptions.py requires print_function
170 tests/test-filecache.py not using absolute_import
169 tests/test-filecache.py not using absolute_import
171 tests/test-filecache.py requires print_function
170 tests/test-filecache.py requires print_function
172 tests/test-filelog.py not using absolute_import
171 tests/test-filelog.py not using absolute_import
173 tests/test-filelog.py requires print_function
172 tests/test-filelog.py requires print_function
174 tests/test-hg-parseurl.py not using absolute_import
173 tests/test-hg-parseurl.py not using absolute_import
175 tests/test-hg-parseurl.py requires print_function
174 tests/test-hg-parseurl.py requires print_function
176 tests/test-hgweb-auth.py not using absolute_import
175 tests/test-hgweb-auth.py not using absolute_import
177 tests/test-hgweb-auth.py requires print_function
176 tests/test-hgweb-auth.py requires print_function
178 tests/test-hgwebdir-paths.py not using absolute_import
177 tests/test-hgwebdir-paths.py not using absolute_import
179 tests/test-hybridencode.py not using absolute_import
178 tests/test-hybridencode.py not using absolute_import
180 tests/test-hybridencode.py requires print_function
179 tests/test-hybridencode.py requires print_function
181 tests/test-lrucachedict.py not using absolute_import
180 tests/test-lrucachedict.py not using absolute_import
182 tests/test-lrucachedict.py requires print_function
181 tests/test-lrucachedict.py requires print_function
183 tests/test-manifest.py not using absolute_import
182 tests/test-manifest.py not using absolute_import
184 tests/test-minirst.py not using absolute_import
183 tests/test-minirst.py not using absolute_import
185 tests/test-minirst.py requires print_function
184 tests/test-minirst.py requires print_function
186 tests/test-parseindex2.py not using absolute_import
185 tests/test-parseindex2.py not using absolute_import
187 tests/test-parseindex2.py requires print_function
186 tests/test-parseindex2.py requires print_function
188 tests/test-pathencode.py not using absolute_import
187 tests/test-pathencode.py not using absolute_import
189 tests/test-pathencode.py requires print_function
188 tests/test-pathencode.py requires print_function
190 tests/test-propertycache.py not using absolute_import
189 tests/test-propertycache.py not using absolute_import
191 tests/test-propertycache.py requires print_function
190 tests/test-propertycache.py requires print_function
192 tests/test-revlog-ancestry.py not using absolute_import
191 tests/test-revlog-ancestry.py not using absolute_import
193 tests/test-revlog-ancestry.py requires print_function
192 tests/test-revlog-ancestry.py requires print_function
194 tests/test-run-tests.py not using absolute_import
193 tests/test-run-tests.py not using absolute_import
195 tests/test-simplemerge.py not using absolute_import
194 tests/test-simplemerge.py not using absolute_import
196 tests/test-status-inprocess.py not using absolute_import
195 tests/test-status-inprocess.py not using absolute_import
197 tests/test-status-inprocess.py requires print_function
196 tests/test-status-inprocess.py requires print_function
198 tests/test-symlink-os-yes-fs-no.py not using absolute_import
197 tests/test-symlink-os-yes-fs-no.py not using absolute_import
199 tests/test-trusted.py not using absolute_import
198 tests/test-trusted.py not using absolute_import
200 tests/test-trusted.py requires print_function
199 tests/test-trusted.py requires print_function
201 tests/test-ui-color.py not using absolute_import
200 tests/test-ui-color.py not using absolute_import
202 tests/test-ui-color.py requires print_function
201 tests/test-ui-color.py requires print_function
203 tests/test-ui-config.py not using absolute_import
202 tests/test-ui-config.py not using absolute_import
204 tests/test-ui-config.py requires print_function
203 tests/test-ui-config.py requires print_function
205 tests/test-ui-verbosity.py not using absolute_import
204 tests/test-ui-verbosity.py not using absolute_import
206 tests/test-ui-verbosity.py requires print_function
205 tests/test-ui-verbosity.py requires print_function
207 tests/test-url.py not using absolute_import
206 tests/test-url.py not using absolute_import
208 tests/test-url.py requires print_function
207 tests/test-url.py requires print_function
209 tests/test-walkrepo.py requires print_function
208 tests/test-walkrepo.py requires print_function
210 tests/test-wireproto.py requires print_function
209 tests/test-wireproto.py requires print_function
211 tests/tinyproxy.py requires print_function
210 tests/tinyproxy.py requires print_function